Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Developer's Certificate of Origin
All contributors to ANTLR v4 must formally agree to abide by the certificate of origin by signing at the bottom of the contributors.txt file. To contribute:
fork the ANTLR v4 GitHub repository
make your changes
[first-time contributors]: sign contributors.txt by adding your GitHub user id, full name, email address (you may obscure your e-mail, but it must still be decipherable by a human), and the date.
commit your changes
send a pull request
After you have signed once, you don't have to sign future pull requests; we can merge simply by checking that your name is in the contributors file.
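The remainder of this section lists Apache Atlas DSL search test queries, each paired with the number of result rows it is expected to return; the orderby entries near the end also carry the sort column name and an ascending flag. Below is a minimal sketch of how such {query, expected count} rows are typically consumed. It is not the project's actual test class: the TestNG data-provider wiring, the jettison JSON parsing, the "rows" key in the JSON result, and the injected discoveryService field with a searchByDSL(String) method are all assumptions to illustrate the idea, and should be adapted to the real test harness.

import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONObject;
import org.testng.Assert;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;

public class DSLQueryCountSketch {

    // Assumed to be provided by the test setup (e.g. injected); hypothetical here.
    private org.apache.atlas.discovery.DiscoveryService discoveryService;

    // In the full test, this provider would return every {query, expectedCount} row listed below.
    @DataProvider(name = "dslQueriesProvider")
    private Object[][] createDSLQueries() {
        return new Object[][]{
                {"from hive_db", 3},
                {"hive_table where (name = \"sales_fact\")", 1},
        };
    }

    @Test(dataProvider = "dslQueriesProvider")
    public void testSearchByDSLQueries(String dslQuery, Integer expectedNumRows) throws Exception {
        // Run the DSL query and compare the number of returned rows with the expected count.
        String jsonResults = discoveryService.searchByDSL(dslQuery);
        JSONObject results = new JSONObject(jsonResults);
        JSONArray rows = results.getJSONArray("rows");
        Assert.assertEquals(rows.length(), expectedNumRows.intValue(), "query: " + dslQuery);
    }
}

For the four-element orderby rows, a test along these lines would additionally walk the returned rows and verify that the named column is sorted in the indicated direction.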
{"hive_db as inst where inst.name=\"Reporting\" select inst as id, inst.name",1},
{"from hive_db as h select h as id",3},
{"from hive_db",3},
{"hive_db",3},
{"hive_db where hive_db.name=\"Reporting\"",1},
{"hive_db hive_db.name = \"Reporting\"",1},
{"hive_db where hive_db.name=\"Reporting\" select name, owner",1},
{"hive_db has name",3},
{"hive_db, hive_table",10},
{"View is JdbcAccess",2},
{"hive_db as db1, hive_table where db1.name = \"Reporting\"",0},//Not working - ATLAS-145
// - Final working query -> discoveryService.searchByGremlin("L:{_var_0 = [] as Set;g.V().has(\"__typeName\", \"hive_db\").fill(_var_0);g.V().has(\"__superTypeNames\", \"hive_db\").fill(_var_0);_var_0._().as(\"db1\").in(\"__hive_table.db\").back(\"db1\").and(_().has(\"hive_db.name\", T.eq, \"Reporting\")).toList()}")
/*
{"hive_db, hive_process has name"}, //Invalid query
{"hive_db where hive_db.name=\"Reporting\" and hive_db.createTime < " + System.currentTimeMillis()}
*/
{"from hive_table",10},
{"hive_table",10},
{"hive_table isa Dimension",3},
{"hive_column where hive_column isa PII",8},
{"View is Dimension",2},
// {"hive_column where hive_column isa PII select hive_column.name", 6}, //Not working - ATLAS-175
{"hive_column select hive_column.name",37},
{"hive_column select name",37},
{"hive_column where hive_column.name=\"customer_id\"",6},
{"from hive_table select hive_table.name",10},
{"hive_db where (name = \"Reporting\")",1},
{"hive_db where (name = \"Reporting\") select name as _col_0, owner as _col_1",1},
{"hive_db where hive_db is JdbcAccess",0},//Not supposed to work
{"hive_db hive_table",10},
{"hive_db where hive_db has name",3},
{"hive_db as db1 hive_table where (db1.name = \"Reporting\")",0},//Not working -> ATLAS-145
{"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 ",1},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 ",1},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 ",1},
/*
todo: does not work - ATLAS-146
{"hive_db where (name = \"Reporting\") and ((createTime + 1) > 0)"},
{"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") select db1.name
as dbName, tab.name as tabName"},
{"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) or (db1.name = \"Reporting\") select db1.name
as dbName, tab.name as tabName"},
{"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner
select db1.name as dbName, tab.name as tabName"},
{"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner
select db1.name as dbName, tab.name as tabName"},
*/
// trait searches
{"Dimension",5},
{"JdbcAccess",2},
{"ETL",5},
{"Metric",9},
{"PII",8},
{"`Log Data`",4},
// Not sure what the expected rows should be, but since we didn't assign or do anything with what was created,
// I assume it'll be zero.
{"`isa`",0},
/* Lineage queries are fired through ClosureQuery and are tested through HiveLineageJerseyResourceIt in the webapp module.
Those lineage queries are not included below, since DSL to Gremlin parsing/translation fails for lineage queries when array types
are used within loop expressions, which is the case with DataSet.inputs and outputs.
*/
{"hive_db where (name = \"Reporting\") limit 10",1},
{"hive_db where (name = \"Reporting\") select name as _col_0, owner as _col_1",1},
{"hive_db where (name = \"Reporting\") select name as _col_0, owner as _col_1 limit 10",1},
{"hive_db where hive_db is JdbcAccess",0},//Not supposed to work
{"hive_db hive_table",10},
{"hive_db hive_table limit 5",5},
{"hive_db hive_table limit 5 offset 5",5},
{"hive_db where hive_db has name",3},
{"hive_db where hive_db has name limit 5",3},
{"hive_db where hive_db has name limit 2 offset 0",2},
{"hive_db where hive_db has name limit 2 offset 1",2},
{"hive_db as db1 hive_table where (db1.name = \"Reporting\")",0},//Not working -> ATLAS-145
{"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 ",1},
{"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 limit 10",1},
{"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 limit 10 offset 1",0},
{"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 limit 10 offset 0",1},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 ",1},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 ",1},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0",1},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 offset 5",0},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 ",1},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0",1},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 1",0},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10",1},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 0 offset 1",0},
// trait searches
{"Dimension",5},
{"Dimension limit 2",2},
{"Dimension limit 2 offset 1",2},
{"Dimension limit 5 offset 4",1},
{"JdbcAccess",2},
{"JdbcAccess limit 5 offset 0",2},
{"JdbcAccess limit 2 offset 1",1},
{"JdbcAccess limit 1",1},
{"ETL",5},
{"ETL limit 2",2},
{"ETL limit 1",1},
{"ETL limit 1 offset 0",1},
{"ETL limit 2 offset 1",2},
{"Metric",9},
{"Metric limit 10",9},
{"Metric limit 2",2},
{"Metric limit 10 offset 1",8},
{"PII",8},
{"PII limit 10",8},
{"PII limit 2",2},
{"PII limit 10 offset 1",7},
{"`Log Data`",4},
{"`Log Data` limit 3",3},
{"`Log Data` limit 10 offset 2",2},
{"hive_table where name='sales_fact', db where name='Sales'",1},
{"hive_table where name='sales_fact', db where name='Sales' limit 10",1},
{"hive_table where name='sales_fact', db where name='Sales' limit 10 offset 1",0},
{"hive_table where name='sales_fact', db where name='Reporting'",0},
{"hive_table where name='sales_fact', db where name='Reporting' limit 10",0},
{"hive_table where name='sales_fact', db where name='Reporting' limit 10 offset 1",0},
{"hive_partition as p where values = ['2015-01-01']",1},
{"hive_partition as p where values = ['2015-01-01'] limit 10",1},
{"hive_partition as p where values = ['2015-01-01'] limit 10 offset 1",0},
{"hive_db where (name = \"Reporting\") orderby hive_db.name",1,"name",true},
{"hive_db where (name = \"Reporting\") orderby hive_db.name limit 10",1,"name",true},
{"hive_db where hive_db has name orderby hive_db.owner",3,"owner",true},
{"hive_db where hive_db has name orderby hive_db.owner limit 5",3,"owner",true},
{"hive_db where hive_db has name orderby hive_db.owner limit 2 offset 0",2,"owner",true},
{"hive_db where hive_db has name orderby hive_db.owner limit 2 offset 1",2,"owner",true},
{"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 orderby '_col_1'",1,"_col_1",true},
{"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 orderby '_col_1' limit 10",1,"_col_1",true},
{"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 orderby '_col_1' limit 10 offset 1",0,"_col_1",true},
{"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 orderby '_col_1' limit 10 offset 0",1,"_col_1",true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' ",1,"_col_1",true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 ",1,"_col_1",true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 offset 0",1,"_col_1",true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 offset 5",0,"_col_1",true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' ",1,"_col_0",true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10 offset 0",1,"_col_0",true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10 offset 1",0,"_col_0",true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10",1,"_col_0",true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 0 offset 1",0,"_col_0",true},
// { "from Person groupby (isOrganDonor) select Person.isOrganDonor as 'organDonor', count() as 'count', max(Person.age) as 'max', min(Person.age) as 'min'",
// new FieldValueValidator().withFieldNames("organDonor", "max", "min", "count")