Commit 5bd5327c by Ashutosh Mestry Committed by Madhan Neethiraj

ATLAS-2229: DSL implementation using Antlr

parent 71a30876
http://www.antlr.org/license.html
[The BSD License]
Copyright (c) 2012 Terence Parr and Sam Harwell
All rights reserved.
Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
Developer's Certificate of Origin
All contributors to ANTLR v4 must formally agree to abide by the certificate of origin by signing on the bottom of that document. To contribute:
fork the ANTLR v4 github repository
make your changes
[first time contributors]: sign contributors.txt by adding your github userid, full name, email address (you can obscure your e-mail, but it must be computable by human), and date.
commit your changes
send a pull request
After you have signed once, you don't have to sign future pull requests. We can merge by simply checking to see your name is in the contributors file.
\ No newline at end of file
...@@ -552,6 +552,7 @@ ...@@ -552,6 +552,7 @@
<javax.servlet.version>3.1.0</javax.servlet.version> <javax.servlet.version>3.1.0</javax.servlet.version>
<guava.version>19.0</guava.version> <guava.version>19.0</guava.version>
<scala.version>2.11.8</scala.version> <scala.version>2.11.8</scala.version>
<antlr4.version>4.7</antlr4.version>
<!-- Needed for hooks --> <!-- Needed for hooks -->
<aopalliance.version>1.0</aopalliance.version> <aopalliance.version>1.0</aopalliance.version>
...@@ -565,6 +566,7 @@ ...@@ -565,6 +566,7 @@
<paranamer.version>2.7</paranamer.version> <paranamer.version>2.7</paranamer.version>
<zkclient.version>0.8</zkclient.version> <zkclient.version>0.8</zkclient.version>
<enunciate-maven-plugin.version>2.10.1</enunciate-maven-plugin.version> <enunciate-maven-plugin.version>2.10.1</enunciate-maven-plugin.version>
<antlr4.plugin.version>4.5</antlr4.plugin.version>
<PermGen>64m</PermGen> <PermGen>64m</PermGen>
<MaxPermGen>512m</MaxPermGen> <MaxPermGen>512m</MaxPermGen>
...@@ -791,6 +793,12 @@ ...@@ -791,6 +793,12 @@
<dependencyManagement> <dependencyManagement>
<dependencies> <dependencies>
<dependency> <dependency>
<groupId>org.antlr</groupId>
<artifactId>antlr4-runtime</artifactId>
<version>${antlr4.version}</version>
</dependency>
<dependency>
<groupId>com.google.guava</groupId> <groupId>com.google.guava</groupId>
<artifactId>guava</artifactId> <artifactId>guava</artifactId>
<version>${guava.version}</version> <version>${guava.version}</version>
...@@ -1609,6 +1617,26 @@ ...@@ -1609,6 +1617,26 @@
<pluginManagement> <pluginManagement>
<plugins> <plugins>
<plugin> <plugin>
<groupId>org.antlr</groupId>
<artifactId>antlr4-maven-plugin</artifactId>
<version>${antlr4.plugin.version}</version>
<configuration>
<listener>false</listener>
<visitor>true</visitor>
</configuration>
<executions>
<execution>
<goals>
<goal>antlr4</goal>
</goals>
<phase>generate-sources</phase>
<configuration>
<outputDirectory>src/main/java</outputDirectory>
</configuration>
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.codehaus.mojo</groupId> <groupId>org.codehaus.mojo</groupId>
<artifactId>buildnumber-maven-plugin</artifactId> <artifactId>buildnumber-maven-plugin</artifactId>
<version>1.4</version> <version>1.4</version>
...@@ -1871,6 +1899,7 @@ ...@@ -1871,6 +1899,7 @@
<useEclipseDefaultExcludes>true</useEclipseDefaultExcludes> <useEclipseDefaultExcludes>true</useEclipseDefaultExcludes>
<excludeSubProjects>true</excludeSubProjects> <excludeSubProjects>true</excludeSubProjects>
<excludes> <excludes>
<exclude>**/antlr4/**</exclude>
<exclude>**/dependency-reduced-pom.xml</exclude> <exclude>**/dependency-reduced-pom.xml</exclude>
<exclude>**/javax.script.ScriptEngineFactory</exclude> <exclude>**/javax.script.ScriptEngineFactory</exclude>
<exclude>.reviewboardrc</exclude> <exclude>.reviewboardrc</exclude>
......
...@@ -59,6 +59,11 @@ ...@@ -59,6 +59,11 @@
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.antlr</groupId>
<artifactId>antlr4-runtime</artifactId>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId> <groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId> <artifactId>commons-lang3</artifactId>
</dependency> </dependency>
......
...@@ -33,15 +33,12 @@ import org.apache.atlas.model.discovery.SearchParameters; ...@@ -33,15 +33,12 @@ import org.apache.atlas.model.discovery.SearchParameters;
import org.apache.atlas.model.instance.AtlasEntityHeader; import org.apache.atlas.model.instance.AtlasEntityHeader;
import org.apache.atlas.model.instance.AtlasObjectId; import org.apache.atlas.model.instance.AtlasObjectId;
import org.apache.atlas.model.profile.AtlasUserSavedSearch; import org.apache.atlas.model.profile.AtlasUserSavedSearch;
import org.apache.atlas.query.Expressions.AliasExpression;
import org.apache.atlas.query.Expressions.Expression; import org.apache.atlas.query.Expressions.Expression;
import org.apache.atlas.query.Expressions.SelectExpression;
import org.apache.atlas.query.GremlinQuery; import org.apache.atlas.query.GremlinQuery;
import org.apache.atlas.query.GremlinTranslator; import org.apache.atlas.query.GremlinTranslator;
import org.apache.atlas.query.QueryParams; import org.apache.atlas.query.QueryParams;
import org.apache.atlas.query.QueryParser; import org.apache.atlas.query.QueryParser;
import org.apache.atlas.query.QueryProcessor; import org.apache.atlas.query.QueryProcessor;
import org.apache.atlas.query.SelectExpressionHelper;
import org.apache.atlas.repository.Constants; import org.apache.atlas.repository.Constants;
import org.apache.atlas.repository.graph.GraphBackedSearchIndexer; import org.apache.atlas.repository.graph.GraphBackedSearchIndexer;
import org.apache.atlas.repository.graph.GraphHelper; import org.apache.atlas.repository.graph.GraphHelper;
...@@ -149,29 +146,29 @@ public class EntityDiscoveryService implements AtlasDiscoveryService { ...@@ -149,29 +146,29 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
LOG.warn("searchUsingDslQuery({}): expected an AtlasVertex; found unexpected entry in result {}", dslQuery, element); LOG.warn("searchUsingDslQuery({}): expected an AtlasVertex; found unexpected entry in result {}", dslQuery, element);
} }
} }
} else if (firstElement instanceof Map && } else if (gremlinQuery.hasSelectList()) {
(((Map)firstElement).containsKey("theInstance") || ((Map)firstElement).containsKey("theTrait"))) { ret.setAttributes(toAttributesResult(queryResult, gremlinQuery));
} else if (firstElement instanceof Map) {
for (Object element : queryResult) { for (Object element : queryResult) {
if (element instanceof Map) { if (element instanceof Map) {
Map map = (Map)element; Map map = (Map)element;
if (map.containsKey("theInstance")) { for (Object key : map.keySet()) {
Object value = map.get("theInstance"); Object value = map.get(key);
if (value instanceof List && CollectionUtils.isNotEmpty((List)value)) { if (value instanceof List && CollectionUtils.isNotEmpty((List)value)) {
Object entry = ((List)value).get(0); for (Object o : (List) value) {
Object entry = o;
if (entry instanceof AtlasVertex) { if (entry instanceof AtlasVertex) {
ret.addEntity(entityRetriever.toAtlasEntityHeader((AtlasVertex)entry)); ret.addEntity(entityRetriever.toAtlasEntityHeader((AtlasVertex) entry));
} }
} }
} }
} else {
LOG.warn("searchUsingDslQuery({}): expected a trait result; found unexpected entry in result {}", dslQuery, element);
} }
} }
} else if (gremlinQuery.hasSelectList()) { }
ret.setAttributes(toAttributesResult(queryResult, gremlinQuery)); } else {
LOG.warn("searchUsingDslQuery({}/{}): found unexpected entry in result {}", dslQuery, dslQuery, gremlinQuery.queryStr());
} }
} }
...@@ -688,8 +685,9 @@ public class EntityDiscoveryService implements AtlasDiscoveryService { ...@@ -688,8 +685,9 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
throw new AtlasBaseException(DISCOVERY_QUERY_FAILED, query); throw new AtlasBaseException(DISCOVERY_QUERY_FAILED, query);
} }
Expression validExpression = QueryProcessor.validate(expression); QueryProcessor queryProcessor = new QueryProcessor(typeRegistry);
GremlinQuery gremlinQuery = new GremlinTranslator(validExpression).translate(); Expression validExpression = queryProcessor.validate(expression);
GremlinQuery gremlinQuery = new GremlinTranslator(queryProcessor, validExpression).translate();
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {
LOG.debug("Translated Gremlin Query: {}", gremlinQuery.queryStr()); LOG.debug("Translated Gremlin Query: {}", gremlinQuery.queryStr());
...@@ -722,41 +720,68 @@ public class EntityDiscoveryService implements AtlasDiscoveryService { ...@@ -722,41 +720,68 @@ public class EntityDiscoveryService implements AtlasDiscoveryService {
private AttributeSearchResult toAttributesResult(List results, GremlinQuery query) { private AttributeSearchResult toAttributesResult(List results, GremlinQuery query) {
AttributeSearchResult ret = new AttributeSearchResult(); AttributeSearchResult ret = new AttributeSearchResult();
List<String> names = new ArrayList<>(); List<String> names = extractNames(results);
List<List<Object>> values = new ArrayList<>(); List<List<Object>> values = extractValues(results);
// extract select attributes from gremlin query ret.setName(names);
SelectExpression selectExpr = SelectExpressionHelper.extractSelectExpression(query.expr()); ret.setValues(values);
if (selectExpr != null) { return ret;
List<AliasExpression> aliases = selectExpr.toJavaList(); }
if (CollectionUtils.isNotEmpty(aliases)) { private List<String> extractNames(List results) {
for (AliasExpression alias : aliases) { List<String> names = new ArrayList<>();
names.add(alias.alias()); for (Object obj : results) {
if (obj instanceof Map) {
Map map = (Map) obj;
if (MapUtils.isNotEmpty(map)) {
for (Object key : map.keySet()) {
names.add((String) key);
} }
ret.setName(names); return names;
}
} else if (obj instanceof List) {
List list = (List) obj;
if (CollectionUtils.isNotEmpty(list)) {
for(Object o : list) {
names.add((String) o);
} }
} }
}
}
return names;
}
private List<List<Object>> extractValues(List results) {
List<List<Object>> values = new ArrayList<>();
for (Object obj : results) { for (Object obj : results) {
if (obj instanceof Map) { if (obj instanceof Map) {
Map map = (Map) obj; Map map = (Map) obj;
List<Object> list = new ArrayList<>();
if (MapUtils.isNotEmpty(map)) { if (MapUtils.isNotEmpty(map)) {
for (Object key : map.keySet()) { for (Object key : map.keySet()) {
Object vals = map.get(key); Object vals = map.get(key);
values.add((List<Object>) vals); if(vals instanceof List) {
List l = (List) vals;
for(Object o : l) {
list.add(o);
} }
ret.setValues(values); }
}
values.add(list);
} }
} else if (obj instanceof List) { } else if (obj instanceof List) {
List list = (List) obj; List list = (List) obj;
if (CollectionUtils.isNotEmpty(list)) { if (CollectionUtils.isNotEmpty(list)) {
values.add(list); values.add(list);
} }
ret.setValues(values);
} }
} }
return ret;
return values;
} }
private boolean skipDeletedEntities(boolean excludeDeletedEntities, AtlasVertex<?, ?> vertex) { private boolean skipDeletedEntities(boolean excludeDeletedEntities, AtlasVertex<?, ?> vertex) {
......
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.query;
import org.apache.atlas.query.antlr4.AtlasDSLParser.*;
import org.apache.atlas.query.antlr4.AtlasDSLParserBaseVisitor;
import org.apache.commons.collections.CollectionUtils;
/**
 * ANTLR visitor that walks a parsed Atlas DSL query and forwards each
 * recognized construct (from/where/select/limit/orderby/groupby/is/has)
 * to the {@link QueryProcessor}, which accumulates Gremlin clauses.
 */
public class DSLVisitor extends AtlasDSLParserBaseVisitor<String> {
    private final QueryProcessor queryProcessor;

    public DSLVisitor(QueryProcessor queryProcessor) {
        this.queryProcessor = queryProcessor;
    }

    @Override
    public String visitFromExpression(final FromExpressionContext ctx) {
        FromSrcContext   fromSrc   = ctx.fromSrc();
        AliasExprContext aliasExpr = fromSrc.aliasExpr();

        if (aliasExpr != null) {
            // "from Type as t": identifier(0) is the type, identifier(1) the alias
            queryProcessor.addFromAlias(aliasExpr.identifier(0).getText(), aliasExpr.identifier(1).getText());
        } else {
            if (fromSrc.identifier() != null) {
                queryProcessor.addFrom(fromSrc.identifier().getText());
            } else {
                // quoted type name appears as a literal token
                queryProcessor.addFrom(fromSrc.literal().getText());
            }
        }

        return super.visitFromExpression(ctx);
    }

    @Override
    public String visitWhereClause(WhereClauseContext ctx) {
        ExprContext expr = ctx.expr();

        processExpr(expr.compE());

        if (CollectionUtils.isNotEmpty(expr.exprRight())) {
            for (ExprRightContext exprRight : expr.exprRight()) {
                if (exprRight.K_AND() != null) {
                    // AND expression: each conjunct becomes another has(...) filter
                    processExpr(exprRight.compE());
                }
                // OR is tricky — not handled yet; OR-ed conjuncts are silently dropped
            }
        }

        return super.visitWhereClause(ctx);
    }

    // Translates a single comparison (lhs op rhs) into a where-filter.
    // is/has clauses are excluded here; they are visited separately via
    // visitIsClause()/visitHasClause().
    private void processExpr(final CompEContext compE) {
        // fixed: the original condition tested compE.isClause() == null twice
        if (compE != null && compE.isClause() == null && compE.hasClause() == null) {
            ComparisonClauseContext comparisonClause = compE.comparisonClause();

            if (comparisonClause == null) {
                // parenthesized comparison: unwrap one level of the expression tree
                // NOTE(review): this chain assumes the nested contexts are all present;
                // a malformed/deeper expression would NPE here — confirm grammar guarantees
                comparisonClause = compE.arithE().multiE().atomE().expr().compE().comparisonClause();
            }

            if (comparisonClause != null) {
                String lhs = comparisonClause.arithE(0).getText();
                String op  = comparisonClause.operator().getText().toUpperCase();
                String rhs = comparisonClause.arithE(1).getText();

                queryProcessor.addWhere(lhs, op, rhs);
            }
        }
    }

    @Override
    public String visitSelectExpr(SelectExprContext ctx) {
        // "groupby(...)" shares the selectExpr rule; only a genuine select list is handled here
        if (!(ctx.getParent() instanceof GroupByExpressionContext)) {
            String[] items = new String[ctx.selectExpression().size()];

            for (int i = 0; i < ctx.selectExpression().size(); i++) {
                items[i] = ctx.selectExpression(i).expr().getText();
            }

            queryProcessor.addSelect(items);
        }

        return super.visitSelectExpr(ctx);
    }

    @Override
    public String visitLimitOffset(LimitOffsetContext ctx) {
        // offset defaults to "0" when no offset clause is present
        queryProcessor.addLimit(ctx.limitClause().NUMBER().toString(),
                                (ctx.offsetClause() == null ? "0" : ctx.offsetClause().NUMBER().getText()));

        return super.visitLimitOffset(ctx);
    }

    @Override
    public String visitOrderByExpr(OrderByExprContext ctx) {
        // descending only when an explicit "desc" sort order is given
        queryProcessor.addOrderBy(ctx.expr().getText(), (ctx.sortOrder() != null && ctx.sortOrder().getText().equalsIgnoreCase("desc")));

        return super.visitOrderByExpr(ctx);
    }

    @Override
    public String visitIsClause(IsClauseContext ctx) {
        queryProcessor.addFromIsA(ctx.arithE().getText(), ctx.identifier().getText());
        return super.visitIsClause(ctx);
    }

    @Override
    public String visitHasClause(HasClauseContext ctx) {
        queryProcessor.addFromProperty(ctx.arithE().getText(), ctx.identifier().getText());
        return super.visitHasClause(ctx);
    }

    @Override
    public String visitGroupByExpression(GroupByExpressionContext ctx) {
        String s = ctx.selectExpr().getText();
        queryProcessor.addGroupBy(s);
        return super.visitGroupByExpression(ctx);
    }
}
...@@ -18,28 +18,24 @@ ...@@ -18,28 +18,24 @@
package org.apache.atlas.query; package org.apache.atlas.query;
import java.util.List; import org.apache.atlas.query.antlr4.AtlasDSLParser.QueryContext;
public class Expressions { public class Expressions {
public static class Expression { public static class Expression {
private final QueryContext parsedQuery;
}
public static class AliasExpression {
public String alias() {
String ret = null;
return ret; public Expression(QueryContext q) {
parsedQuery = q;
} }
public Expression isReady() {
return (parsedQuery != null ? this : null);
} }
public static class SelectExpression { public void accept(DSLVisitor qv) {
public List<AliasExpression> toJavaList() { qv.visit(parsedQuery);
List<AliasExpression> ret = null;
return ret;
} }
} }
} }
...@@ -6,9 +6,9 @@ ...@@ -6,9 +6,9 @@
* to you under the Apache License, Version 2.0 (the * to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance * "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at * with the License. You may obtain a copy of the License at
* * <p>
* http://www.apache.org/licenses/LICENSE-2.0 * http://www.apache.org/licenses/LICENSE-2.0
* * <p>
* Unless required by applicable law or agreed to in writing, software * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
...@@ -17,26 +17,22 @@ ...@@ -17,26 +17,22 @@
*/ */
package org.apache.atlas.query; package org.apache.atlas.query;
import org.apache.atlas.query.Expressions.Expression;
public class GremlinQuery { public class GremlinQuery {
public boolean hasSelectList() { private final String queryStr;
boolean ret = false; private final boolean hasSelect;
return ret; public GremlinQuery(String text, boolean hasSelect) {
this.queryStr = text;
this.hasSelect = hasSelect;
} }
public String queryStr() {
String ret = null;
return ret; public boolean hasSelectList() {
return this.hasSelect;
} }
public Expression expr() { public String queryStr() {
Expression ret = null; return queryStr;
return ret;
} }
} }
...@@ -19,16 +19,23 @@ package org.apache.atlas.query; ...@@ -19,16 +19,23 @@ package org.apache.atlas.query;
import org.apache.atlas.query.Expressions.Expression; import org.apache.atlas.query.Expressions.Expression;
public class GremlinTranslator { public class GremlinTranslator {
private final QueryProcessor queryProcessor;
private Expression expression; private Expression expression;
public GremlinTranslator(Expression expression) { public GremlinTranslator(QueryProcessor queryProcessor, Expression expression) {
this.expression = expression; this.expression = expression;
this.queryProcessor = queryProcessor;
} }
public GremlinQuery translate() { public GremlinQuery translate() {
GremlinQuery ret = null; DSLVisitor qv = new DSLVisitor(queryProcessor);
expression.accept(qv);
queryProcessor.close();
GremlinQuery ret = new GremlinQuery(queryProcessor.getText(), queryProcessor.hasSelect());
return ret; return ret;
} }
} }
...@@ -6,9 +6,9 @@ ...@@ -6,9 +6,9 @@
* to you under the Apache License, Version 2.0 (the * to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance * "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at * with the License. You may obtain a copy of the License at
* * <p>
* http://www.apache.org/licenses/LICENSE-2.0 * http://www.apache.org/licenses/LICENSE-2.0
* * <p>
* Unless required by applicable law or agreed to in writing, software * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
...@@ -17,14 +17,25 @@ ...@@ -17,14 +17,25 @@
*/ */
package org.apache.atlas.query; package org.apache.atlas.query;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.TokenStream;
import org.apache.atlas.query.Expressions.Expression; import org.apache.atlas.query.Expressions.Expression;
import org.apache.atlas.query.antlr4.AtlasDSLLexer;
import org.apache.atlas.query.antlr4.AtlasDSLParser;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.util.Arrays; import java.util.Arrays;
import java.util.HashSet; import java.util.HashSet;
import java.util.Set; import java.util.Set;
public class QueryParser { public class QueryParser {
private static final Logger LOG = LoggerFactory.getLogger(QueryParser.class);
private static final Set<String> RESERVED_KEYWORDS = private static final Set<String> RESERVED_KEYWORDS =
new HashSet<>(Arrays.asList("[", "]", "(", ")", "=", "<", ">", "!=", "<=", ">=", ",", "and", "or", "+", "-", new HashSet<>(Arrays.asList("[", "]", "(", ")", "=", "<", ">", "!=", "<=", ">=", ",", "and", "or", "+", "-",
"*", "/", ".", "select", "from", "where", "groupby", "loop", "isa", "is", "has", "*", "/", ".", "select", "from", "where", "groupby", "loop", "isa", "is", "has",
...@@ -38,6 +49,18 @@ public class QueryParser { ...@@ -38,6 +49,18 @@ public class QueryParser {
public static Expression apply(String queryStr, QueryParams params) { public static Expression apply(String queryStr, QueryParams params) {
Expression ret = null; Expression ret = null;
try {
InputStream stream = new ByteArrayInputStream(queryStr.getBytes());
AtlasDSLLexer lexer = new AtlasDSLLexer(CharStreams.fromStream(stream));
TokenStream inputTokenStream = new CommonTokenStream(lexer);
AtlasDSLParser parser = new AtlasDSLParser(inputTokenStream);
ret = new Expression(parser.query());
} catch (IOException e) {
ret = null;
LOG.error(e.getMessage(), e);
}
return ret; return ret;
} }
} }
...@@ -6,9 +6,9 @@ ...@@ -6,9 +6,9 @@
* to you under the Apache License, Version 2.0 (the * to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance * "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at * with the License. You may obtain a copy of the License at
* * <p>
* http://www.apache.org/licenses/LICENSE-2.0 * http://www.apache.org/licenses/LICENSE-2.0
* * <p>
* Unless required by applicable law or agreed to in writing, software * Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, * distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
...@@ -17,12 +17,555 @@ ...@@ -17,12 +17,555 @@
*/ */
package org.apache.atlas.query; package org.apache.atlas.query;
import com.google.common.annotations.VisibleForTesting;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.TypeCategory;
import org.apache.atlas.model.discovery.SearchParameters;
import org.apache.atlas.query.Expressions.Expression; import org.apache.atlas.query.Expressions.Expression;
import org.apache.atlas.type.AtlasArrayType;
import org.apache.atlas.type.AtlasBuiltInTypes;
import org.apache.atlas.type.AtlasEntityType;
import org.apache.atlas.type.AtlasStructType;
import org.apache.atlas.type.AtlasType;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.apache.commons.lang.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.inject.Inject;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
public class QueryProcessor { public class QueryProcessor {
public static Expression validate(Expression expression) { private static final Logger LOG = LoggerFactory.getLogger(QueryProcessor.class);
Expressions.Expression ret = null;
private final int DEFAULT_QUERY_RESULT_LIMIT = 25;
private final Pattern SINGLE_QUOTED_IDENTIFIER = Pattern.compile("'(\\w[\\w\\d\\.\\s]*)'");
private final Pattern DOUBLE_QUOTED_IDENTIFIER = Pattern.compile("\"(\\w[\\w\\d\\.\\s]*)\"");
private final Pattern BACKTICK_QUOTED_IDENTIFIER = Pattern.compile("`(\\w[\\w\\d\\.\\s]*)`");
private final List<String> errorList = new ArrayList<>();
private final GremlinClauseList queryClauses = new GremlinClauseList(errorList);
private int currentStep;
private final TypeRegistryLookup registryLookup;
/**
 * Creates a processor backed by the live Atlas type registry.
 *
 * @param typeRegistry registry used to resolve type and attribute names
 *                     while building the Gremlin query
 */
@Inject
public QueryProcessor(AtlasTypeRegistry typeRegistry) {
    registryLookup = new TypeRegistryLookup(errorList, typeRegistry);
    init();   // seed the clause list with the traversal root: g.V()
}
/**
 * Test-only constructor that accepts a pre-built (possibly mocked) lookup.
 *
 * @param lookup type/attribute lookup to use instead of the live registry
 */
@VisibleForTesting
public QueryProcessor(TypeRegistryLookup lookup) {
    registryLookup = lookup;
    init();   // seed the clause list with the traversal root: g.V()
}
// Every generated query starts from the full vertex traversal: "g.V()".
private void init() {
    add(GremlinClause.G);
    add(GremlinClause.V);
}
/** Returns the expression when it parsed successfully, otherwise null (per Expression.isReady()). */
public Expression validate(Expression expression) {
    Expression checked = expression.isReady();
    return checked;
}
/**
 * Adds the DSL "from" source to the traversal.
 *
 * Three cases, decided via the registry lookup:
 *  - trait/classification name: filter on trait membership;
 *  - no type selected yet: this becomes the active type (including sub-types
 *    when the type has any);
 *  - a type is already active: treat typeName as a relationship hop (out-edge)
 *    from the active type, and switch the active type to the edge's target.
 *
 * @param typeName type, trait, or relationship name (may be quoted)
 */
public void addFrom(String typeName) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("addFrom(typeName={})", typeName);
    }

    // strip surrounding single/double quotes or backticks, if any
    String actualTypeName = extractIdentifier(typeName);

    if(registryLookup.isTypeTrait(actualTypeName)) {
        addTraitAndRegister(actualTypeName);
    } else if (!registryLookup.hasActiveType()) {
        registryLookup.registerActive(actualTypeName);
        if(registryLookup.doesActiveTypeHaveSubTypes()) {
            add(GremlinClause.HAS_TYPE_WITHIN, registryLookup.getActiveTypeAndSubTypes());
        } else {
            add(GremlinClause.HAS_TYPE, actualTypeName);
        }
    } else {
        // hop across the relationship edge and make the target the active type
        add(GremlinClause.OUT, registryLookup.getRelationshipEdgeLabelForActiveType(actualTypeName));
        registryLookup.registerActive(registryLookup.getTypeFromEdge(actualTypeName));
    }
}
// Emits a trait-membership filter for typeName and makes it the active type.
private void addTraitAndRegister(String typeName) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("addTraitAndRegister(typeName={})", typeName);
    }

    add(GremlinClause.TRAIT, typeName);
    registryLookup.registerActive(typeName);
}
// Handles "<type> isa <trait>": selects the type first (if none is active yet),
// then filters by trait membership.
public void addFromIsA(String typeName, String trait) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("addFromIsA(typeName={}, trait={})", typeName, trait);
    }

    boolean typeAlreadySelected = registryLookup.hasActiveType();
    if (!typeAlreadySelected) {
        addFrom(typeName);
    }

    add(GremlinClause.TRAIT, trait);
}
/**
 * Handles "<type> has <attribute>": ensures the type is selected, then adds
 * a has('<qualifiedAttributeName>') existence filter.
 *
 * @param typeName  type owning the attribute
 * @param attribute attribute whose presence is required
 */
public void addFromProperty(String typeName, String attribute) {
    if (LOG.isDebugEnabled()) {
        // fixed: message previously said "addFromIsA" (copy-paste from the sibling method)
        LOG.debug("addFromProperty(typeName={}, attribute={})", typeName, attribute);
    }

    if (!registryLookup.isSameAsActive(typeName)) {
        addFrom(typeName);
    }

    add(GremlinClause.HAS_PROPERTY, registryLookup.getQualifiedAttributeName(attribute));
}
// Handles "from <type> as <alias>": selects the type, then labels the step
// so later select()/where() clauses can reference the alias.
public void addFromAlias(String typeName, String alias) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("addFromAlias(typeName={}, alias={})", typeName, alias);
    }

    addFrom(typeName);
    addAsClause(alias);
}
/**
 * Adds a comparison filter from the where-clause.
 *
 * Comparison operators map one-to-one onto Gremlin has(..., op(...)) predicates;
 * LIKE is translated to a JanusGraph textRegex predicate with DSL wildcards
 * ('*', '?') converted to regex equivalents.
 */
public void addWhere(String lhs, String operator, String rhs) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("addWhere(lhs={}, operator={}, rhs={})", lhs, operator, rhs);
    }

    lhs = registryLookup.getQualifiedAttributeName(lhs);

    SearchParameters.Operator op = SearchParameters.Operator.fromString(operator);

    final String predicate;
    switch (op) {
        case LT:
            predicate = "lt";
            break;
        case GT:
            predicate = "gt";
            break;
        case LTE:
            predicate = "lte";
            break;
        case GTE:
            predicate = "gte";
            break;
        case EQ:
            predicate = "eq";
            break;
        case NEQ:
            predicate = "neq";
            break;
        case IN:
            // TODO: Handle multiple RHS values
            predicate = "within";
            break;
        default:
            predicate = null;
            break;
    }

    if (predicate != null) {
        add(GremlinClause.HAS_OPERATOR, lhs, predicate, rhs);
    } else if (op == SearchParameters.Operator.LIKE) {
        add(GremlinClause.TEXT_CONTAINS, lhs, rhs.replace("*", ".*").replace('?', '.'));
    }
}
/**
 * Adds the select list to the traversal.
 *
 * Primitive (or dotted) attributes are gathered into a single valueMap(...)
 * clause; non-primitive attributes become an out()-hop with an as()/select()
 * step pair.
 *
 * Fix: the original appended ", " after an item whenever it was not the last
 * array element, so a primitive attribute followed only by non-primitive ones
 * produced a trailing ", " inside valueMap(...). Items are now collected and
 * joined, which cannot leave a dangling separator.
 *
 * @param items attribute names from the DSL select clause
 */
public void addSelect(String[] items) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("addSelect(items.length={})", items != null ? items.length : -1);
    }

    List<String> valueMapAttrs = new ArrayList<>();

    for (String item : items) {
        String qualifiedName = registryLookup.getQualifiedAttributeName(item);

        if (item.contains(".") || registryLookup.isAttributePrimitiveTypeForActiveType(item)) {
            valueMapAttrs.add(String.format("'%s'", qualifiedName));
        } else {
            // non-primitive: hop across the relationship and label the step
            add(GremlinClause.OUT, registryLookup.getRelationshipEdgeLabelForActiveType(item));
            add(GremlinClause.AS, getCurrentStep());
            addSelectClause(getCurrentStep());
            incrementCurrentStep();
        }
    }

    if (!valueMapAttrs.isEmpty()) {
        addValueMapClause(StringUtils.join(valueMapAttrs, ", "));
    }
}
// Adds limit/offset. An order() step precedes paging so results are stable.
// A zero offset uses the cheaper limit(); otherwise a range() is emitted.
public void addLimit(String limit, String offset) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("addLimit(limit={}, offset={})", limit, offset);
    }

    add(GremlinClause.ORDER);

    boolean noOffset = offset.equalsIgnoreCase("0");
    if (noOffset) {
        add(GremlinClause.LIMIT, limit);
    } else {
        addRangeClause(offset, limit);
    }
}
// Adds "groupby(<item>)": a group() step followed by an ascending by() clause.
public void addGroupBy(String item) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("addGroupBy(item={})", item);
    }

    add(GremlinClause.GROUP);
    addByClause(item, false);
}
private void addRangeClause(String startIndex, String endIndex) {
if (LOG.isDebugEnabled()) {
LOG.debug("addRangeClause(startIndex={}, endIndex={})", startIndex, endIndex);
}
add(GremlinClause.RANGE, startIndex, startIndex, endIndex);
}
public String getText() {
String[] items = new String[queryClauses.size()];
for (int i = 0; i < queryClauses.size(); i++) {
items[i] = queryClauses.getValue(i);
}
String ret = StringUtils.join(items, ".");
if (LOG.isDebugEnabled()) {
LOG.debug("getText() => {}", ret);
}
return ret; return ret;
} }
// Finalizes the pipeline: applies the default result limit when the query
// specified none, then terminates the traversal with toList().
public void close() {
    boolean limitMissing = queryClauses.hasClause(GremlinClause.LIMIT) == -1;

    if (limitMissing) {
        add(GremlinClause.LIMIT, String.valueOf(DEFAULT_QUERY_RESULT_LIMIT));
    }

    add(GremlinClause.TO_LIST);
}
// True when the query produced a valueMap(...) clause, i.e. had a select list.
public boolean hasSelect() {
    boolean selectPresent = queryClauses.hasClause(GremlinClause.VALUE_MAP) != -1;
    return selectPresent;
}
// Labels the current step with as('<stepName>') and records its type so the
// alias can be resolved by later clauses.
public void addAsClause(String stepName) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("addAsClause(stepName={})", stepName);
    }

    add(GremlinClause.AS, stepName);
    registryLookup.registerStepType(stepName);
}
/**
 * Adds "orderby <name> [desc]": an order() step followed by a by() clause.
 *
 * Fix: addByClause() qualifies the attribute name itself, so the raw name is
 * passed here; previously the name was qualified in this method as well,
 * qualifying it twice (addGroupBy already passes the raw name).
 *
 * @param name   attribute to sort by (unqualified)
 * @param isDesc true for descending order
 */
public void addOrderBy(String name, boolean isDesc) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("addOrderBy(name={}, isDesc={})", name, isDesc);
    }

    add(GremlinClause.ORDER);
    addByClause(name, isDesc);
}
// Appends valueMap(<s>) where s is a pre-formatted, comma-separated list of
// quoted attribute names.
private void addValueMapClause(String s) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("addValueMapClause(s={})", s);
    }

    add(GremlinClause.VALUE_MAP, s);
}
// Appends select('<s>') referencing a previously labeled step.
private void addSelectClause(String s) {
    if (LOG.isDebugEnabled()) {
        LOG.debug("addSelectClause(s={})", s);
    }

    add(GremlinClause.SELECT, s);
}
/**
 * Appends by('<qualifiedName>') — or by('<qualifiedName>', decr) when
 * descending — after an order()/group() step.
 *
 * @param name  attribute name (qualified here via the registry lookup)
 * @param descr true for descending order
 */
private void addByClause(String name, boolean descr) {
    if (LOG.isDebugEnabled()) {
        // fixed: format string previously had one placeholder for two arguments
        LOG.debug("addByClause(name={}, descr={})", name, descr);
    }

    add((!descr) ? GremlinClause.BY : GremlinClause.BY_DESC,
        registryLookup.getQualifiedAttributeName(name));
}
// Name of the current synthetic step label: "s0", "s1", ...
private String getCurrentStep() {
    return "s" + currentStep;
}
// Advances the synthetic step counter after a label has been used.
private void incrementCurrentStep() {
    currentStep += 1;
}
// Formats the clause template with the supplied arguments and appends the
// resulting fragment to the query pipeline.
private void add(GremlinClause clause, String... args) {
    queryClauses.add(new GremlinClauseValue(clause, clause.get(args)));
}
private String extractIdentifier(String quotedIdentifier) {
String ret;
if (quotedIdentifier.charAt(0) == '`') {
ret = extract(BACKTICK_QUOTED_IDENTIFIER, quotedIdentifier);
} else if (quotedIdentifier.charAt(0) == '\'') {
ret = extract(SINGLE_QUOTED_IDENTIFIER, quotedIdentifier);
} else if (quotedIdentifier.charAt(0) == '"') {
ret = extract(DOUBLE_QUOTED_IDENTIFIER, quotedIdentifier);
} else {
ret = quotedIdentifier;
}
return ret;
}
private String extract(Pattern p, String s) {
Matcher m = p.matcher(s);
return m.find() ? m.group(1) : s;
}
    /**
     * Gremlin step templates emitted by the query composer. Each constant wraps
     * a String.format() pattern; {@link #get(String...)} substitutes arguments
     * into the pattern's %s placeholders.
     */
    private enum GremlinClause {
        AS("as('%s')"),
        BY("by('%s')"),
        BY_DESC("by('%s', decr)"),
        G("g"),
        GROUP("group()"),
        HAS("has('%s', %s)"),
        HAS_OPERATOR("has('%s', %s(%s))"),
        HAS_PROPERTY("has('%s')"),
        HAS_NOT_PROPERTY("hasNot('%s')"),
        HAS_TYPE("has('__typeName', '%s')"),
        HAS_TYPE_WITHIN("has('__typeName', within(%s))"),
        HAS_WITHIN("has('%s', within(%s))"),
        IN("in()"),
        LIMIT("limit(%s)"),
        ORDER("order()"),
        OUT("out('%s')"),
        // Takes three args (start, start, count) -> emits range(start, start + count)
        RANGE("range(%s, %s + %s)"),
        SELECT("select('%s')"),
        TO_LIST("toList()"),
        // JanusGraph-specific full-text predicates (see org.janusgraph.core.attribute.Text)
        TEXT_CONTAINS("has('%s', org.janusgraph.core.attribute.Text.textRegex(%s))"),
        TEXT_PREFIX("has('%s', org.janusgraph.core.attribute.Text.textPrefix(%s))"),
        TEXT_SUFFIX("has('%s', org.janusgraph.core.attribute.Text.textRegex(\".*\" + %s))"),
        TRAIT("has('__traitNames', within('%s'))"),
        V("V()"),
        VALUE_MAP("valueMap(%s)");
        // Raw String.format template for this step.
        private final String format;
        GremlinClause(String format) {
            this.format = format;
        }
        // Returns the template untouched when no args are supplied (for
        // placeholder-free templates), otherwise the formatted step text.
        String get(String... args) {
            return (args == null || args.length == 0) ?
                format :
                String.format(format, args);
        }
    }
    /**
     * Immutable pairing of a Gremlin clause with its fully formatted text.
     * NOTE: the 'value' field is also read directly by GremlinClauseList, so
     * its name is part of this class's effective interface.
     */
    private static class GremlinClauseValue {
        private final GremlinClause clause;
        private final String value;
        public GremlinClauseValue(GremlinClause clause, String value) {
            this.clause = clause;
            this.value = value;
        }
        public GremlinClause getClause() {
            return clause;
        }
        public String getValue() {
            return value;
        }
    }
private static class GremlinClauseList {
private final List<String> errorList;
private AtlasEntityType activeType;
private final List<GremlinClauseValue> list;
private GremlinClauseList(List<String> errorList) {
this.errorList = errorList;
this.list = new LinkedList<>();
}
public void add(GremlinClauseValue g) {
list.add(g);
}
public void add(GremlinClauseValue g, AtlasEntityType t) {
add(g);
activeType = t;
}
public void add(GremlinClause clause, String... args) {
list.add(new GremlinClauseValue(clause, clause.get(args)));
}
public String getValue(int i) {
return list.get(i).value;
}
public int size() {
return list.size();
}
public int hasClause(GremlinClause clause) {
for (int i = 0; i < list.size(); i++) {
if (list.get(i).getClause() == clause)
return i;
}
return -1;
}
}
@VisibleForTesting
static class TypeRegistryLookup {
private final List<String> errorList;
private final AtlasTypeRegistry typeRegistry;
private AtlasEntityType activeType;
private final Map<String, AtlasEntityType> asClauseContext = new HashMap<>();
public TypeRegistryLookup(List<String> errorList, AtlasTypeRegistry typeRegistry) {
this.errorList = errorList;
this.typeRegistry = typeRegistry;
}
public void registerActive(String typeName) {
activeType = typeRegistry.getEntityTypeByName(typeName);
}
public boolean hasActiveType() {
return (activeType != null);
}
public void registerStepType(String stepName) {
if (!asClauseContext.containsKey(stepName)) {
asClauseContext.put(stepName, activeType);
} else {
addError(String.format("Multiple steps with same name detected: %s", stepName));
}
}
protected void addError(String s) {
errorList.add(s);
}
public String getRelationshipEdgeLabelForActiveType(String item) {
return getRelationshipEdgeLabel(activeType, item);
}
private String getRelationshipEdgeLabel(AtlasEntityType t, String item) {
if(t == null) {
return "";
}
AtlasStructType.AtlasAttribute attr = t.getAttribute(item);
return (attr != null) ? attr.getRelationshipEdgeLabel() : "";
}
protected boolean isAttributePrimitiveTypeForActiveType(String name) {
return isAttributePrimitiveType(activeType, name);
}
private boolean isAttributePrimitiveType(AtlasEntityType t, String name) {
if (activeType == null) {
return false;
}
AtlasType attrType = t.getAttributeType(name);
TypeCategory attrTypeCategory = attrType.getTypeCategory();
return (attrTypeCategory == TypeCategory.PRIMITIVE || attrTypeCategory == TypeCategory.ENUM);
}
public boolean isTypeTrait(String name) {
return (typeRegistry.getClassificationTypeByName(name) != null);
}
public String getQualifiedAttributeName(String item) {
if (item.contains(".")) {
String[] keyValue = StringUtils.split(item, ".");
if (!asClauseContext.containsKey(keyValue[0])) {
return item;
} else {
String s = getStitchedString(keyValue, 1, keyValue.length - 1);
return getQualifiedAttributeNameFromType(
asClauseContext.get(keyValue[0]), s);
}
}
return getQualifiedAttributeNameFromType(activeType, item);
}
protected String getStitchedString(String[] keyValue, int startIndex, int endIndex) {
if(startIndex == endIndex) {
return keyValue[startIndex];
}
return StringUtils.join(keyValue, ".", startIndex, endIndex);
}
private String getQualifiedAttributeNameFromType(AtlasEntityType t, String item) {
try {
return (t != null) ? t.getQualifiedAttributeName(item) : item;
} catch (AtlasBaseException e) {
addError(e.getMessage());
}
return item;
}
public String getTypeFromEdge(String item) {
AtlasType at = activeType.getAttribute(item).getAttributeType();
if(at.getTypeCategory() == TypeCategory.ARRAY) {
AtlasArrayType arrType = ((AtlasArrayType)at);
return ((AtlasBuiltInTypes.AtlasObjectIdType) arrType.getElementType()).getObjectType();
}
return activeType.getAttribute(item).getTypeName();
}
public boolean doesActiveTypeHaveSubTypes() {
return (activeType.getAllSubTypes().size() != 0);
}
public String getActiveTypeAndSubTypes() {
Set<String> set = activeType.getTypeAndAllSubTypes();
String[] str = set.toArray(new String[]{});
for (int i = 0; i < str.length; i++) {
str[i] = String.format("'%s'", str[i]);
}
return StringUtils.join(str, ",");
}
public boolean isSameAsActive(String typeName) {
return (activeType != null) && activeType.getTypeName().equalsIgnoreCase(typeName);
}
}
} }
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
lexer grammar AtlasDSLLexer;
// Case-insensitive letter fragments: keywords below are composed from these so
// the DSL accepts any case mix (SELECT, select, Select, ...).
fragment A: ('A'|'a');
fragment B: ('B'|'b');
fragment C: ('C'|'c');
fragment D: ('D'|'d');
fragment E: ('E'|'e');
fragment F: ('F'|'f');
fragment G: ('G'|'g');
fragment H: ('H'|'h');
fragment I: ('I'|'i');
fragment J: ('J'|'j');
fragment K: ('K'|'k');
fragment L: ('L'|'l');
fragment M: ('M'|'m');
fragment N: ('N'|'n');
fragment O: ('O'|'o');
fragment P: ('P'|'p');
fragment Q: ('Q'|'q');
fragment R: ('R'|'r');
fragment S: ('S'|'s');
fragment T: ('T'|'t');
fragment U: ('U'|'u');
fragment V: ('V'|'v');
fragment W: ('W'|'w');
fragment X: ('X'|'x');
fragment Y: ('Y'|'y');
fragment Z: ('Z'|'z');
fragment DIGIT: [0-9];
// Identifier characters: ASCII letters plus underscore.
fragment LETTER: 'a'..'z'| 'A'..'Z' | '_';
// Comment skipping
SINGLE_LINE_COMMENT: '--' ~[\r\n]* -> channel(HIDDEN) ;
MULTILINE_COMMENT : '/*' .*? ( '*/' | EOF ) -> channel(HIDDEN) ;
WS: (' ' ' '* | [ \n\t\r]+) -> channel(HIDDEN) ;
// Lexer rules
// Numbers may carry a sign and an exponent; FLOATING_NUMBER wins over NUMBER
// for input like "1.5" via ANTLR's longest-match rule.
NUMBER: (K_PLUS | K_MINUS)? DIGIT DIGIT* (E (K_PLUS | K_MINUS)? DIGIT DIGIT*)? ;
FLOATING_NUMBER: (K_PLUS | K_MINUS)? DIGIT+ K_DOT DIGIT+ (E (K_PLUS | K_MINUS)? DIGIT DIGIT*)? ;
BOOL: K_TRUE | K_FALSE ;
K_COMMA: ',' ;
K_PLUS: '+' ;
K_MINUS: '-' ;
K_STAR: '*' ;
K_DIV: '/' ;
K_DOT: '.' ;
K_LIKE: L I K E ;
K_AND: A N D ;
K_OR: O R ;
K_LPAREN: '(' ;
K_LBRACKET: '[' ;
K_RPAREN: ')' ;
K_RBRACKET: ']' ;
// Comparison operators accept both symbolic and word forms (e.g. '<' or 'lt').
K_LT: '<' | L T ;
K_LTE: '<=' | L T E ;
K_EQ: '=' | E Q ;
K_NEQ: '!=' | N E Q ;
K_GT: '>' | G T ;
K_GTE: '>=' | G T E ;
// DSL keywords; declared before ID so keyword text tokenizes as a keyword.
K_FROM: F R O M ;
K_WHERE: W H E R E ;
K_ORDERBY: O R D E R B Y ;
K_GROUPBY: G R O U P B Y ;
K_LIMIT: L I M I T ;
K_SELECT: S E L E C T ;
K_MAX: M A X ;
K_MIN: M I N ;
K_SUM: S U M ;
K_COUNT: C O U N T ;
K_LOOP: L O O P ;
K_OFFSET: O F F S E T ;
K_AS: A S ;
K_ISA: I S A ;
K_IS: I S ;
K_HAS: H A S ;
K_ASC: A S C ;
K_DESC: D E S C ;
K_WITHPATH: W I T H P A T H ;
K_TRUE: T R U E ;
K_FALSE: F A L S E ;
// Umbrella token used by ID below so identifiers may embed keyword substrings.
KEYWORD: K_LIKE
    | K_DOT
    | K_SELECT
    | K_AS
    | K_HAS
    | K_IS
    | K_ISA
    | K_WHERE
    | K_LIMIT
    | K_TRUE
    | K_FALSE
    | K_AND
    | K_OR
    | K_GROUPBY
    | K_ORDERBY
    | K_WITHPATH
    | K_SUM
    | K_MIN
    | K_MAX
    | K_OFFSET
    | K_LOOP
    | K_FROM
    | K_DESC
    | K_ASC
    | K_COUNT
    ;
// Identifiers: quoted strings, plain names, or names containing keyword parts
// (e.g. "islands" contains 'is'); the alternatives make such names one token.
ID: STRING
    |LETTER (LETTER|DIGIT)*
    | LETTER (LETTER|DIGIT)* KEYWORD KEYWORD*
    | KEYWORD KEYWORD* LETTER (LETTER|DIGIT)*
    | LETTER (LETTER|DIGIT)* KEYWORD KEYWORD* LETTER (LETTER|DIGIT)*
    ;
// Double-quoted, single-quoted, or backtick-quoted literal (no escape support).
STRING: '"' ~('"')* '"' | '\'' ~('\'')* '\'' | '`' ~('`')* '`';
\ No newline at end of file
// Generated from AtlasDSLLexer.g4 by ANTLR 4.7
package org.apache.atlas.query.antlr4;
import org.antlr.v4.runtime.Lexer;
import org.antlr.v4.runtime.CharStream;
import org.antlr.v4.runtime.Token;
import org.antlr.v4.runtime.TokenStream;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.misc.*;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class AtlasDSLLexer extends Lexer {
static { RuntimeMetaData.checkVersion("4.7", RuntimeMetaData.VERSION); }
protected static final DFA[] _decisionToDFA;
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
public static final int
SINGLE_LINE_COMMENT=1, MULTILINE_COMMENT=2, WS=3, NUMBER=4, FLOATING_NUMBER=5,
BOOL=6, K_COMMA=7, K_PLUS=8, K_MINUS=9, K_STAR=10, K_DIV=11, K_DOT=12,
K_LIKE=13, K_AND=14, K_OR=15, K_LPAREN=16, K_LBRACKET=17, K_RPAREN=18,
K_RBRACKET=19, K_LT=20, K_LTE=21, K_EQ=22, K_NEQ=23, K_GT=24, K_GTE=25,
K_FROM=26, K_WHERE=27, K_ORDERBY=28, K_GROUPBY=29, K_LIMIT=30, K_SELECT=31,
K_MAX=32, K_MIN=33, K_SUM=34, K_COUNT=35, K_LOOP=36, K_OFFSET=37, K_AS=38,
K_ISA=39, K_IS=40, K_HAS=41, K_ASC=42, K_DESC=43, K_WITHPATH=44, K_TRUE=45,
K_FALSE=46, KEYWORD=47, ID=48, STRING=49;
public static String[] channelNames = {
"DEFAULT_TOKEN_CHANNEL", "HIDDEN"
};
public static String[] modeNames = {
"DEFAULT_MODE"
};
public static final String[] ruleNames = {
"A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N",
"O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z", "DIGIT", "LETTER",
"SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "WS", "NUMBER", "FLOATING_NUMBER",
"BOOL", "K_COMMA", "K_PLUS", "K_MINUS", "K_STAR", "K_DIV", "K_DOT", "K_LIKE",
"K_AND", "K_OR", "K_LPAREN", "K_LBRACKET", "K_RPAREN", "K_RBRACKET", "K_LT",
"K_LTE", "K_EQ", "K_NEQ", "K_GT", "K_GTE", "K_FROM", "K_WHERE", "K_ORDERBY",
"K_GROUPBY", "K_LIMIT", "K_SELECT", "K_MAX", "K_MIN", "K_SUM", "K_COUNT",
"K_LOOP", "K_OFFSET", "K_AS", "K_ISA", "K_IS", "K_HAS", "K_ASC", "K_DESC",
"K_WITHPATH", "K_TRUE", "K_FALSE", "KEYWORD", "ID", "STRING"
};
private static final String[] _LITERAL_NAMES = {
null, null, null, null, null, null, null, "','", "'+'", "'-'", "'*'",
"'/'", "'.'", null, null, null, "'('", "'['", "')'", "']'"
};
private static final String[] _SYMBOLIC_NAMES = {
null, "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "WS", "NUMBER", "FLOATING_NUMBER",
"BOOL", "K_COMMA", "K_PLUS", "K_MINUS", "K_STAR", "K_DIV", "K_DOT", "K_LIKE",
"K_AND", "K_OR", "K_LPAREN", "K_LBRACKET", "K_RPAREN", "K_RBRACKET", "K_LT",
"K_LTE", "K_EQ", "K_NEQ", "K_GT", "K_GTE", "K_FROM", "K_WHERE", "K_ORDERBY",
"K_GROUPBY", "K_LIMIT", "K_SELECT", "K_MAX", "K_MIN", "K_SUM", "K_COUNT",
"K_LOOP", "K_OFFSET", "K_AS", "K_ISA", "K_IS", "K_HAS", "K_ASC", "K_DESC",
"K_WITHPATH", "K_TRUE", "K_FALSE", "KEYWORD", "ID", "STRING"
};
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
/**
* @deprecated Use {@link #VOCABULARY} instead.
*/
@Deprecated
public static final String[] tokenNames;
static {
tokenNames = new String[_SYMBOLIC_NAMES.length];
for (int i = 0; i < tokenNames.length; i++) {
tokenNames[i] = VOCABULARY.getLiteralName(i);
if (tokenNames[i] == null) {
tokenNames[i] = VOCABULARY.getSymbolicName(i);
}
if (tokenNames[i] == null) {
tokenNames[i] = "<INVALID>";
}
}
}
@Override
@Deprecated
public String[] getTokenNames() {
return tokenNames;
}
@Override
public Vocabulary getVocabulary() {
return VOCABULARY;
}
public AtlasDSLLexer(CharStream input) {
super(input);
_interp = new LexerATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
@Override
public String getGrammarFileName() { return "AtlasDSLLexer.g4"; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public String getSerializedATN() { return _serializedATN; }
@Override
public String[] getChannelNames() { return channelNames; }
@Override
public String[] getModeNames() { return modeNames; }
@Override
public ATN getATN() { return _ATN; }
public static final String _serializedATN =
"\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\2\63\u026a\b\1\4\2"+
"\t\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4"+
"\13\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22"+
"\t\22\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31"+
"\t\31\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t"+
" \4!\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t"+
"+\4,\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\4\62\t\62\4\63\t\63\4\64"+
"\t\64\4\65\t\65\4\66\t\66\4\67\t\67\48\t8\49\t9\4:\t:\4;\t;\4<\t<\4=\t"+
"=\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4"+
"I\tI\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6"+
"\3\6\3\7\3\7\3\b\3\b\3\t\3\t\3\n\3\n\3\13\3\13\3\f\3\f\3\r\3\r\3\16\3"+
"\16\3\17\3\17\3\20\3\20\3\21\3\21\3\22\3\22\3\23\3\23\3\24\3\24\3\25\3"+
"\25\3\26\3\26\3\27\3\27\3\30\3\30\3\31\3\31\3\32\3\32\3\33\3\33\3\34\3"+
"\34\3\35\3\35\3\36\3\36\3\36\3\36\7\36\u00da\n\36\f\36\16\36\u00dd\13"+
"\36\3\36\3\36\3\37\3\37\3\37\3\37\7\37\u00e5\n\37\f\37\16\37\u00e8\13"+
"\37\3\37\3\37\3\37\5\37\u00ed\n\37\3\37\3\37\3 \3 \7 \u00f3\n \f \16 "+
"\u00f6\13 \3 \6 \u00f9\n \r \16 \u00fa\5 \u00fd\n \3 \3 \3!\3!\5!\u0103"+
"\n!\3!\3!\7!\u0107\n!\f!\16!\u010a\13!\3!\3!\3!\5!\u010f\n!\3!\3!\7!\u0113"+
"\n!\f!\16!\u0116\13!\5!\u0118\n!\3\"\3\"\5\"\u011c\n\"\3\"\6\"\u011f\n"+
"\"\r\"\16\"\u0120\3\"\3\"\6\"\u0125\n\"\r\"\16\"\u0126\3\"\3\"\3\"\5\""+
"\u012c\n\"\3\"\3\"\7\"\u0130\n\"\f\"\16\"\u0133\13\"\5\"\u0135\n\"\3#"+
"\3#\5#\u0139\n#\3$\3$\3%\3%\3&\3&\3\'\3\'\3(\3(\3)\3)\3*\3*\3*\3*\3*\3"+
"+\3+\3+\3+\3,\3,\3,\3-\3-\3.\3.\3/\3/\3\60\3\60\3\61\3\61\3\61\3\61\5"+
"\61\u015f\n\61\3\62\3\62\3\62\3\62\3\62\3\62\5\62\u0167\n\62\3\63\3\63"+
"\3\63\3\63\5\63\u016d\n\63\3\64\3\64\3\64\3\64\3\64\3\64\5\64\u0175\n"+
"\64\3\65\3\65\3\65\3\65\5\65\u017b\n\65\3\66\3\66\3\66\3\66\3\66\3\66"+
"\5\66\u0183\n\66\3\67\3\67\3\67\3\67\3\67\38\38\38\38\38\38\39\39\39\3"+
"9\39\39\39\39\3:\3:\3:\3:\3:\3:\3:\3:\3;\3;\3;\3;\3;\3;\3<\3<\3<\3<\3"+
"<\3<\3<\3=\3=\3=\3=\3>\3>\3>\3>\3?\3?\3?\3?\3@\3@\3@\3@\3@\3@\3A\3A\3"+
"A\3A\3A\3B\3B\3B\3B\3B\3B\3B\3C\3C\3C\3D\3D\3D\3D\3E\3E\3E\3F\3F\3F\3"+
"F\3G\3G\3G\3G\3H\3H\3H\3H\3H\3I\3I\3I\3I\3I\3I\3I\3I\3I\3J\3J\3J\3J\3"+
"J\3K\3K\3K\3K\3K\3K\3L\3L\3L\3L\3L\3L\3L\3L\3L\3L\3L\3L\3L\3L\3L\3L\3"+
"L\3L\3L\3L\3L\3L\3L\3L\3L\5L\u020f\nL\3M\3M\3M\3M\7M\u0215\nM\fM\16M\u0218"+
"\13M\3M\3M\3M\7M\u021d\nM\fM\16M\u0220\13M\3M\3M\7M\u0224\nM\fM\16M\u0227"+
"\13M\3M\3M\7M\u022b\nM\fM\16M\u022e\13M\3M\3M\3M\7M\u0233\nM\fM\16M\u0236"+
"\13M\3M\3M\3M\7M\u023b\nM\fM\16M\u023e\13M\3M\3M\7M\u0242\nM\fM\16M\u0245"+
"\13M\3M\3M\3M\7M\u024a\nM\fM\16M\u024d\13M\5M\u024f\nM\3N\3N\7N\u0253"+
"\nN\fN\16N\u0256\13N\3N\3N\3N\7N\u025b\nN\fN\16N\u025e\13N\3N\3N\3N\7"+
"N\u0263\nN\fN\16N\u0266\13N\3N\5N\u0269\nN\3\u00e6\2O\3\2\5\2\7\2\t\2"+
"\13\2\r\2\17\2\21\2\23\2\25\2\27\2\31\2\33\2\35\2\37\2!\2#\2%\2\'\2)\2"+
"+\2-\2/\2\61\2\63\2\65\2\67\29\2;\3=\4?\5A\6C\7E\bG\tI\nK\13M\fO\rQ\16"+
"S\17U\20W\21Y\22[\23]\24_\25a\26c\27e\30g\31i\32k\33m\34o\35q\36s\37u"+
" w!y\"{#}$\177%\u0081&\u0083\'\u0085(\u0087)\u0089*\u008b+\u008d,\u008f"+
"-\u0091.\u0093/\u0095\60\u0097\61\u0099\62\u009b\63\3\2#\4\2CCcc\4\2D"+
"Ddd\4\2EEee\4\2FFff\4\2GGgg\4\2HHhh\4\2IIii\4\2JJjj\4\2KKkk\4\2LLll\4"+
"\2MMmm\4\2NNnn\4\2OOoo\4\2PPpp\4\2QQqq\4\2RRrr\4\2SSss\4\2TTtt\4\2UUu"+
"u\4\2VVvv\4\2WWww\4\2XXxx\4\2YYyy\4\2ZZzz\4\2[[{{\4\2\\\\||\3\2\62;\5"+
"\2C\\aac|\4\2\f\f\17\17\5\2\13\f\17\17\"\"\3\2$$\3\2))\3\2bb\2\u0297\2"+
";\3\2\2\2\2=\3\2\2\2\2?\3\2\2\2\2A\3\2\2\2\2C\3\2\2\2\2E\3\2\2\2\2G\3"+
"\2\2\2\2I\3\2\2\2\2K\3\2\2\2\2M\3\2\2\2\2O\3\2\2\2\2Q\3\2\2\2\2S\3\2\2"+
"\2\2U\3\2\2\2\2W\3\2\2\2\2Y\3\2\2\2\2[\3\2\2\2\2]\3\2\2\2\2_\3\2\2\2\2"+
"a\3\2\2\2\2c\3\2\2\2\2e\3\2\2\2\2g\3\2\2\2\2i\3\2\2\2\2k\3\2\2\2\2m\3"+
"\2\2\2\2o\3\2\2\2\2q\3\2\2\2\2s\3\2\2\2\2u\3\2\2\2\2w\3\2\2\2\2y\3\2\2"+
"\2\2{\3\2\2\2\2}\3\2\2\2\2\177\3\2\2\2\2\u0081\3\2\2\2\2\u0083\3\2\2\2"+
"\2\u0085\3\2\2\2\2\u0087\3\2\2\2\2\u0089\3\2\2\2\2\u008b\3\2\2\2\2\u008d"+
"\3\2\2\2\2\u008f\3\2\2\2\2\u0091\3\2\2\2\2\u0093\3\2\2\2\2\u0095\3\2\2"+
"\2\2\u0097\3\2\2\2\2\u0099\3\2\2\2\2\u009b\3\2\2\2\3\u009d\3\2\2\2\5\u009f"+
"\3\2\2\2\7\u00a1\3\2\2\2\t\u00a3\3\2\2\2\13\u00a5\3\2\2\2\r\u00a7\3\2"+
"\2\2\17\u00a9\3\2\2\2\21\u00ab\3\2\2\2\23\u00ad\3\2\2\2\25\u00af\3\2\2"+
"\2\27\u00b1\3\2\2\2\31\u00b3\3\2\2\2\33\u00b5\3\2\2\2\35\u00b7\3\2\2\2"+
"\37\u00b9\3\2\2\2!\u00bb\3\2\2\2#\u00bd\3\2\2\2%\u00bf\3\2\2\2\'\u00c1"+
"\3\2\2\2)\u00c3\3\2\2\2+\u00c5\3\2\2\2-\u00c7\3\2\2\2/\u00c9\3\2\2\2\61"+
"\u00cb\3\2\2\2\63\u00cd\3\2\2\2\65\u00cf\3\2\2\2\67\u00d1\3\2\2\29\u00d3"+
"\3\2\2\2;\u00d5\3\2\2\2=\u00e0\3\2\2\2?\u00fc\3\2\2\2A\u0102\3\2\2\2C"+
"\u011b\3\2\2\2E\u0138\3\2\2\2G\u013a\3\2\2\2I\u013c\3\2\2\2K\u013e\3\2"+
"\2\2M\u0140\3\2\2\2O\u0142\3\2\2\2Q\u0144\3\2\2\2S\u0146\3\2\2\2U\u014b"+
"\3\2\2\2W\u014f\3\2\2\2Y\u0152\3\2\2\2[\u0154\3\2\2\2]\u0156\3\2\2\2_"+
"\u0158\3\2\2\2a\u015e\3\2\2\2c\u0166\3\2\2\2e\u016c\3\2\2\2g\u0174\3\2"+
"\2\2i\u017a\3\2\2\2k\u0182\3\2\2\2m\u0184\3\2\2\2o\u0189\3\2\2\2q\u018f"+
"\3\2\2\2s\u0197\3\2\2\2u\u019f\3\2\2\2w\u01a5\3\2\2\2y\u01ac\3\2\2\2{"+
"\u01b0\3\2\2\2}\u01b4\3\2\2\2\177\u01b8\3\2\2\2\u0081\u01be\3\2\2\2\u0083"+
"\u01c3\3\2\2\2\u0085\u01ca\3\2\2\2\u0087\u01cd\3\2\2\2\u0089\u01d1\3\2"+
"\2\2\u008b\u01d4\3\2\2\2\u008d\u01d8\3\2\2\2\u008f\u01dc\3\2\2\2\u0091"+
"\u01e1\3\2\2\2\u0093\u01ea\3\2\2\2\u0095\u01ef\3\2\2\2\u0097\u020e\3\2"+
"\2\2\u0099\u024e\3\2\2\2\u009b\u0268\3\2\2\2\u009d\u009e\t\2\2\2\u009e"+
"\4\3\2\2\2\u009f\u00a0\t\3\2\2\u00a0\6\3\2\2\2\u00a1\u00a2\t\4\2\2\u00a2"+
"\b\3\2\2\2\u00a3\u00a4\t\5\2\2\u00a4\n\3\2\2\2\u00a5\u00a6\t\6\2\2\u00a6"+
"\f\3\2\2\2\u00a7\u00a8\t\7\2\2\u00a8\16\3\2\2\2\u00a9\u00aa\t\b\2\2\u00aa"+
"\20\3\2\2\2\u00ab\u00ac\t\t\2\2\u00ac\22\3\2\2\2\u00ad\u00ae\t\n\2\2\u00ae"+
"\24\3\2\2\2\u00af\u00b0\t\13\2\2\u00b0\26\3\2\2\2\u00b1\u00b2\t\f\2\2"+
"\u00b2\30\3\2\2\2\u00b3\u00b4\t\r\2\2\u00b4\32\3\2\2\2\u00b5\u00b6\t\16"+
"\2\2\u00b6\34\3\2\2\2\u00b7\u00b8\t\17\2\2\u00b8\36\3\2\2\2\u00b9\u00ba"+
"\t\20\2\2\u00ba \3\2\2\2\u00bb\u00bc\t\21\2\2\u00bc\"\3\2\2\2\u00bd\u00be"+
"\t\22\2\2\u00be$\3\2\2\2\u00bf\u00c0\t\23\2\2\u00c0&\3\2\2\2\u00c1\u00c2"+
"\t\24\2\2\u00c2(\3\2\2\2\u00c3\u00c4\t\25\2\2\u00c4*\3\2\2\2\u00c5\u00c6"+
"\t\26\2\2\u00c6,\3\2\2\2\u00c7\u00c8\t\27\2\2\u00c8.\3\2\2\2\u00c9\u00ca"+
"\t\30\2\2\u00ca\60\3\2\2\2\u00cb\u00cc\t\31\2\2\u00cc\62\3\2\2\2\u00cd"+
"\u00ce\t\32\2\2\u00ce\64\3\2\2\2\u00cf\u00d0\t\33\2\2\u00d0\66\3\2\2\2"+
"\u00d1\u00d2\t\34\2\2\u00d28\3\2\2\2\u00d3\u00d4\t\35\2\2\u00d4:\3\2\2"+
"\2\u00d5\u00d6\7/\2\2\u00d6\u00d7\7/\2\2\u00d7\u00db\3\2\2\2\u00d8\u00da"+
"\n\36\2\2\u00d9\u00d8\3\2\2\2\u00da\u00dd\3\2\2\2\u00db\u00d9\3\2\2\2"+
"\u00db\u00dc\3\2\2\2\u00dc\u00de\3\2\2\2\u00dd\u00db\3\2\2\2\u00de\u00df"+
"\b\36\2\2\u00df<\3\2\2\2\u00e0\u00e1\7\61\2\2\u00e1\u00e2\7,\2\2\u00e2"+
"\u00e6\3\2\2\2\u00e3\u00e5\13\2\2\2\u00e4\u00e3\3\2\2\2\u00e5\u00e8\3"+
"\2\2\2\u00e6\u00e7\3\2\2\2\u00e6\u00e4\3\2\2\2\u00e7\u00ec\3\2\2\2\u00e8"+
"\u00e6\3\2\2\2\u00e9\u00ea\7,\2\2\u00ea\u00ed\7\61\2\2\u00eb\u00ed\7\2"+
"\2\3\u00ec\u00e9\3\2\2\2\u00ec\u00eb\3\2\2\2\u00ed\u00ee\3\2\2\2\u00ee"+
"\u00ef\b\37\2\2\u00ef>\3\2\2\2\u00f0\u00f4\7\"\2\2\u00f1\u00f3\7\"\2\2"+
"\u00f2\u00f1\3\2\2\2\u00f3\u00f6\3\2\2\2\u00f4\u00f2\3\2\2\2\u00f4\u00f5"+
"\3\2\2\2\u00f5\u00fd\3\2\2\2\u00f6\u00f4\3\2\2\2\u00f7\u00f9\t\37\2\2"+
"\u00f8\u00f7\3\2\2\2\u00f9\u00fa\3\2\2\2\u00fa\u00f8\3\2\2\2\u00fa\u00fb"+
"\3\2\2\2\u00fb\u00fd\3\2\2\2\u00fc\u00f0\3\2\2\2\u00fc\u00f8\3\2\2\2\u00fd"+
"\u00fe\3\2\2\2\u00fe\u00ff\b \2\2\u00ff@\3\2\2\2\u0100\u0103\5I%\2\u0101"+
"\u0103\5K&\2\u0102\u0100\3\2\2\2\u0102\u0101\3\2\2\2\u0102\u0103\3\2\2"+
"\2\u0103\u0104\3\2\2\2\u0104\u0108\5\67\34\2\u0105\u0107\5\67\34\2\u0106"+
"\u0105\3\2\2\2\u0107\u010a\3\2\2\2\u0108\u0106\3\2\2\2\u0108\u0109\3\2"+
"\2\2\u0109\u0117\3\2\2\2\u010a\u0108\3\2\2\2\u010b\u010e\5\13\6\2\u010c"+
"\u010f\5I%\2\u010d\u010f\5K&\2\u010e\u010c\3\2\2\2\u010e\u010d\3\2\2\2"+
"\u010e\u010f\3\2\2\2\u010f\u0110\3\2\2\2\u0110\u0114\5\67\34\2\u0111\u0113"+
"\5\67\34\2\u0112\u0111\3\2\2\2\u0113\u0116\3\2\2\2\u0114\u0112\3\2\2\2"+
"\u0114\u0115\3\2\2\2\u0115\u0118\3\2\2\2\u0116\u0114\3\2\2\2\u0117\u010b"+
"\3\2\2\2\u0117\u0118\3\2\2\2\u0118B\3\2\2\2\u0119\u011c\5I%\2\u011a\u011c"+
"\5K&\2\u011b\u0119\3\2\2\2\u011b\u011a\3\2\2\2\u011b\u011c\3\2\2\2\u011c"+
"\u011e\3\2\2\2\u011d\u011f\5\67\34\2\u011e\u011d\3\2\2\2\u011f\u0120\3"+
"\2\2\2\u0120\u011e\3\2\2\2\u0120\u0121\3\2\2\2\u0121\u0122\3\2\2\2\u0122"+
"\u0124\5Q)\2\u0123\u0125\5\67\34\2\u0124\u0123\3\2\2\2\u0125\u0126\3\2"+
"\2\2\u0126\u0124\3\2\2\2\u0126\u0127\3\2\2\2\u0127\u0134\3\2\2\2\u0128"+
"\u012b\5\13\6\2\u0129\u012c\5I%\2\u012a\u012c\5K&\2\u012b\u0129\3\2\2"+
"\2\u012b\u012a\3\2\2\2\u012b\u012c\3\2\2\2\u012c\u012d\3\2\2\2\u012d\u0131"+
"\5\67\34\2\u012e\u0130\5\67\34\2\u012f\u012e\3\2\2\2\u0130\u0133\3\2\2"+
"\2\u0131\u012f\3\2\2\2\u0131\u0132\3\2\2\2\u0132\u0135\3\2\2\2\u0133\u0131"+
"\3\2\2\2\u0134\u0128\3\2\2\2\u0134\u0135\3\2\2\2\u0135D\3\2\2\2\u0136"+
"\u0139\5\u0093J\2\u0137\u0139\5\u0095K\2\u0138\u0136\3\2\2\2\u0138\u0137"+
"\3\2\2\2\u0139F\3\2\2\2\u013a\u013b\7.\2\2\u013bH\3\2\2\2\u013c\u013d"+
"\7-\2\2\u013dJ\3\2\2\2\u013e\u013f\7/\2\2\u013fL\3\2\2\2\u0140\u0141\7"+
",\2\2\u0141N\3\2\2\2\u0142\u0143\7\61\2\2\u0143P\3\2\2\2\u0144\u0145\7"+
"\60\2\2\u0145R\3\2\2\2\u0146\u0147\5\31\r\2\u0147\u0148\5\23\n\2\u0148"+
"\u0149\5\27\f\2\u0149\u014a\5\13\6\2\u014aT\3\2\2\2\u014b\u014c\5\3\2"+
"\2\u014c\u014d\5\35\17\2\u014d\u014e\5\t\5\2\u014eV\3\2\2\2\u014f\u0150"+
"\5\37\20\2\u0150\u0151\5%\23\2\u0151X\3\2\2\2\u0152\u0153\7*\2\2\u0153"+
"Z\3\2\2\2\u0154\u0155\7]\2\2\u0155\\\3\2\2\2\u0156\u0157\7+\2\2\u0157"+
"^\3\2\2\2\u0158\u0159\7_\2\2\u0159`\3\2\2\2\u015a\u015f\7>\2\2\u015b\u015c"+
"\5\31\r\2\u015c\u015d\5)\25\2\u015d\u015f\3\2\2\2\u015e\u015a\3\2\2\2"+
"\u015e\u015b\3\2\2\2\u015fb\3\2\2\2\u0160\u0161\7>\2\2\u0161\u0167\7?"+
"\2\2\u0162\u0163\5\31\r\2\u0163\u0164\5)\25\2\u0164\u0165\5\13\6\2\u0165"+
"\u0167\3\2\2\2\u0166\u0160\3\2\2\2\u0166\u0162\3\2\2\2\u0167d\3\2\2\2"+
"\u0168\u016d\7?\2\2\u0169\u016a\5\13\6\2\u016a\u016b\5#\22\2\u016b\u016d"+
"\3\2\2\2\u016c\u0168\3\2\2\2\u016c\u0169\3\2\2\2\u016df\3\2\2\2\u016e"+
"\u016f\7#\2\2\u016f\u0175\7?\2\2\u0170\u0171\5\35\17\2\u0171\u0172\5\13"+
"\6\2\u0172\u0173\5#\22\2\u0173\u0175\3\2\2\2\u0174\u016e\3\2\2\2\u0174"+
"\u0170\3\2\2\2\u0175h\3\2\2\2\u0176\u017b\7@\2\2\u0177\u0178\5\17\b\2"+
"\u0178\u0179\5)\25\2\u0179\u017b\3\2\2\2\u017a\u0176\3\2\2\2\u017a\u0177"+
"\3\2\2\2\u017bj\3\2\2\2\u017c\u017d\7@\2\2\u017d\u0183\7?\2\2\u017e\u017f"+
"\5\17\b\2\u017f\u0180\5)\25\2\u0180\u0181\5\13\6\2\u0181\u0183\3\2\2\2"+
"\u0182\u017c\3\2\2\2\u0182\u017e\3\2\2\2\u0183l\3\2\2\2\u0184\u0185\5"+
"\r\7\2\u0185\u0186\5%\23\2\u0186\u0187\5\37\20\2\u0187\u0188\5\33\16\2"+
"\u0188n\3\2\2\2\u0189\u018a\5/\30\2\u018a\u018b\5\21\t\2\u018b\u018c\5"+
"\13\6\2\u018c\u018d\5%\23\2\u018d\u018e\5\13\6\2\u018ep\3\2\2\2\u018f"+
"\u0190\5\37\20\2\u0190\u0191\5%\23\2\u0191\u0192\5\t\5\2\u0192\u0193\5"+
"\13\6\2\u0193\u0194\5%\23\2\u0194\u0195\5\5\3\2\u0195\u0196\5\63\32\2"+
"\u0196r\3\2\2\2\u0197\u0198\5\17\b\2\u0198\u0199\5%\23\2\u0199\u019a\5"+
"\37\20\2\u019a\u019b\5+\26\2\u019b\u019c\5!\21\2\u019c\u019d\5\5\3\2\u019d"+
"\u019e\5\63\32\2\u019et\3\2\2\2\u019f\u01a0\5\31\r\2\u01a0\u01a1\5\23"+
"\n\2\u01a1\u01a2\5\33\16\2\u01a2\u01a3\5\23\n\2\u01a3\u01a4\5)\25\2\u01a4"+
"v\3\2\2\2\u01a5\u01a6\5\'\24\2\u01a6\u01a7\5\13\6\2\u01a7\u01a8\5\31\r"+
"\2\u01a8\u01a9\5\13\6\2\u01a9\u01aa\5\7\4\2\u01aa\u01ab\5)\25\2\u01ab"+
"x\3\2\2\2\u01ac\u01ad\5\33\16\2\u01ad\u01ae\5\3\2\2\u01ae\u01af\5\61\31"+
"\2\u01afz\3\2\2\2\u01b0\u01b1\5\33\16\2\u01b1\u01b2\5\23\n\2\u01b2\u01b3"+
"\5\35\17\2\u01b3|\3\2\2\2\u01b4\u01b5\5\'\24\2\u01b5\u01b6\5+\26\2\u01b6"+
"\u01b7\5\33\16\2\u01b7~\3\2\2\2\u01b8\u01b9\5\7\4\2\u01b9\u01ba\5\37\20"+
"\2\u01ba\u01bb\5+\26\2\u01bb\u01bc\5\35\17\2\u01bc\u01bd\5)\25\2\u01bd"+
"\u0080\3\2\2\2\u01be\u01bf\5\31\r\2\u01bf\u01c0\5\37\20\2\u01c0\u01c1"+
"\5\37\20\2\u01c1\u01c2\5!\21\2\u01c2\u0082\3\2\2\2\u01c3\u01c4\5\37\20"+
"\2\u01c4\u01c5\5\r\7\2\u01c5\u01c6\5\r\7\2\u01c6\u01c7\5\'\24\2\u01c7"+
"\u01c8\5\13\6\2\u01c8\u01c9\5)\25\2\u01c9\u0084\3\2\2\2\u01ca\u01cb\5"+
"\3\2\2\u01cb\u01cc\5\'\24\2\u01cc\u0086\3\2\2\2\u01cd\u01ce\5\23\n\2\u01ce"+
"\u01cf\5\'\24\2\u01cf\u01d0\5\3\2\2\u01d0\u0088\3\2\2\2\u01d1\u01d2\5"+
"\23\n\2\u01d2\u01d3\5\'\24\2\u01d3\u008a\3\2\2\2\u01d4\u01d5\5\21\t\2"+
"\u01d5\u01d6\5\3\2\2\u01d6\u01d7\5\'\24\2\u01d7\u008c\3\2\2\2\u01d8\u01d9"+
"\5\3\2\2\u01d9\u01da\5\'\24\2\u01da\u01db\5\7\4\2\u01db\u008e\3\2\2\2"+
"\u01dc\u01dd\5\t\5\2\u01dd\u01de\5\13\6\2\u01de\u01df\5\'\24\2\u01df\u01e0"+
"\5\7\4\2\u01e0\u0090\3\2\2\2\u01e1\u01e2\5/\30\2\u01e2\u01e3\5\23\n\2"+
"\u01e3\u01e4\5)\25\2\u01e4\u01e5\5\21\t\2\u01e5\u01e6\5!\21\2\u01e6\u01e7"+
"\5\3\2\2\u01e7\u01e8\5)\25\2\u01e8\u01e9\5\21\t\2\u01e9\u0092\3\2\2\2"+
"\u01ea\u01eb\5)\25\2\u01eb\u01ec\5%\23\2\u01ec\u01ed\5+\26\2\u01ed\u01ee"+
"\5\13\6\2\u01ee\u0094\3\2\2\2\u01ef\u01f0\5\r\7\2\u01f0\u01f1\5\3\2\2"+
"\u01f1\u01f2\5\31\r\2\u01f2\u01f3\5\'\24\2\u01f3\u01f4\5\13\6\2\u01f4"+
"\u0096\3\2\2\2\u01f5\u020f\5S*\2\u01f6\u020f\5Q)\2\u01f7\u020f\5w<\2\u01f8"+
"\u020f\5\u0085C\2\u01f9\u020f\5\u008bF\2\u01fa\u020f\5\u0089E\2\u01fb"+
"\u020f\5\u0087D\2\u01fc\u020f\5o8\2\u01fd\u020f\5u;\2\u01fe\u020f\5\u0093"+
"J\2\u01ff\u020f\5\u0095K\2\u0200\u020f\5U+\2\u0201\u020f\5W,\2\u0202\u020f"+
"\5s:\2\u0203\u020f\5q9\2\u0204\u020f\5\u0091I\2\u0205\u020f\5}?\2\u0206"+
"\u020f\5{>\2\u0207\u020f\5y=\2\u0208\u020f\5\u0083B\2\u0209\u020f\5\u0081"+
"A\2\u020a\u020f\5m\67\2\u020b\u020f\5\u008fH\2\u020c\u020f\5\u008dG\2"+
"\u020d\u020f\5\177@\2\u020e\u01f5\3\2\2\2\u020e\u01f6\3\2\2\2\u020e\u01f7"+
"\3\2\2\2\u020e\u01f8\3\2\2\2\u020e\u01f9\3\2\2\2\u020e\u01fa\3\2\2\2\u020e"+
"\u01fb\3\2\2\2\u020e\u01fc\3\2\2\2\u020e\u01fd\3\2\2\2\u020e\u01fe\3\2"+
"\2\2\u020e\u01ff\3\2\2\2\u020e\u0200\3\2\2\2\u020e\u0201\3\2\2\2\u020e"+
"\u0202\3\2\2\2\u020e\u0203\3\2\2\2\u020e\u0204\3\2\2\2\u020e\u0205\3\2"+
"\2\2\u020e\u0206\3\2\2\2\u020e\u0207\3\2\2\2\u020e\u0208\3\2\2\2\u020e"+
"\u0209\3\2\2\2\u020e\u020a\3\2\2\2\u020e\u020b\3\2\2\2\u020e\u020c\3\2"+
"\2\2\u020e\u020d\3\2\2\2\u020f\u0098\3\2\2\2\u0210\u024f\5\u009bN\2\u0211"+
"\u0216\59\35\2\u0212\u0215\59\35\2\u0213\u0215\5\67\34\2\u0214\u0212\3"+
"\2\2\2\u0214\u0213\3\2\2\2\u0215\u0218\3\2\2\2\u0216\u0214\3\2\2\2\u0216"+
"\u0217\3\2\2\2\u0217\u024f\3\2\2\2\u0218\u0216\3\2\2\2\u0219\u021e\59"+
"\35\2\u021a\u021d\59\35\2\u021b\u021d\5\67\34\2\u021c\u021a\3\2\2\2\u021c"+
"\u021b\3\2\2\2\u021d\u0220\3\2\2\2\u021e\u021c\3\2\2\2\u021e\u021f\3\2"+
"\2\2\u021f\u0221\3\2\2\2\u0220\u021e\3\2\2\2\u0221\u0225\5\u0097L\2\u0222"+
"\u0224\5\u0097L\2\u0223\u0222\3\2\2\2\u0224\u0227\3\2\2\2\u0225\u0223"+
"\3\2\2\2\u0225\u0226\3\2\2\2\u0226\u024f\3\2\2\2\u0227\u0225\3\2\2\2\u0228"+
"\u022c\5\u0097L\2\u0229\u022b\5\u0097L\2\u022a\u0229\3\2\2\2\u022b\u022e"+
"\3\2\2\2\u022c\u022a\3\2\2\2\u022c\u022d\3\2\2\2\u022d\u022f\3\2\2\2\u022e"+
"\u022c\3\2\2\2\u022f\u0234\59\35\2\u0230\u0233\59\35\2\u0231\u0233\5\67"+
"\34\2\u0232\u0230\3\2\2\2\u0232\u0231\3\2\2\2\u0233\u0236\3\2\2\2\u0234"+
"\u0232\3\2\2\2\u0234\u0235\3\2\2\2\u0235\u024f\3\2\2\2\u0236\u0234\3\2"+
"\2\2\u0237\u023c\59\35\2\u0238\u023b\59\35\2\u0239\u023b\5\67\34\2\u023a"+
"\u0238\3\2\2\2\u023a\u0239\3\2\2\2\u023b\u023e\3\2\2\2\u023c\u023a\3\2"+
"\2\2\u023c\u023d\3\2\2\2\u023d\u023f\3\2\2\2\u023e\u023c\3\2\2\2\u023f"+
"\u0243\5\u0097L\2\u0240\u0242\5\u0097L\2\u0241\u0240\3\2\2\2\u0242\u0245"+
"\3\2\2\2\u0243\u0241\3\2\2\2\u0243\u0244\3\2\2\2\u0244\u0246\3\2\2\2\u0245"+
"\u0243\3\2\2\2\u0246\u024b\59\35\2\u0247\u024a\59\35\2\u0248\u024a\5\67"+
"\34\2\u0249\u0247\3\2\2\2\u0249\u0248\3\2\2\2\u024a\u024d\3\2\2\2\u024b"+
"\u0249\3\2\2\2\u024b\u024c\3\2\2\2\u024c\u024f\3\2\2\2\u024d\u024b\3\2"+
"\2\2\u024e\u0210\3\2\2\2\u024e\u0211\3\2\2\2\u024e\u0219\3\2\2\2\u024e"+
"\u0228\3\2\2\2\u024e\u0237\3\2\2\2\u024f\u009a\3\2\2\2\u0250\u0254\7$"+
"\2\2\u0251\u0253\n \2\2\u0252\u0251\3\2\2\2\u0253\u0256\3\2\2\2\u0254"+
"\u0252\3\2\2\2\u0254\u0255\3\2\2\2\u0255\u0257\3\2\2\2\u0256\u0254\3\2"+
"\2\2\u0257\u0269\7$\2\2\u0258\u025c\7)\2\2\u0259\u025b\n!\2\2\u025a\u0259"+
"\3\2\2\2\u025b\u025e\3\2\2\2\u025c\u025a\3\2\2\2\u025c\u025d\3\2\2\2\u025d"+
"\u025f\3\2\2\2\u025e\u025c\3\2\2\2\u025f\u0269\7)\2\2\u0260\u0264\7b\2"+
"\2\u0261\u0263\n\"\2\2\u0262\u0261\3\2\2\2\u0263\u0266\3\2\2\2\u0264\u0262"+
"\3\2\2\2\u0264\u0265\3\2\2\2\u0265\u0267\3\2\2\2\u0266\u0264\3\2\2\2\u0267"+
"\u0269\7b\2\2\u0268\u0250\3\2\2\2\u0268\u0258\3\2\2\2\u0268\u0260\3\2"+
"\2\2\u0269\u009c\3\2\2\2.\2\u00db\u00e6\u00ec\u00f4\u00fa\u00fc\u0102"+
"\u0108\u010e\u0114\u0117\u011b\u0120\u0126\u012b\u0131\u0134\u0138\u015e"+
"\u0166\u016c\u0174\u017a\u0182\u020e\u0214\u0216\u021c\u021e\u0225\u022c"+
"\u0232\u0234\u023a\u023c\u0243\u0249\u024b\u024e\u0254\u025c\u0264\u0268"+
"\3\2\3\2";
public static final ATN _ATN =
new ATNDeserializer().deserialize(_serializedATN.toCharArray());
static {
_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
}
}
}
\ No newline at end of file
SINGLE_LINE_COMMENT=1
MULTILINE_COMMENT=2
WS=3
NUMBER=4
FLOATING_NUMBER=5
BOOL=6
K_COMMA=7
K_PLUS=8
K_MINUS=9
K_STAR=10
K_DIV=11
K_DOT=12
K_LIKE=13
K_AND=14
K_OR=15
K_LPAREN=16
K_LBRACKET=17
K_RPAREN=18
K_RBRACKET=19
K_LT=20
K_LTE=21
K_EQ=22
K_NEQ=23
K_GT=24
K_GTE=25
K_FROM=26
K_WHERE=27
K_ORDERBY=28
K_GROUPBY=29
K_LIMIT=30
K_SELECT=31
K_MAX=32
K_MIN=33
K_SUM=34
K_COUNT=35
K_LOOP=36
K_OFFSET=37
K_AS=38
K_ISA=39
K_IS=40
K_HAS=41
K_ASC=42
K_DESC=43
K_WITHPATH=44
K_TRUE=45
K_FALSE=46
KEYWORD=47
ID=48
STRING=49
','=7
'+'=8
'-'=9
'*'=10
'/'=11
'.'=12
'('=16
'['=17
')'=18
']'=19
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
parser grammar AtlasDSLParser;
// Token definitions come from the companion lexer grammar.
options { tokenVocab=AtlasDSLLexer; }
// Start of rules, bottom-up (rules at the end are built using the core rules)
// Core rules
identifier: ID ;
operator: (K_LT | K_LTE | K_EQ | K_NEQ | K_GT | K_GTE | K_LIKE) ;
sortOrder: K_ASC | K_DESC ;
// Bracketed, comma-separated list of string literals, e.g. ["a", "b"].
valueArray: K_LBRACKET STRING (K_COMMA STRING)* K_RBRACKET ;
literal: BOOL | NUMBER | FLOATING_NUMBER | (STRING | valueArray) ;
// Composite rules
limitClause: K_LIMIT NUMBER ;
offsetClause: K_OFFSET NUMBER ;
// Arithmetic expression hierarchy: atomE < multiE (*,/) < arithE (+,-),
// giving '*' and '/' higher precedence than '+' and '-'.
atomE: (identifier | literal) | K_LPAREN expr K_RPAREN ;
multiERight: (K_STAR | K_DIV) atomE ;
multiE: atomE multiERight* ;
arithERight: (K_PLUS | K_MINUS) multiE ;
arithE: multiE arithERight* ;
comparisonClause: arithE operator arithE ;
// "x isa Type" / "x is Type" trait membership test.
isClause: arithE (K_ISA | K_IS) identifier ;
hasClause: arithE K_HAS identifier ;
// Aggregate functions usable in select expressions.
countClause: K_COUNT K_LPAREN K_RPAREN ;
maxClause: K_MAX K_LPAREN expr K_RPAREN ;
minClause: K_MIN K_LPAREN expr K_RPAREN ;
sumClause: K_SUM K_LPAREN expr K_RPAREN ;
exprRight: (K_AND | K_OR) compE ;
compE: comparisonClause
    | isClause
    | hasClause
    | arithE
    | countClause
    | maxClause
    | minClause
    | sumClause
    ;
// Boolean expression: comparisons chained with and/or.
expr: compE exprRight* ;
limitOffset: limitClause offsetClause? ;
selectExpression: expr (K_AS identifier)? ;
selectExpr: selectExpression (K_COMMA selectExpression)* ;
aliasExpr: (identifier | literal) K_AS identifier ;
orderByExpr: K_ORDERBY expr sortOrder? ;
fromSrc: aliasExpr | (identifier | literal) ;
whereClause: K_WHERE expr ;
fromExpression: fromSrc whereClause? ;
fromClause: K_FROM fromExpression ;
selectClause: K_SELECT selectExpr ;
// A query source may omit the 'from' keyword entirely (bare type name or expr).
singleQrySrc: fromClause | whereClause | fromExpression | expr ;
loopExpression: K_LOOP K_LPAREN query K_RPAREN NUMBER? (K_AS identifier)? ;
groupByExpression: K_GROUPBY K_LPAREN selectExpr K_RPAREN ;
commaDelimitedQueries: singleQrySrc (K_COMMA singleQrySrc)* ;
spaceDelimitedQueries: singleQrySrc singleQrySrc* ;
querySrc: commaDelimitedQueries | spaceDelimitedQueries ;
// Full query: source followed by optional loop/groupby/select/orderby/limit parts.
query: querySrc loopExpression?
    groupByExpression?
    selectClause?
    orderByExpr?
    limitOffset? ;
queryWithPath: query (K_WITHPATH)? ;
// Generated from AtlasDSLParser.g4 by ANTLR 4.7
package org.apache.atlas.query.antlr4;
import org.antlr.v4.runtime.atn.*;
import org.antlr.v4.runtime.dfa.DFA;
import org.antlr.v4.runtime.*;
import org.antlr.v4.runtime.misc.*;
import org.antlr.v4.runtime.tree.*;
import java.util.List;
import java.util.Iterator;
import java.util.ArrayList;
@SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"})
public class AtlasDSLParser extends Parser {
// NOTE: ANTLR-generated code (from AtlasDSLParser.g4 by ANTLR 4.7).
// Do not edit by hand; regenerate with the ANTLR tool instead.
static { RuntimeMetaData.checkVersion("4.7", RuntimeMetaData.VERSION); }
// Per-decision DFA cache; filled in by the static initializer at the end of the class.
protected static final DFA[] _decisionToDFA;
// Prediction-context cache shared by all parser instances.
protected static final PredictionContextCache _sharedContextCache =
new PredictionContextCache();
// Token type constants; values must stay in sync with AtlasDSLLexer.tokens.
public static final int
SINGLE_LINE_COMMENT=1, MULTILINE_COMMENT=2, WS=3, NUMBER=4, FLOATING_NUMBER=5,
BOOL=6, K_COMMA=7, K_PLUS=8, K_MINUS=9, K_STAR=10, K_DIV=11, K_DOT=12,
K_LIKE=13, K_AND=14, K_OR=15, K_LPAREN=16, K_LBRACKET=17, K_RPAREN=18,
K_RBRACKET=19, K_LT=20, K_LTE=21, K_EQ=22, K_NEQ=23, K_GT=24, K_GTE=25,
K_FROM=26, K_WHERE=27, K_ORDERBY=28, K_GROUPBY=29, K_LIMIT=30, K_SELECT=31,
K_MAX=32, K_MIN=33, K_SUM=34, K_COUNT=35, K_LOOP=36, K_OFFSET=37, K_AS=38,
K_ISA=39, K_IS=40, K_HAS=41, K_ASC=42, K_DESC=43, K_WITHPATH=44, K_TRUE=45,
K_FALSE=46, KEYWORD=47, ID=48, STRING=49;
// Rule index constants, one per grammar rule; order matches ruleNames below.
public static final int
RULE_identifier = 0, RULE_operator = 1, RULE_sortOrder = 2, RULE_valueArray = 3,
RULE_literal = 4, RULE_limitClause = 5, RULE_offsetClause = 6, RULE_atomE = 7,
RULE_multiERight = 8, RULE_multiE = 9, RULE_arithERight = 10, RULE_arithE = 11,
RULE_comparisonClause = 12, RULE_isClause = 13, RULE_hasClause = 14, RULE_countClause = 15,
RULE_maxClause = 16, RULE_minClause = 17, RULE_sumClause = 18, RULE_exprRight = 19,
RULE_compE = 20, RULE_expr = 21, RULE_limitOffset = 22, RULE_selectExpression = 23,
RULE_selectExpr = 24, RULE_aliasExpr = 25, RULE_orderByExpr = 26, RULE_fromSrc = 27,
RULE_whereClause = 28, RULE_fromExpression = 29, RULE_fromClause = 30,
RULE_selectClause = 31, RULE_singleQrySrc = 32, RULE_loopExpression = 33,
RULE_groupByExpression = 34, RULE_commaDelimitedQueries = 35, RULE_spaceDelimitedQueries = 36,
RULE_querySrc = 37, RULE_query = 38, RULE_queryWithPath = 39;
// Grammar rule names, indexed by the RULE_* constants above.
public static final String[] ruleNames = {
"identifier", "operator", "sortOrder", "valueArray", "literal", "limitClause",
"offsetClause", "atomE", "multiERight", "multiE", "arithERight", "arithE",
"comparisonClause", "isClause", "hasClause", "countClause", "maxClause",
"minClause", "sumClause", "exprRight", "compE", "expr", "limitOffset",
"selectExpression", "selectExpr", "aliasExpr", "orderByExpr", "fromSrc",
"whereClause", "fromExpression", "fromClause", "selectClause", "singleQrySrc",
"loopExpression", "groupByExpression", "commaDelimitedQueries", "spaceDelimitedQueries",
"querySrc", "query", "queryWithPath"
};
// Literal token text (null where a token has no fixed literal form).
private static final String[] _LITERAL_NAMES = {
null, null, null, null, null, null, null, "','", "'+'", "'-'", "'*'",
"'/'", "'.'", null, null, null, "'('", "'['", "')'", "']'"
};
// Symbolic token names, indexed by token type.
private static final String[] _SYMBOLIC_NAMES = {
null, "SINGLE_LINE_COMMENT", "MULTILINE_COMMENT", "WS", "NUMBER", "FLOATING_NUMBER",
"BOOL", "K_COMMA", "K_PLUS", "K_MINUS", "K_STAR", "K_DIV", "K_DOT", "K_LIKE",
"K_AND", "K_OR", "K_LPAREN", "K_LBRACKET", "K_RPAREN", "K_RBRACKET", "K_LT",
"K_LTE", "K_EQ", "K_NEQ", "K_GT", "K_GTE", "K_FROM", "K_WHERE", "K_ORDERBY",
"K_GROUPBY", "K_LIMIT", "K_SELECT", "K_MAX", "K_MIN", "K_SUM", "K_COUNT",
"K_LOOP", "K_OFFSET", "K_AS", "K_ISA", "K_IS", "K_HAS", "K_ASC", "K_DESC",
"K_WITHPATH", "K_TRUE", "K_FALSE", "KEYWORD", "ID", "STRING"
};
public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES);
/**
 * @deprecated Use {@link #VOCABULARY} instead.
 */
@Deprecated
public static final String[] tokenNames;
// Builds the legacy tokenNames array from VOCABULARY, preferring the
// literal form, then the symbolic name, then "<INVALID>".
static {
tokenNames = new String[_SYMBOLIC_NAMES.length];
for (int i = 0; i < tokenNames.length; i++) {
tokenNames[i] = VOCABULARY.getLiteralName(i);
if (tokenNames[i] == null) {
tokenNames[i] = VOCABULARY.getSymbolicName(i);
}
if (tokenNames[i] == null) {
tokenNames[i] = "<INVALID>";
}
}
}
@Override
@Deprecated
public String[] getTokenNames() {
return tokenNames;
}
@Override
public Vocabulary getVocabulary() {
return VOCABULARY;
}
@Override
public String getGrammarFileName() { return "AtlasDSLParser.g4"; }
@Override
public String[] getRuleNames() { return ruleNames; }
@Override
public String getSerializedATN() { return _serializedATN; }
@Override
public ATN getATN() { return _ATN; }
// Creates a parser over the given token stream, backed by an ATN simulator
// that shares the class-level DFA and prediction-context caches.
public AtlasDSLParser(TokenStream input) {
super(input);
_interp = new ParserATNSimulator(this,_ATN,_decisionToDFA,_sharedContextCache);
}
// Parse-tree node for rule "identifier" (identifier: ID).
public static class IdentifierContext extends ParserRuleContext {
public TerminalNode ID() { return getToken(AtlasDSLParser.ID, 0); }
public IdentifierContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_identifier; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitIdentifier(this);
else return visitor.visitChildren(this);
}
}
// Parses rule "identifier": matches a single ID token.
public final IdentifierContext identifier() throws RecognitionException {
IdentifierContext _localctx = new IdentifierContext(_ctx, getState());
enterRule(_localctx, 0, RULE_identifier);
try {
enterOuterAlt(_localctx, 1);
{
setState(80);
match(ID);
}
}
catch (RecognitionException re) {
// Standard generated recovery: record, report, resynchronize.
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree node for rule "operator" (one comparison operator token).
public static class OperatorContext extends ParserRuleContext {
public TerminalNode K_LT() { return getToken(AtlasDSLParser.K_LT, 0); }
public TerminalNode K_LTE() { return getToken(AtlasDSLParser.K_LTE, 0); }
public TerminalNode K_EQ() { return getToken(AtlasDSLParser.K_EQ, 0); }
public TerminalNode K_NEQ() { return getToken(AtlasDSLParser.K_NEQ, 0); }
public TerminalNode K_GT() { return getToken(AtlasDSLParser.K_GT, 0); }
public TerminalNode K_GTE() { return getToken(AtlasDSLParser.K_GTE, 0); }
public TerminalNode K_LIKE() { return getToken(AtlasDSLParser.K_LIKE, 0); }
public OperatorContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_operator; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitOperator(this);
else return visitor.visitChildren(this);
}
}
// Parses rule "operator": consumes one of K_LT/K_LTE/K_EQ/K_NEQ/K_GT/K_GTE/K_LIKE,
// using a 64-bit membership mask over token types.
public final OperatorContext operator() throws RecognitionException {
OperatorContext _localctx = new OperatorContext(_ctx, getState());
enterRule(_localctx, 2, RULE_operator);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(82);
_la = _input.LA(1);
if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << K_LIKE) | (1L << K_LT) | (1L << K_LTE) | (1L << K_EQ) | (1L << K_NEQ) | (1L << K_GT) | (1L << K_GTE))) != 0)) ) {
_errHandler.recoverInline(this);
}
else {
if ( _input.LA(1)==Token.EOF ) matchedEOF = true;
_errHandler.reportMatch(this);
consume();
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree node for rule "sortOrder" (K_ASC | K_DESC).
public static class SortOrderContext extends ParserRuleContext {
public TerminalNode K_ASC() { return getToken(AtlasDSLParser.K_ASC, 0); }
public TerminalNode K_DESC() { return getToken(AtlasDSLParser.K_DESC, 0); }
public SortOrderContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_sortOrder; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitSortOrder(this);
else return visitor.visitChildren(this);
}
}
// Parses rule "sortOrder": consumes either K_ASC or K_DESC.
public final SortOrderContext sortOrder() throws RecognitionException {
SortOrderContext _localctx = new SortOrderContext(_ctx, getState());
enterRule(_localctx, 4, RULE_sortOrder);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(84);
_la = _input.LA(1);
if ( !(_la==K_ASC || _la==K_DESC) ) {
_errHandler.recoverInline(this);
}
else {
if ( _input.LA(1)==Token.EOF ) matchedEOF = true;
_errHandler.reportMatch(this);
consume();
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree node for rule "valueArray": '[' STRING (',' STRING)* ']'.
public static class ValueArrayContext extends ParserRuleContext {
public TerminalNode K_LBRACKET() { return getToken(AtlasDSLParser.K_LBRACKET, 0); }
public List<TerminalNode> STRING() { return getTokens(AtlasDSLParser.STRING); }
public TerminalNode STRING(int i) {
return getToken(AtlasDSLParser.STRING, i);
}
public TerminalNode K_RBRACKET() { return getToken(AtlasDSLParser.K_RBRACKET, 0); }
public List<TerminalNode> K_COMMA() { return getTokens(AtlasDSLParser.K_COMMA); }
public TerminalNode K_COMMA(int i) {
return getToken(AtlasDSLParser.K_COMMA, i);
}
public ValueArrayContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_valueArray; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitValueArray(this);
else return visitor.visitChildren(this);
}
}
// Parses rule "valueArray": a bracketed, comma-separated list of STRING tokens.
public final ValueArrayContext valueArray() throws RecognitionException {
ValueArrayContext _localctx = new ValueArrayContext(_ctx, getState());
enterRule(_localctx, 6, RULE_valueArray);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(86);
match(K_LBRACKET);
setState(87);
match(STRING);
setState(92);
_errHandler.sync(this);
_la = _input.LA(1);
// Loop for the (K_COMMA STRING)* tail.
while (_la==K_COMMA) {
{
{
setState(88);
match(K_COMMA);
setState(89);
match(STRING);
}
}
setState(94);
_errHandler.sync(this);
_la = _input.LA(1);
}
setState(95);
match(K_RBRACKET);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree node for rule "literal": BOOL | NUMBER | FLOATING_NUMBER | (STRING | valueArray).
public static class LiteralContext extends ParserRuleContext {
public TerminalNode BOOL() { return getToken(AtlasDSLParser.BOOL, 0); }
public TerminalNode NUMBER() { return getToken(AtlasDSLParser.NUMBER, 0); }
public TerminalNode FLOATING_NUMBER() { return getToken(AtlasDSLParser.FLOATING_NUMBER, 0); }
public TerminalNode STRING() { return getToken(AtlasDSLParser.STRING, 0); }
public ValueArrayContext valueArray() {
return getRuleContext(ValueArrayContext.class,0);
}
public LiteralContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_literal; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitLiteral(this);
else return visitor.visitChildren(this);
}
}
// Parses rule "literal": dispatches on the lookahead token to one of four
// alternatives; the fourth alternative is itself a STRING-vs-valueArray choice.
public final LiteralContext literal() throws RecognitionException {
LiteralContext _localctx = new LiteralContext(_ctx, getState());
enterRule(_localctx, 8, RULE_literal);
try {
setState(104);
_errHandler.sync(this);
switch (_input.LA(1)) {
case BOOL:
enterOuterAlt(_localctx, 1);
{
setState(97);
match(BOOL);
}
break;
case NUMBER:
enterOuterAlt(_localctx, 2);
{
setState(98);
match(NUMBER);
}
break;
case FLOATING_NUMBER:
enterOuterAlt(_localctx, 3);
{
setState(99);
match(FLOATING_NUMBER);
}
break;
case K_LBRACKET:
case STRING:
enterOuterAlt(_localctx, 4);
{
setState(102);
_errHandler.sync(this);
// Inner choice: a bare STRING or a bracketed valueArray.
switch (_input.LA(1)) {
case STRING:
{
setState(100);
match(STRING);
}
break;
case K_LBRACKET:
{
setState(101);
valueArray();
}
break;
default:
throw new NoViableAltException(this);
}
}
break;
default:
throw new NoViableAltException(this);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree node for rule "limitClause": K_LIMIT NUMBER.
public static class LimitClauseContext extends ParserRuleContext {
public TerminalNode K_LIMIT() { return getToken(AtlasDSLParser.K_LIMIT, 0); }
public TerminalNode NUMBER() { return getToken(AtlasDSLParser.NUMBER, 0); }
public LimitClauseContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_limitClause; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitLimitClause(this);
else return visitor.visitChildren(this);
}
}
// Parses rule "limitClause": the LIMIT keyword followed by a NUMBER.
public final LimitClauseContext limitClause() throws RecognitionException {
LimitClauseContext _localctx = new LimitClauseContext(_ctx, getState());
enterRule(_localctx, 10, RULE_limitClause);
try {
enterOuterAlt(_localctx, 1);
{
setState(106);
match(K_LIMIT);
setState(107);
match(NUMBER);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree node for rule "offsetClause": K_OFFSET NUMBER.
public static class OffsetClauseContext extends ParserRuleContext {
public TerminalNode K_OFFSET() { return getToken(AtlasDSLParser.K_OFFSET, 0); }
public TerminalNode NUMBER() { return getToken(AtlasDSLParser.NUMBER, 0); }
public OffsetClauseContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_offsetClause; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitOffsetClause(this);
else return visitor.visitChildren(this);
}
}
// Parses rule "offsetClause": the OFFSET keyword followed by a NUMBER.
public final OffsetClauseContext offsetClause() throws RecognitionException {
OffsetClauseContext _localctx = new OffsetClauseContext(_ctx, getState());
enterRule(_localctx, 12, RULE_offsetClause);
try {
enterOuterAlt(_localctx, 1);
{
setState(109);
match(K_OFFSET);
setState(110);
match(NUMBER);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree node for rule "atomE": (identifier | literal) | '(' expr ')'.
public static class AtomEContext extends ParserRuleContext {
public IdentifierContext identifier() {
return getRuleContext(IdentifierContext.class,0);
}
public LiteralContext literal() {
return getRuleContext(LiteralContext.class,0);
}
public TerminalNode K_LPAREN() { return getToken(AtlasDSLParser.K_LPAREN, 0); }
public ExprContext expr() {
return getRuleContext(ExprContext.class,0);
}
public TerminalNode K_RPAREN() { return getToken(AtlasDSLParser.K_RPAREN, 0); }
public AtomEContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_atomE; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitAtomE(this);
else return visitor.visitChildren(this);
}
}
// Parses rule "atomE": an identifier or literal, or a parenthesized expr.
public final AtomEContext atomE() throws RecognitionException {
AtomEContext _localctx = new AtomEContext(_ctx, getState());
enterRule(_localctx, 14, RULE_atomE);
try {
setState(120);
_errHandler.sync(this);
switch (_input.LA(1)) {
case NUMBER:
case FLOATING_NUMBER:
case BOOL:
case K_LBRACKET:
case ID:
case STRING:
enterOuterAlt(_localctx, 1);
{
setState(114);
_errHandler.sync(this);
// Inner choice between identifier (ID) and literal (all other cases).
switch (_input.LA(1)) {
case ID:
{
setState(112);
identifier();
}
break;
case NUMBER:
case FLOATING_NUMBER:
case BOOL:
case K_LBRACKET:
case STRING:
{
setState(113);
literal();
}
break;
default:
throw new NoViableAltException(this);
}
}
break;
case K_LPAREN:
enterOuterAlt(_localctx, 2);
{
setState(116);
match(K_LPAREN);
setState(117);
expr();
setState(118);
match(K_RPAREN);
}
break;
default:
throw new NoViableAltException(this);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree node for rule "multiERight": ('*' | '/') atomE.
public static class MultiERightContext extends ParserRuleContext {
public AtomEContext atomE() {
return getRuleContext(AtomEContext.class,0);
}
public TerminalNode K_STAR() { return getToken(AtlasDSLParser.K_STAR, 0); }
public TerminalNode K_DIV() { return getToken(AtlasDSLParser.K_DIV, 0); }
public MultiERightContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_multiERight; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitMultiERight(this);
else return visitor.visitChildren(this);
}
}
// Parses rule "multiERight": a '*' or '/' operator followed by an atomE operand.
public final MultiERightContext multiERight() throws RecognitionException {
MultiERightContext _localctx = new MultiERightContext(_ctx, getState());
enterRule(_localctx, 16, RULE_multiERight);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(122);
_la = _input.LA(1);
if ( !(_la==K_STAR || _la==K_DIV) ) {
_errHandler.recoverInline(this);
}
else {
if ( _input.LA(1)==Token.EOF ) matchedEOF = true;
_errHandler.reportMatch(this);
consume();
}
setState(123);
atomE();
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree node for rule "multiE": atomE multiERight*.
public static class MultiEContext extends ParserRuleContext {
public AtomEContext atomE() {
return getRuleContext(AtomEContext.class,0);
}
public List<MultiERightContext> multiERight() {
return getRuleContexts(MultiERightContext.class);
}
public MultiERightContext multiERight(int i) {
return getRuleContext(MultiERightContext.class,i);
}
public MultiEContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_multiE; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitMultiE(this);
else return visitor.visitChildren(this);
}
}
// Parses rule "multiE": one atomE followed by zero or more multiERight tails,
// continued while the lookahead is '*' or '/'.
public final MultiEContext multiE() throws RecognitionException {
MultiEContext _localctx = new MultiEContext(_ctx, getState());
enterRule(_localctx, 18, RULE_multiE);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(125);
atomE();
setState(129);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==K_STAR || _la==K_DIV) {
{
{
setState(126);
multiERight();
}
}
setState(131);
_errHandler.sync(this);
_la = _input.LA(1);
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree node for rule "arithERight": ('+' | '-') multiE.
public static class ArithERightContext extends ParserRuleContext {
public MultiEContext multiE() {
return getRuleContext(MultiEContext.class,0);
}
public TerminalNode K_PLUS() { return getToken(AtlasDSLParser.K_PLUS, 0); }
public TerminalNode K_MINUS() { return getToken(AtlasDSLParser.K_MINUS, 0); }
public ArithERightContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_arithERight; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitArithERight(this);
else return visitor.visitChildren(this);
}
}
// Parses rule "arithERight": a '+' or '-' operator followed by a multiE operand.
public final ArithERightContext arithERight() throws RecognitionException {
ArithERightContext _localctx = new ArithERightContext(_ctx, getState());
enterRule(_localctx, 20, RULE_arithERight);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(132);
_la = _input.LA(1);
if ( !(_la==K_PLUS || _la==K_MINUS) ) {
_errHandler.recoverInline(this);
}
else {
if ( _input.LA(1)==Token.EOF ) matchedEOF = true;
_errHandler.reportMatch(this);
consume();
}
setState(133);
multiE();
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree node for rule "arithE": multiE arithERight*.
public static class ArithEContext extends ParserRuleContext {
public MultiEContext multiE() {
return getRuleContext(MultiEContext.class,0);
}
public List<ArithERightContext> arithERight() {
return getRuleContexts(ArithERightContext.class);
}
public ArithERightContext arithERight(int i) {
return getRuleContext(ArithERightContext.class,i);
}
public ArithEContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_arithE; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitArithE(this);
else return visitor.visitChildren(this);
}
}
// Parses rule "arithE": one multiE followed by zero or more arithERight tails,
// continued while the lookahead is '+' or '-'.
public final ArithEContext arithE() throws RecognitionException {
ArithEContext _localctx = new ArithEContext(_ctx, getState());
enterRule(_localctx, 22, RULE_arithE);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(135);
multiE();
setState(139);
_errHandler.sync(this);
_la = _input.LA(1);
while (_la==K_PLUS || _la==K_MINUS) {
{
{
setState(136);
arithERight();
}
}
setState(141);
_errHandler.sync(this);
_la = _input.LA(1);
}
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree node for rule "comparisonClause": arithE operator arithE.
public static class ComparisonClauseContext extends ParserRuleContext {
public List<ArithEContext> arithE() {
return getRuleContexts(ArithEContext.class);
}
public ArithEContext arithE(int i) {
return getRuleContext(ArithEContext.class,i);
}
public OperatorContext operator() {
return getRuleContext(OperatorContext.class,0);
}
public ComparisonClauseContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_comparisonClause; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitComparisonClause(this);
else return visitor.visitChildren(this);
}
}
// Parses rule "comparisonClause": left arithE, comparison operator, right arithE.
public final ComparisonClauseContext comparisonClause() throws RecognitionException {
ComparisonClauseContext _localctx = new ComparisonClauseContext(_ctx, getState());
enterRule(_localctx, 24, RULE_comparisonClause);
try {
enterOuterAlt(_localctx, 1);
{
setState(142);
arithE();
setState(143);
operator();
setState(144);
arithE();
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree node for rule "isClause": arithE (K_ISA | K_IS) identifier.
public static class IsClauseContext extends ParserRuleContext {
public ArithEContext arithE() {
return getRuleContext(ArithEContext.class,0);
}
public IdentifierContext identifier() {
return getRuleContext(IdentifierContext.class,0);
}
public TerminalNode K_ISA() { return getToken(AtlasDSLParser.K_ISA, 0); }
public TerminalNode K_IS() { return getToken(AtlasDSLParser.K_IS, 0); }
public IsClauseContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_isClause; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitIsClause(this);
else return visitor.visitChildren(this);
}
}
// Parses rule "isClause": an arithE, an ISA/IS keyword, then a type identifier.
public final IsClauseContext isClause() throws RecognitionException {
IsClauseContext _localctx = new IsClauseContext(_ctx, getState());
enterRule(_localctx, 26, RULE_isClause);
int _la;
try {
enterOuterAlt(_localctx, 1);
{
setState(146);
arithE();
setState(147);
_la = _input.LA(1);
if ( !(_la==K_ISA || _la==K_IS) ) {
_errHandler.recoverInline(this);
}
else {
if ( _input.LA(1)==Token.EOF ) matchedEOF = true;
_errHandler.reportMatch(this);
consume();
}
setState(148);
identifier();
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree node for rule "hasClause": arithE K_HAS identifier.
public static class HasClauseContext extends ParserRuleContext {
public ArithEContext arithE() {
return getRuleContext(ArithEContext.class,0);
}
public TerminalNode K_HAS() { return getToken(AtlasDSLParser.K_HAS, 0); }
public IdentifierContext identifier() {
return getRuleContext(IdentifierContext.class,0);
}
public HasClauseContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_hasClause; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitHasClause(this);
else return visitor.visitChildren(this);
}
}
// Parses rule "hasClause": an arithE, the HAS keyword, then an attribute identifier.
public final HasClauseContext hasClause() throws RecognitionException {
HasClauseContext _localctx = new HasClauseContext(_ctx, getState());
enterRule(_localctx, 28, RULE_hasClause);
try {
enterOuterAlt(_localctx, 1);
{
setState(150);
arithE();
setState(151);
match(K_HAS);
setState(152);
identifier();
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree node for rule "countClause": K_COUNT '(' ')'.
public static class CountClauseContext extends ParserRuleContext {
public TerminalNode K_COUNT() { return getToken(AtlasDSLParser.K_COUNT, 0); }
public TerminalNode K_LPAREN() { return getToken(AtlasDSLParser.K_LPAREN, 0); }
public TerminalNode K_RPAREN() { return getToken(AtlasDSLParser.K_RPAREN, 0); }
public CountClauseContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_countClause; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitCountClause(this);
else return visitor.visitChildren(this);
}
}
// Parses rule "countClause": the COUNT keyword with an empty argument list.
public final CountClauseContext countClause() throws RecognitionException {
CountClauseContext _localctx = new CountClauseContext(_ctx, getState());
enterRule(_localctx, 30, RULE_countClause);
try {
enterOuterAlt(_localctx, 1);
{
setState(154);
match(K_COUNT);
setState(155);
match(K_LPAREN);
setState(156);
match(K_RPAREN);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree node for rule "maxClause": K_MAX '(' expr ')'.
public static class MaxClauseContext extends ParserRuleContext {
public TerminalNode K_MAX() { return getToken(AtlasDSLParser.K_MAX, 0); }
public TerminalNode K_LPAREN() { return getToken(AtlasDSLParser.K_LPAREN, 0); }
public ExprContext expr() {
return getRuleContext(ExprContext.class,0);
}
public TerminalNode K_RPAREN() { return getToken(AtlasDSLParser.K_RPAREN, 0); }
public MaxClauseContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_maxClause; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitMaxClause(this);
else return visitor.visitChildren(this);
}
}
// Parses rule "maxClause": the MAX keyword with a parenthesized expr argument.
public final MaxClauseContext maxClause() throws RecognitionException {
MaxClauseContext _localctx = new MaxClauseContext(_ctx, getState());
enterRule(_localctx, 32, RULE_maxClause);
try {
enterOuterAlt(_localctx, 1);
{
setState(158);
match(K_MAX);
setState(159);
match(K_LPAREN);
setState(160);
expr();
setState(161);
match(K_RPAREN);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
// Parse-tree node for rule "minClause": K_MIN '(' expr ')'.
public static class MinClauseContext extends ParserRuleContext {
public TerminalNode K_MIN() { return getToken(AtlasDSLParser.K_MIN, 0); }
public TerminalNode K_LPAREN() { return getToken(AtlasDSLParser.K_LPAREN, 0); }
public ExprContext expr() {
return getRuleContext(ExprContext.class,0);
}
public TerminalNode K_RPAREN() { return getToken(AtlasDSLParser.K_RPAREN, 0); }
public MinClauseContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_minClause; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitMinClause(this);
else return visitor.visitChildren(this);
}
}
// Parses rule "minClause": the MIN keyword with a parenthesized expr argument.
public final MinClauseContext minClause() throws RecognitionException {
MinClauseContext _localctx = new MinClauseContext(_ctx, getState());
enterRule(_localctx, 34, RULE_minClause);
try {
enterOuterAlt(_localctx, 1);
{
setState(163);
match(K_MIN);
setState(164);
match(K_LPAREN);
setState(165);
expr();
setState(166);
match(K_RPAREN);
}
}
catch (RecognitionException re) {
_localctx.exception = re;
_errHandler.reportError(this, re);
_errHandler.recover(this, re);
}
finally {
exitRule();
}
return _localctx;
}
public static class SumClauseContext extends ParserRuleContext {
public TerminalNode K_SUM() { return getToken(AtlasDSLParser.K_SUM, 0); }
public TerminalNode K_LPAREN() { return getToken(AtlasDSLParser.K_LPAREN, 0); }
public ExprContext expr() {
return getRuleContext(ExprContext.class,0);
}
public TerminalNode K_RPAREN() { return getToken(AtlasDSLParser.K_RPAREN, 0); }
public SumClauseContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_sumClause; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitSumClause(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code sumClause} rule: {@code K_SUM K_LPAREN expr K_RPAREN}.
	 * Generated by ANTLR; state numbers are bound to the serialized ATN.
	 *
	 * @return the populated {@link SumClauseContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final SumClauseContext sumClause() throws RecognitionException {
		SumClauseContext _localctx = new SumClauseContext(_ctx, getState());
		enterRule(_localctx, 36, RULE_sumClause);
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(168);
			match(K_SUM);
			setState(169);
			match(K_LPAREN);
			setState(170);
			expr();
			setState(171);
			match(K_RPAREN);
			}
		}
		catch (RecognitionException re) {
			// Standard generated recovery: record the error on the context and resynchronize.
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class ExprRightContext extends ParserRuleContext {
public CompEContext compE() {
return getRuleContext(CompEContext.class,0);
}
public TerminalNode K_AND() { return getToken(AtlasDSLParser.K_AND, 0); }
public TerminalNode K_OR() { return getToken(AtlasDSLParser.K_OR, 0); }
public ExprRightContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_exprRight; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitExprRight(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code exprRight} rule: {@code (K_AND | K_OR) compE}.
	 * The connector is matched as a token set (inline recovery if the lookahead is neither AND nor OR).
	 *
	 * @return the populated {@link ExprRightContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final ExprRightContext exprRight() throws RecognitionException {
		ExprRightContext _localctx = new ExprRightContext(_ctx, getState());
		enterRule(_localctx, 38, RULE_exprRight);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(173);
			_la = _input.LA(1);
			// Set match: accept either K_AND or K_OR; otherwise attempt single-token recovery.
			if ( !(_la==K_AND || _la==K_OR) ) {
			_errHandler.recoverInline(this);
			}
			else {
				if ( _input.LA(1)==Token.EOF ) matchedEOF = true;
				_errHandler.reportMatch(this);
				consume();
			}
			setState(174);
			compE();
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class CompEContext extends ParserRuleContext {
public ComparisonClauseContext comparisonClause() {
return getRuleContext(ComparisonClauseContext.class,0);
}
public IsClauseContext isClause() {
return getRuleContext(IsClauseContext.class,0);
}
public HasClauseContext hasClause() {
return getRuleContext(HasClauseContext.class,0);
}
public ArithEContext arithE() {
return getRuleContext(ArithEContext.class,0);
}
public CountClauseContext countClause() {
return getRuleContext(CountClauseContext.class,0);
}
public MaxClauseContext maxClause() {
return getRuleContext(MaxClauseContext.class,0);
}
public MinClauseContext minClause() {
return getRuleContext(MinClauseContext.class,0);
}
public SumClauseContext sumClause() {
return getRuleContext(SumClauseContext.class,0);
}
public CompEContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_compE; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitCompE(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code compE} rule, one of eight alternatives chosen by adaptive prediction:
	 * {@code comparisonClause | isClause | hasClause | arithE | countClause | maxClause | minClause | sumClause}.
	 *
	 * @return the populated {@link CompEContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final CompEContext compE() throws RecognitionException {
		CompEContext _localctx = new CompEContext(_ctx, getState());
		enterRule(_localctx, 40, RULE_compE);
		try {
			setState(184);
			_errHandler.sync(this);
			// Decision 7 in the ATN picks the alternative from the token stream.
			switch ( getInterpreter().adaptivePredict(_input,7,_ctx) ) {
			case 1:
				enterOuterAlt(_localctx, 1);
				{
				setState(176);
				comparisonClause();
				}
				break;
			case 2:
				enterOuterAlt(_localctx, 2);
				{
				setState(177);
				isClause();
				}
				break;
			case 3:
				enterOuterAlt(_localctx, 3);
				{
				setState(178);
				hasClause();
				}
				break;
			case 4:
				enterOuterAlt(_localctx, 4);
				{
				setState(179);
				arithE();
				}
				break;
			case 5:
				enterOuterAlt(_localctx, 5);
				{
				setState(180);
				countClause();
				}
				break;
			case 6:
				enterOuterAlt(_localctx, 6);
				{
				setState(181);
				maxClause();
				}
				break;
			case 7:
				enterOuterAlt(_localctx, 7);
				{
				setState(182);
				minClause();
				}
				break;
			case 8:
				enterOuterAlt(_localctx, 8);
				{
				setState(183);
				sumClause();
				}
				break;
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class ExprContext extends ParserRuleContext {
public CompEContext compE() {
return getRuleContext(CompEContext.class,0);
}
public List<ExprRightContext> exprRight() {
return getRuleContexts(ExprRightContext.class);
}
public ExprRightContext exprRight(int i) {
return getRuleContext(ExprRightContext.class,i);
}
public ExprContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_expr; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitExpr(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code expr} rule: {@code compE exprRight*} — a comparison followed by
	 * any number of AND/OR continuations (loop runs while the lookahead is K_AND or K_OR).
	 *
	 * @return the populated {@link ExprContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final ExprContext expr() throws RecognitionException {
		ExprContext _localctx = new ExprContext(_ctx, getState());
		enterRule(_localctx, 42, RULE_expr);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(186);
			compE();
			setState(190);
			_errHandler.sync(this);
			_la = _input.LA(1);
			while (_la==K_AND || _la==K_OR) {
				{
				{
				setState(187);
				exprRight();
				}
				}
				setState(192);
				_errHandler.sync(this);
				_la = _input.LA(1);
			}
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class LimitOffsetContext extends ParserRuleContext {
public LimitClauseContext limitClause() {
return getRuleContext(LimitClauseContext.class,0);
}
public OffsetClauseContext offsetClause() {
return getRuleContext(OffsetClauseContext.class,0);
}
public LimitOffsetContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_limitOffset; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitLimitOffset(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code limitOffset} rule: {@code limitClause offsetClause?}.
	 * The offset clause is entered only when the next token is {@code K_OFFSET}.
	 *
	 * @return the populated {@link LimitOffsetContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final LimitOffsetContext limitOffset() throws RecognitionException {
		LimitOffsetContext _localctx = new LimitOffsetContext(_ctx, getState());
		enterRule(_localctx, 44, RULE_limitOffset);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(193);
			limitClause();
			setState(195);
			_errHandler.sync(this);
			_la = _input.LA(1);
			if (_la==K_OFFSET) {
				{
				setState(194);
				offsetClause();
				}
			}
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class SelectExpressionContext extends ParserRuleContext {
public ExprContext expr() {
return getRuleContext(ExprContext.class,0);
}
public TerminalNode K_AS() { return getToken(AtlasDSLParser.K_AS, 0); }
public IdentifierContext identifier() {
return getRuleContext(IdentifierContext.class,0);
}
public SelectExpressionContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_selectExpression; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitSelectExpression(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code selectExpression} rule: {@code expr (K_AS identifier)?}.
	 * The alias is parsed only when the next token is {@code K_AS}.
	 *
	 * @return the populated {@link SelectExpressionContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final SelectExpressionContext selectExpression() throws RecognitionException {
		SelectExpressionContext _localctx = new SelectExpressionContext(_ctx, getState());
		enterRule(_localctx, 46, RULE_selectExpression);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(197);
			expr();
			setState(200);
			_errHandler.sync(this);
			_la = _input.LA(1);
			if (_la==K_AS) {
				{
				setState(198);
				match(K_AS);
				setState(199);
				identifier();
				}
			}
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class SelectExprContext extends ParserRuleContext {
public List<SelectExpressionContext> selectExpression() {
return getRuleContexts(SelectExpressionContext.class);
}
public SelectExpressionContext selectExpression(int i) {
return getRuleContext(SelectExpressionContext.class,i);
}
public List<TerminalNode> K_COMMA() { return getTokens(AtlasDSLParser.K_COMMA); }
public TerminalNode K_COMMA(int i) {
return getToken(AtlasDSLParser.K_COMMA, i);
}
public SelectExprContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_selectExpr; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitSelectExpr(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code selectExpr} rule: {@code selectExpression (K_COMMA selectExpression)*}.
	 *
	 * @return the populated {@link SelectExprContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final SelectExprContext selectExpr() throws RecognitionException {
		SelectExprContext _localctx = new SelectExprContext(_ctx, getState());
		enterRule(_localctx, 48, RULE_selectExpr);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(202);
			selectExpression();
			setState(207);
			_errHandler.sync(this);
			_la = _input.LA(1);
			// Keep consuming ", selectExpression" pairs while a comma follows.
			while (_la==K_COMMA) {
				{
				{
				setState(203);
				match(K_COMMA);
				setState(204);
				selectExpression();
				}
				}
				setState(209);
				_errHandler.sync(this);
				_la = _input.LA(1);
			}
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class AliasExprContext extends ParserRuleContext {
public TerminalNode K_AS() { return getToken(AtlasDSLParser.K_AS, 0); }
public List<IdentifierContext> identifier() {
return getRuleContexts(IdentifierContext.class);
}
public IdentifierContext identifier(int i) {
return getRuleContext(IdentifierContext.class,i);
}
public LiteralContext literal() {
return getRuleContext(LiteralContext.class,0);
}
public AliasExprContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_aliasExpr; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitAliasExpr(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code aliasExpr} rule: {@code (identifier | literal) K_AS identifier}.
	 * The source is chosen by one-token lookahead: ID → identifier, literal-start tokens → literal.
	 *
	 * @return the populated {@link AliasExprContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final AliasExprContext aliasExpr() throws RecognitionException {
		AliasExprContext _localctx = new AliasExprContext(_ctx, getState());
		enterRule(_localctx, 50, RULE_aliasExpr);
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(212);
			_errHandler.sync(this);
			switch (_input.LA(1)) {
			case ID:
				{
				setState(210);
				identifier();
				}
				break;
			case NUMBER:
			case FLOATING_NUMBER:
			case BOOL:
			case K_LBRACKET:
			case STRING:
				{
				setState(211);
				literal();
				}
				break;
			default:
				throw new NoViableAltException(this);
			}
			setState(214);
			match(K_AS);
			setState(215);
			identifier();
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class OrderByExprContext extends ParserRuleContext {
public TerminalNode K_ORDERBY() { return getToken(AtlasDSLParser.K_ORDERBY, 0); }
public ExprContext expr() {
return getRuleContext(ExprContext.class,0);
}
public SortOrderContext sortOrder() {
return getRuleContext(SortOrderContext.class,0);
}
public OrderByExprContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_orderByExpr; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitOrderByExpr(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code orderByExpr} rule: {@code K_ORDERBY expr sortOrder?}.
	 * The sort order is parsed only when the next token is {@code K_ASC} or {@code K_DESC}.
	 *
	 * @return the populated {@link OrderByExprContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final OrderByExprContext orderByExpr() throws RecognitionException {
		OrderByExprContext _localctx = new OrderByExprContext(_ctx, getState());
		enterRule(_localctx, 52, RULE_orderByExpr);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(217);
			match(K_ORDERBY);
			setState(218);
			expr();
			setState(220);
			_errHandler.sync(this);
			_la = _input.LA(1);
			if (_la==K_ASC || _la==K_DESC) {
				{
				setState(219);
				sortOrder();
				}
			}
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class FromSrcContext extends ParserRuleContext {
public AliasExprContext aliasExpr() {
return getRuleContext(AliasExprContext.class,0);
}
public IdentifierContext identifier() {
return getRuleContext(IdentifierContext.class,0);
}
public LiteralContext literal() {
return getRuleContext(LiteralContext.class,0);
}
public FromSrcContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_fromSrc; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitFromSrc(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code fromSrc} rule: {@code aliasExpr | (identifier | literal)}.
	 * Adaptive prediction (decision 15) disambiguates an aliased source from a bare one;
	 * inside the bare alternative, one-token lookahead picks identifier vs literal.
	 *
	 * @return the populated {@link FromSrcContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final FromSrcContext fromSrc() throws RecognitionException {
		FromSrcContext _localctx = new FromSrcContext(_ctx, getState());
		enterRule(_localctx, 54, RULE_fromSrc);
		try {
			setState(227);
			_errHandler.sync(this);
			switch ( getInterpreter().adaptivePredict(_input,15,_ctx) ) {
			case 1:
				enterOuterAlt(_localctx, 1);
				{
				setState(222);
				aliasExpr();
				}
				break;
			case 2:
				enterOuterAlt(_localctx, 2);
				{
				setState(225);
				_errHandler.sync(this);
				switch (_input.LA(1)) {
				case ID:
					{
					setState(223);
					identifier();
					}
					break;
				case NUMBER:
				case FLOATING_NUMBER:
				case BOOL:
				case K_LBRACKET:
				case STRING:
					{
					setState(224);
					literal();
					}
					break;
				default:
					throw new NoViableAltException(this);
				}
				}
				break;
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class WhereClauseContext extends ParserRuleContext {
public TerminalNode K_WHERE() { return getToken(AtlasDSLParser.K_WHERE, 0); }
public ExprContext expr() {
return getRuleContext(ExprContext.class,0);
}
public WhereClauseContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_whereClause; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitWhereClause(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code whereClause} rule: {@code K_WHERE expr}.
	 *
	 * @return the populated {@link WhereClauseContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final WhereClauseContext whereClause() throws RecognitionException {
		WhereClauseContext _localctx = new WhereClauseContext(_ctx, getState());
		enterRule(_localctx, 56, RULE_whereClause);
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(229);
			match(K_WHERE);
			setState(230);
			expr();
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class FromExpressionContext extends ParserRuleContext {
public FromSrcContext fromSrc() {
return getRuleContext(FromSrcContext.class,0);
}
public WhereClauseContext whereClause() {
return getRuleContext(WhereClauseContext.class,0);
}
public FromExpressionContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_fromExpression; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitFromExpression(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code fromExpression} rule: {@code fromSrc whereClause?}.
	 * Adaptive prediction (decision 16) decides whether a where clause follows.
	 *
	 * @return the populated {@link FromExpressionContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final FromExpressionContext fromExpression() throws RecognitionException {
		FromExpressionContext _localctx = new FromExpressionContext(_ctx, getState());
		enterRule(_localctx, 58, RULE_fromExpression);
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(232);
			fromSrc();
			setState(234);
			_errHandler.sync(this);
			switch ( getInterpreter().adaptivePredict(_input,16,_ctx) ) {
			case 1:
				{
				setState(233);
				whereClause();
				}
				break;
			}
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class FromClauseContext extends ParserRuleContext {
public TerminalNode K_FROM() { return getToken(AtlasDSLParser.K_FROM, 0); }
public FromExpressionContext fromExpression() {
return getRuleContext(FromExpressionContext.class,0);
}
public FromClauseContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_fromClause; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitFromClause(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code fromClause} rule: {@code K_FROM fromExpression}.
	 *
	 * @return the populated {@link FromClauseContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final FromClauseContext fromClause() throws RecognitionException {
		FromClauseContext _localctx = new FromClauseContext(_ctx, getState());
		enterRule(_localctx, 60, RULE_fromClause);
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(236);
			match(K_FROM);
			setState(237);
			fromExpression();
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class SelectClauseContext extends ParserRuleContext {
public TerminalNode K_SELECT() { return getToken(AtlasDSLParser.K_SELECT, 0); }
public SelectExprContext selectExpr() {
return getRuleContext(SelectExprContext.class,0);
}
public SelectClauseContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_selectClause; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitSelectClause(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code selectClause} rule: {@code K_SELECT selectExpr}.
	 *
	 * @return the populated {@link SelectClauseContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final SelectClauseContext selectClause() throws RecognitionException {
		SelectClauseContext _localctx = new SelectClauseContext(_ctx, getState());
		enterRule(_localctx, 62, RULE_selectClause);
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(239);
			match(K_SELECT);
			setState(240);
			selectExpr();
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class SingleQrySrcContext extends ParserRuleContext {
public FromClauseContext fromClause() {
return getRuleContext(FromClauseContext.class,0);
}
public WhereClauseContext whereClause() {
return getRuleContext(WhereClauseContext.class,0);
}
public FromExpressionContext fromExpression() {
return getRuleContext(FromExpressionContext.class,0);
}
public ExprContext expr() {
return getRuleContext(ExprContext.class,0);
}
public SingleQrySrcContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_singleQrySrc; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitSingleQrySrc(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code singleQrySrc} rule, one of four alternatives chosen by adaptive
	 * prediction (decision 17): {@code fromClause | whereClause | fromExpression | expr}.
	 *
	 * @return the populated {@link SingleQrySrcContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final SingleQrySrcContext singleQrySrc() throws RecognitionException {
		SingleQrySrcContext _localctx = new SingleQrySrcContext(_ctx, getState());
		enterRule(_localctx, 64, RULE_singleQrySrc);
		try {
			setState(246);
			_errHandler.sync(this);
			switch ( getInterpreter().adaptivePredict(_input,17,_ctx) ) {
			case 1:
				enterOuterAlt(_localctx, 1);
				{
				setState(242);
				fromClause();
				}
				break;
			case 2:
				enterOuterAlt(_localctx, 2);
				{
				setState(243);
				whereClause();
				}
				break;
			case 3:
				enterOuterAlt(_localctx, 3);
				{
				setState(244);
				fromExpression();
				}
				break;
			case 4:
				enterOuterAlt(_localctx, 4);
				{
				setState(245);
				expr();
				}
				break;
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class LoopExpressionContext extends ParserRuleContext {
public TerminalNode K_LOOP() { return getToken(AtlasDSLParser.K_LOOP, 0); }
public TerminalNode K_LPAREN() { return getToken(AtlasDSLParser.K_LPAREN, 0); }
public QueryContext query() {
return getRuleContext(QueryContext.class,0);
}
public TerminalNode K_RPAREN() { return getToken(AtlasDSLParser.K_RPAREN, 0); }
public TerminalNode NUMBER() { return getToken(AtlasDSLParser.NUMBER, 0); }
public TerminalNode K_AS() { return getToken(AtlasDSLParser.K_AS, 0); }
public IdentifierContext identifier() {
return getRuleContext(IdentifierContext.class,0);
}
public LoopExpressionContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_loopExpression; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitLoopExpression(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code loopExpression} rule:
	 * {@code K_LOOP K_LPAREN query K_RPAREN NUMBER? (K_AS identifier)?}.
	 * The loop count and the alias are each taken only when their leading token is next.
	 *
	 * @return the populated {@link LoopExpressionContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final LoopExpressionContext loopExpression() throws RecognitionException {
		LoopExpressionContext _localctx = new LoopExpressionContext(_ctx, getState());
		enterRule(_localctx, 66, RULE_loopExpression);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(248);
			match(K_LOOP);
			setState(249);
			match(K_LPAREN);
			setState(250);
			query();
			setState(251);
			match(K_RPAREN);
			setState(253);
			_errHandler.sync(this);
			_la = _input.LA(1);
			if (_la==NUMBER) {
				{
				setState(252);
				match(NUMBER);
				}
			}
			setState(257);
			_errHandler.sync(this);
			_la = _input.LA(1);
			if (_la==K_AS) {
				{
				setState(255);
				match(K_AS);
				setState(256);
				identifier();
				}
			}
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class GroupByExpressionContext extends ParserRuleContext {
public TerminalNode K_GROUPBY() { return getToken(AtlasDSLParser.K_GROUPBY, 0); }
public TerminalNode K_LPAREN() { return getToken(AtlasDSLParser.K_LPAREN, 0); }
public SelectExprContext selectExpr() {
return getRuleContext(SelectExprContext.class,0);
}
public TerminalNode K_RPAREN() { return getToken(AtlasDSLParser.K_RPAREN, 0); }
public GroupByExpressionContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_groupByExpression; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitGroupByExpression(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code groupByExpression} rule: {@code K_GROUPBY K_LPAREN selectExpr K_RPAREN}.
	 *
	 * @return the populated {@link GroupByExpressionContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final GroupByExpressionContext groupByExpression() throws RecognitionException {
		GroupByExpressionContext _localctx = new GroupByExpressionContext(_ctx, getState());
		enterRule(_localctx, 68, RULE_groupByExpression);
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(259);
			match(K_GROUPBY);
			setState(260);
			match(K_LPAREN);
			setState(261);
			selectExpr();
			setState(262);
			match(K_RPAREN);
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class CommaDelimitedQueriesContext extends ParserRuleContext {
public List<SingleQrySrcContext> singleQrySrc() {
return getRuleContexts(SingleQrySrcContext.class);
}
public SingleQrySrcContext singleQrySrc(int i) {
return getRuleContext(SingleQrySrcContext.class,i);
}
public List<TerminalNode> K_COMMA() { return getTokens(AtlasDSLParser.K_COMMA); }
public TerminalNode K_COMMA(int i) {
return getToken(AtlasDSLParser.K_COMMA, i);
}
public CommaDelimitedQueriesContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_commaDelimitedQueries; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitCommaDelimitedQueries(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code commaDelimitedQueries} rule: {@code singleQrySrc (K_COMMA singleQrySrc)*}.
	 *
	 * @return the populated {@link CommaDelimitedQueriesContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final CommaDelimitedQueriesContext commaDelimitedQueries() throws RecognitionException {
		CommaDelimitedQueriesContext _localctx = new CommaDelimitedQueriesContext(_ctx, getState());
		enterRule(_localctx, 70, RULE_commaDelimitedQueries);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(264);
			singleQrySrc();
			setState(269);
			_errHandler.sync(this);
			_la = _input.LA(1);
			// Keep consuming ", singleQrySrc" pairs while a comma follows.
			while (_la==K_COMMA) {
				{
				{
				setState(265);
				match(K_COMMA);
				setState(266);
				singleQrySrc();
				}
				}
				setState(271);
				_errHandler.sync(this);
				_la = _input.LA(1);
			}
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class SpaceDelimitedQueriesContext extends ParserRuleContext {
public List<SingleQrySrcContext> singleQrySrc() {
return getRuleContexts(SingleQrySrcContext.class);
}
public SingleQrySrcContext singleQrySrc(int i) {
return getRuleContext(SingleQrySrcContext.class,i);
}
public SpaceDelimitedQueriesContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_spaceDelimitedQueries; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitSpaceDelimitedQueries(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code spaceDelimitedQueries} rule: {@code singleQrySrc singleQrySrc*}.
	 * The loop continues while the lookahead is in the generated FOLLOW set for a query
	 * source start (literals, parens, FROM/WHERE, aggregate keywords, ID, STRING),
	 * encoded as a 64-bit token bitmask.
	 *
	 * @return the populated {@link SpaceDelimitedQueriesContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final SpaceDelimitedQueriesContext spaceDelimitedQueries() throws RecognitionException {
		SpaceDelimitedQueriesContext _localctx = new SpaceDelimitedQueriesContext(_ctx, getState());
		enterRule(_localctx, 72, RULE_spaceDelimitedQueries);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(272);
			singleQrySrc();
			setState(276);
			_errHandler.sync(this);
			_la = _input.LA(1);
			while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << NUMBER) | (1L << FLOATING_NUMBER) | (1L << BOOL) | (1L << K_LPAREN) | (1L << K_LBRACKET) | (1L << K_FROM) | (1L << K_WHERE) | (1L << K_MAX) | (1L << K_MIN) | (1L << K_SUM) | (1L << K_COUNT) | (1L << ID) | (1L << STRING))) != 0)) {
				{
				{
				setState(273);
				singleQrySrc();
				}
				}
				setState(278);
				_errHandler.sync(this);
				_la = _input.LA(1);
			}
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class QuerySrcContext extends ParserRuleContext {
public CommaDelimitedQueriesContext commaDelimitedQueries() {
return getRuleContext(CommaDelimitedQueriesContext.class,0);
}
public SpaceDelimitedQueriesContext spaceDelimitedQueries() {
return getRuleContext(SpaceDelimitedQueriesContext.class,0);
}
public QuerySrcContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_querySrc; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitQuerySrc(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code querySrc} rule: {@code commaDelimitedQueries | spaceDelimitedQueries},
	 * disambiguated by adaptive prediction (decision 22).
	 *
	 * @return the populated {@link QuerySrcContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final QuerySrcContext querySrc() throws RecognitionException {
		QuerySrcContext _localctx = new QuerySrcContext(_ctx, getState());
		enterRule(_localctx, 74, RULE_querySrc);
		try {
			setState(281);
			_errHandler.sync(this);
			switch ( getInterpreter().adaptivePredict(_input,22,_ctx) ) {
			case 1:
				enterOuterAlt(_localctx, 1);
				{
				setState(279);
				commaDelimitedQueries();
				}
				break;
			case 2:
				enterOuterAlt(_localctx, 2);
				{
				setState(280);
				spaceDelimitedQueries();
				}
				break;
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class QueryContext extends ParserRuleContext {
public QuerySrcContext querySrc() {
return getRuleContext(QuerySrcContext.class,0);
}
public LoopExpressionContext loopExpression() {
return getRuleContext(LoopExpressionContext.class,0);
}
public GroupByExpressionContext groupByExpression() {
return getRuleContext(GroupByExpressionContext.class,0);
}
public SelectClauseContext selectClause() {
return getRuleContext(SelectClauseContext.class,0);
}
public OrderByExprContext orderByExpr() {
return getRuleContext(OrderByExprContext.class,0);
}
public LimitOffsetContext limitOffset() {
return getRuleContext(LimitOffsetContext.class,0);
}
public QueryContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_query; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitQuery(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code query} rule:
	 * {@code querySrc loopExpression? groupByExpression? selectClause? orderByExpr? limitOffset?}.
	 * Each optional segment is entered only when its leading keyword is the next token.
	 *
	 * @return the populated {@link QueryContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final QueryContext query() throws RecognitionException {
		QueryContext _localctx = new QueryContext(_ctx, getState());
		enterRule(_localctx, 76, RULE_query);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(283);
			querySrc();
			setState(285);
			_errHandler.sync(this);
			_la = _input.LA(1);
			if (_la==K_LOOP) {
				{
				setState(284);
				loopExpression();
				}
			}
			setState(288);
			_errHandler.sync(this);
			_la = _input.LA(1);
			if (_la==K_GROUPBY) {
				{
				setState(287);
				groupByExpression();
				}
			}
			setState(291);
			_errHandler.sync(this);
			_la = _input.LA(1);
			if (_la==K_SELECT) {
				{
				setState(290);
				selectClause();
				}
			}
			setState(294);
			_errHandler.sync(this);
			_la = _input.LA(1);
			if (_la==K_ORDERBY) {
				{
				setState(293);
				orderByExpr();
				}
			}
			setState(297);
			_errHandler.sync(this);
			_la = _input.LA(1);
			if (_la==K_LIMIT) {
				{
				setState(296);
				limitOffset();
				}
			}
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
public static class QueryWithPathContext extends ParserRuleContext {
public QueryContext query() {
return getRuleContext(QueryContext.class,0);
}
public TerminalNode K_WITHPATH() { return getToken(AtlasDSLParser.K_WITHPATH, 0); }
public QueryWithPathContext(ParserRuleContext parent, int invokingState) {
super(parent, invokingState);
}
@Override public int getRuleIndex() { return RULE_queryWithPath; }
@Override
public <T> T accept(ParseTreeVisitor<? extends T> visitor) {
if ( visitor instanceof AtlasDSLParserVisitor ) return ((AtlasDSLParserVisitor<? extends T>)visitor).visitQueryWithPath(this);
else return visitor.visitChildren(this);
}
}
	/**
	 * Parses the {@code queryWithPath} rule: {@code query K_WITHPATH?}.
	 * The keyword is consumed only when it is the next token.
	 *
	 * @return the populated {@link QueryWithPathContext} node
	 * @throws RecognitionException if recovery from a mismatch fails
	 */
	public final QueryWithPathContext queryWithPath() throws RecognitionException {
		QueryWithPathContext _localctx = new QueryWithPathContext(_ctx, getState());
		enterRule(_localctx, 78, RULE_queryWithPath);
		int _la;
		try {
			enterOuterAlt(_localctx, 1);
			{
			setState(299);
			query();
			setState(301);
			_errHandler.sync(this);
			_la = _input.LA(1);
			if (_la==K_WITHPATH) {
				{
				setState(300);
				match(K_WITHPATH);
				}
			}
			}
		}
		catch (RecognitionException re) {
			_localctx.exception = re;
			_errHandler.reportError(this, re);
			_errHandler.recover(this, re);
		}
		finally {
			exitRule();
		}
		return _localctx;
	}
	// Serialized ATN (Augmented Transition Network) emitted by the ANTLR tool.
	// Opaque machine data: regenerate from AtlasDSLParser.g4 rather than editing.
	public static final String _serializedATN =
		"\3\u608b\ua72a\u8133\ub9ed\u417c\u3be7\u7786\u5964\3\63\u0132\4\2\t\2"+
		"\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+
		"\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+
		"\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+
		"\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!"+
		"\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\3\2\3\2\3\3\3"+
		"\3\3\4\3\4\3\5\3\5\3\5\3\5\7\5]\n\5\f\5\16\5`\13\5\3\5\3\5\3\6\3\6\3\6"+
		"\3\6\3\6\5\6i\n\6\5\6k\n\6\3\7\3\7\3\7\3\b\3\b\3\b\3\t\3\t\5\tu\n\t\3"+
		"\t\3\t\3\t\3\t\5\t{\n\t\3\n\3\n\3\n\3\13\3\13\7\13\u0082\n\13\f\13\16"+
		"\13\u0085\13\13\3\f\3\f\3\f\3\r\3\r\7\r\u008c\n\r\f\r\16\r\u008f\13\r"+
		"\3\16\3\16\3\16\3\16\3\17\3\17\3\17\3\17\3\20\3\20\3\20\3\20\3\21\3\21"+
		"\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\23\3\23\3\23\3\23\3\23\3\24\3\24"+
		"\3\24\3\24\3\24\3\25\3\25\3\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26"+
		"\5\26\u00bb\n\26\3\27\3\27\7\27\u00bf\n\27\f\27\16\27\u00c2\13\27\3\30"+
		"\3\30\5\30\u00c6\n\30\3\31\3\31\3\31\5\31\u00cb\n\31\3\32\3\32\3\32\7"+
		"\32\u00d0\n\32\f\32\16\32\u00d3\13\32\3\33\3\33\5\33\u00d7\n\33\3\33\3"+
		"\33\3\33\3\34\3\34\3\34\5\34\u00df\n\34\3\35\3\35\3\35\5\35\u00e4\n\35"+
		"\5\35\u00e6\n\35\3\36\3\36\3\36\3\37\3\37\5\37\u00ed\n\37\3 \3 \3 \3!"+
		"\3!\3!\3\"\3\"\3\"\3\"\5\"\u00f9\n\"\3#\3#\3#\3#\3#\5#\u0100\n#\3#\3#"+
		"\5#\u0104\n#\3$\3$\3$\3$\3$\3%\3%\3%\7%\u010e\n%\f%\16%\u0111\13%\3&\3"+
		"&\7&\u0115\n&\f&\16&\u0118\13&\3\'\3\'\5\'\u011c\n\'\3(\3(\5(\u0120\n"+
		"(\3(\5(\u0123\n(\3(\5(\u0126\n(\3(\5(\u0129\n(\3(\5(\u012c\n(\3)\3)\5"+
		")\u0130\n)\3)\2\2*\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60"+
		"\62\64\668:<>@BDFHJLNP\2\b\4\2\17\17\26\33\3\2,-\3\2\f\r\3\2\n\13\3\2"+
		")*\3\2\20\21\2\u0130\2R\3\2\2\2\4T\3\2\2\2\6V\3\2\2\2\bX\3\2\2\2\nj\3"+
		"\2\2\2\fl\3\2\2\2\16o\3\2\2\2\20z\3\2\2\2\22|\3\2\2\2\24\177\3\2\2\2\26"+
		"\u0086\3\2\2\2\30\u0089\3\2\2\2\32\u0090\3\2\2\2\34\u0094\3\2\2\2\36\u0098"+
		"\3\2\2\2 \u009c\3\2\2\2\"\u00a0\3\2\2\2$\u00a5\3\2\2\2&\u00aa\3\2\2\2"+
		"(\u00af\3\2\2\2*\u00ba\3\2\2\2,\u00bc\3\2\2\2.\u00c3\3\2\2\2\60\u00c7"+
		"\3\2\2\2\62\u00cc\3\2\2\2\64\u00d6\3\2\2\2\66\u00db\3\2\2\28\u00e5\3\2"+
		"\2\2:\u00e7\3\2\2\2<\u00ea\3\2\2\2>\u00ee\3\2\2\2@\u00f1\3\2\2\2B\u00f8"+
		"\3\2\2\2D\u00fa\3\2\2\2F\u0105\3\2\2\2H\u010a\3\2\2\2J\u0112\3\2\2\2L"+
		"\u011b\3\2\2\2N\u011d\3\2\2\2P\u012d\3\2\2\2RS\7\62\2\2S\3\3\2\2\2TU\t"+
		"\2\2\2U\5\3\2\2\2VW\t\3\2\2W\7\3\2\2\2XY\7\23\2\2Y^\7\63\2\2Z[\7\t\2\2"+
		"[]\7\63\2\2\\Z\3\2\2\2]`\3\2\2\2^\\\3\2\2\2^_\3\2\2\2_a\3\2\2\2`^\3\2"+
		"\2\2ab\7\25\2\2b\t\3\2\2\2ck\7\b\2\2dk\7\6\2\2ek\7\7\2\2fi\7\63\2\2gi"+
		"\5\b\5\2hf\3\2\2\2hg\3\2\2\2ik\3\2\2\2jc\3\2\2\2jd\3\2\2\2je\3\2\2\2j"+
		"h\3\2\2\2k\13\3\2\2\2lm\7 \2\2mn\7\6\2\2n\r\3\2\2\2op\7\'\2\2pq\7\6\2"+
		"\2q\17\3\2\2\2ru\5\2\2\2su\5\n\6\2tr\3\2\2\2ts\3\2\2\2u{\3\2\2\2vw\7\22"+
		"\2\2wx\5,\27\2xy\7\24\2\2y{\3\2\2\2zt\3\2\2\2zv\3\2\2\2{\21\3\2\2\2|}"+
		"\t\4\2\2}~\5\20\t\2~\23\3\2\2\2\177\u0083\5\20\t\2\u0080\u0082\5\22\n"+
		"\2\u0081\u0080\3\2\2\2\u0082\u0085\3\2\2\2\u0083\u0081\3\2\2\2\u0083\u0084"+
		"\3\2\2\2\u0084\25\3\2\2\2\u0085\u0083\3\2\2\2\u0086\u0087\t\5\2\2\u0087"+
		"\u0088\5\24\13\2\u0088\27\3\2\2\2\u0089\u008d\5\24\13\2\u008a\u008c\5"+
		"\26\f\2\u008b\u008a\3\2\2\2\u008c\u008f\3\2\2\2\u008d\u008b\3\2\2\2\u008d"+
		"\u008e\3\2\2\2\u008e\31\3\2\2\2\u008f\u008d\3\2\2\2\u0090\u0091\5\30\r"+
		"\2\u0091\u0092\5\4\3\2\u0092\u0093\5\30\r\2\u0093\33\3\2\2\2\u0094\u0095"+
		"\5\30\r\2\u0095\u0096\t\6\2\2\u0096\u0097\5\2\2\2\u0097\35\3\2\2\2\u0098"+
		"\u0099\5\30\r\2\u0099\u009a\7+\2\2\u009a\u009b\5\2\2\2\u009b\37\3\2\2"+
		"\2\u009c\u009d\7%\2\2\u009d\u009e\7\22\2\2\u009e\u009f\7\24\2\2\u009f"+
		"!\3\2\2\2\u00a0\u00a1\7\"\2\2\u00a1\u00a2\7\22\2\2\u00a2\u00a3\5,\27\2"+
		"\u00a3\u00a4\7\24\2\2\u00a4#\3\2\2\2\u00a5\u00a6\7#\2\2\u00a6\u00a7\7"+
		"\22\2\2\u00a7\u00a8\5,\27\2\u00a8\u00a9\7\24\2\2\u00a9%\3\2\2\2\u00aa"+
		"\u00ab\7$\2\2\u00ab\u00ac\7\22\2\2\u00ac\u00ad\5,\27\2\u00ad\u00ae\7\24"+
		"\2\2\u00ae\'\3\2\2\2\u00af\u00b0\t\7\2\2\u00b0\u00b1\5*\26\2\u00b1)\3"+
		"\2\2\2\u00b2\u00bb\5\32\16\2\u00b3\u00bb\5\34\17\2\u00b4\u00bb\5\36\20"+
		"\2\u00b5\u00bb\5\30\r\2\u00b6\u00bb\5 \21\2\u00b7\u00bb\5\"\22\2\u00b8"+
		"\u00bb\5$\23\2\u00b9\u00bb\5&\24\2\u00ba\u00b2\3\2\2\2\u00ba\u00b3\3\2"+
		"\2\2\u00ba\u00b4\3\2\2\2\u00ba\u00b5\3\2\2\2\u00ba\u00b6\3\2\2\2\u00ba"+
		"\u00b7\3\2\2\2\u00ba\u00b8\3\2\2\2\u00ba\u00b9\3\2\2\2\u00bb+\3\2\2\2"+
		"\u00bc\u00c0\5*\26\2\u00bd\u00bf\5(\25\2\u00be\u00bd\3\2\2\2\u00bf\u00c2"+
		"\3\2\2\2\u00c0\u00be\3\2\2\2\u00c0\u00c1\3\2\2\2\u00c1-\3\2\2\2\u00c2"+
		"\u00c0\3\2\2\2\u00c3\u00c5\5\f\7\2\u00c4\u00c6\5\16\b\2\u00c5\u00c4\3"+
		"\2\2\2\u00c5\u00c6\3\2\2\2\u00c6/\3\2\2\2\u00c7\u00ca\5,\27\2\u00c8\u00c9"+
		"\7(\2\2\u00c9\u00cb\5\2\2\2\u00ca\u00c8\3\2\2\2\u00ca\u00cb\3\2\2\2\u00cb"+
		"\61\3\2\2\2\u00cc\u00d1\5\60\31\2\u00cd\u00ce\7\t\2\2\u00ce\u00d0\5\60"+
		"\31\2\u00cf\u00cd\3\2\2\2\u00d0\u00d3\3\2\2\2\u00d1\u00cf\3\2\2\2\u00d1"+
		"\u00d2\3\2\2\2\u00d2\63\3\2\2\2\u00d3\u00d1\3\2\2\2\u00d4\u00d7\5\2\2"+
		"\2\u00d5\u00d7\5\n\6\2\u00d6\u00d4\3\2\2\2\u00d6\u00d5\3\2\2\2\u00d7\u00d8"+
		"\3\2\2\2\u00d8\u00d9\7(\2\2\u00d9\u00da\5\2\2\2\u00da\65\3\2\2\2\u00db"+
		"\u00dc\7\36\2\2\u00dc\u00de\5,\27\2\u00dd\u00df\5\6\4\2\u00de\u00dd\3"+
		"\2\2\2\u00de\u00df\3\2\2\2\u00df\67\3\2\2\2\u00e0\u00e6\5\64\33\2\u00e1"+
		"\u00e4\5\2\2\2\u00e2\u00e4\5\n\6\2\u00e3\u00e1\3\2\2\2\u00e3\u00e2\3\2"+
		"\2\2\u00e4\u00e6\3\2\2\2\u00e5\u00e0\3\2\2\2\u00e5\u00e3\3\2\2\2\u00e6"+
		"9\3\2\2\2\u00e7\u00e8\7\35\2\2\u00e8\u00e9\5,\27\2\u00e9;\3\2\2\2\u00ea"+
		"\u00ec\58\35\2\u00eb\u00ed\5:\36\2\u00ec\u00eb\3\2\2\2\u00ec\u00ed\3\2"+
		"\2\2\u00ed=\3\2\2\2\u00ee\u00ef\7\34\2\2\u00ef\u00f0\5<\37\2\u00f0?\3"+
		"\2\2\2\u00f1\u00f2\7!\2\2\u00f2\u00f3\5\62\32\2\u00f3A\3\2\2\2\u00f4\u00f9"+
		"\5> \2\u00f5\u00f9\5:\36\2\u00f6\u00f9\5<\37\2\u00f7\u00f9\5,\27\2\u00f8"+
		"\u00f4\3\2\2\2\u00f8\u00f5\3\2\2\2\u00f8\u00f6\3\2\2\2\u00f8\u00f7\3\2"+
		"\2\2\u00f9C\3\2\2\2\u00fa\u00fb\7&\2\2\u00fb\u00fc\7\22\2\2\u00fc\u00fd"+
		"\5N(\2\u00fd\u00ff\7\24\2\2\u00fe\u0100\7\6\2\2\u00ff\u00fe\3\2\2\2\u00ff"+
		"\u0100\3\2\2\2\u0100\u0103\3\2\2\2\u0101\u0102\7(\2\2\u0102\u0104\5\2"+
		"\2\2\u0103\u0101\3\2\2\2\u0103\u0104\3\2\2\2\u0104E\3\2\2\2\u0105\u0106"+
		"\7\37\2\2\u0106\u0107\7\22\2\2\u0107\u0108\5\62\32\2\u0108\u0109\7\24"+
		"\2\2\u0109G\3\2\2\2\u010a\u010f\5B\"\2\u010b\u010c\7\t\2\2\u010c\u010e"+
		"\5B\"\2\u010d\u010b\3\2\2\2\u010e\u0111\3\2\2\2\u010f\u010d\3\2\2\2\u010f"+
		"\u0110\3\2\2\2\u0110I\3\2\2\2\u0111\u010f\3\2\2\2\u0112\u0116\5B\"\2\u0113"+
		"\u0115\5B\"\2\u0114\u0113\3\2\2\2\u0115\u0118\3\2\2\2\u0116\u0114\3\2"+
		"\2\2\u0116\u0117\3\2\2\2\u0117K\3\2\2\2\u0118\u0116\3\2\2\2\u0119\u011c"+
		"\5H%\2\u011a\u011c\5J&\2\u011b\u0119\3\2\2\2\u011b\u011a\3\2\2\2\u011c"+
		"M\3\2\2\2\u011d\u011f\5L\'\2\u011e\u0120\5D#\2\u011f\u011e\3\2\2\2\u011f"+
		"\u0120\3\2\2\2\u0120\u0122\3\2\2\2\u0121\u0123\5F$\2\u0122\u0121\3\2\2"+
		"\2\u0122\u0123\3\2\2\2\u0123\u0125\3\2\2\2\u0124\u0126\5@!\2\u0125\u0124"+
		"\3\2\2\2\u0125\u0126\3\2\2\2\u0126\u0128\3\2\2\2\u0127\u0129\5\66\34\2"+
		"\u0128\u0127\3\2\2\2\u0128\u0129\3\2\2\2\u0129\u012b\3\2\2\2\u012a\u012c"+
		"\5.\30\2\u012b\u012a\3\2\2\2\u012b\u012c\3\2\2\2\u012cO\3\2\2\2\u012d"+
		"\u012f\5N(\2\u012e\u0130\7.\2\2\u012f\u012e\3\2\2\2\u012f\u0130\3\2\2"+
		"\2\u0130Q\3\2\2\2\37^hjtz\u0083\u008d\u00ba\u00c0\u00c5\u00ca\u00d1\u00d6"+
		"\u00de\u00e3\u00e5\u00ec\u00f8\u00ff\u0103\u010f\u0116\u011b\u011f\u0122"+
		"\u0125\u0128\u012b\u012f";
	// The ATN is deserialized exactly once at class-load time and shared by all
	// parser instances.
	public static final ATN _ATN =
		new ATNDeserializer().deserialize(_serializedATN.toCharArray());
	static {
		// One DFA slot per parser decision; the DFAs are filled in lazily
		// during parsing as adaptive-prediction results are cached.
		_decisionToDFA = new DFA[_ATN.getNumberOfDecisions()];
		for (int i = 0; i < _ATN.getNumberOfDecisions(); i++) {
			_decisionToDFA[i] = new DFA(_ATN.getDecisionState(i), i);
		}
	}
}
\ No newline at end of file
SINGLE_LINE_COMMENT=1
MULTILINE_COMMENT=2
WS=3
NUMBER=4
FLOATING_NUMBER=5
BOOL=6
K_COMMA=7
K_PLUS=8
K_MINUS=9
K_STAR=10
K_DIV=11
K_DOT=12
K_LIKE=13
K_AND=14
K_OR=15
K_LPAREN=16
K_LBRACKET=17
K_RPAREN=18
K_RBRACKET=19
K_LT=20
K_LTE=21
K_EQ=22
K_NEQ=23
K_GT=24
K_GTE=25
K_FROM=26
K_WHERE=27
K_ORDERBY=28
K_GROUPBY=29
K_LIMIT=30
K_SELECT=31
K_MAX=32
K_MIN=33
K_SUM=34
K_COUNT=35
K_LOOP=36
K_OFFSET=37
K_AS=38
K_ISA=39
K_IS=40
K_HAS=41
K_ASC=42
K_DESC=43
K_WITHPATH=44
K_TRUE=45
K_FALSE=46
KEYWORD=47
ID=48
STRING=49
','=7
'+'=8
'-'=9
'*'=10
'/'=11
'.'=12
'('=16
'['=17
')'=18
']'=19
// Generated from AtlasDSLParser.g4 by ANTLR 4.7
package org.apache.atlas.query.antlr4;
import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor;
/**
 * This class provides an empty implementation of {@link AtlasDSLParserVisitor},
 * which can be extended to create a visitor which only needs to handle a subset
 * of the available methods.
 *
 * <p>Generated by ANTLR from AtlasDSLParser.g4 — do not edit by hand; every
 * method simply delegates to {@link AbstractParseTreeVisitor#visitChildren}.</p>
 *
 * @param <T> The return type of the visit operation. Use {@link Void} for
 * operations with no return type.
 */
public class AtlasDSLParserBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements AtlasDSLParserVisitor<T> {
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitIdentifier(AtlasDSLParser.IdentifierContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitOperator(AtlasDSLParser.OperatorContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitSortOrder(AtlasDSLParser.SortOrderContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitValueArray(AtlasDSLParser.ValueArrayContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitLiteral(AtlasDSLParser.LiteralContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitLimitClause(AtlasDSLParser.LimitClauseContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitOffsetClause(AtlasDSLParser.OffsetClauseContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitAtomE(AtlasDSLParser.AtomEContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitMultiERight(AtlasDSLParser.MultiERightContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitMultiE(AtlasDSLParser.MultiEContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitArithERight(AtlasDSLParser.ArithERightContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitArithE(AtlasDSLParser.ArithEContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitComparisonClause(AtlasDSLParser.ComparisonClauseContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitIsClause(AtlasDSLParser.IsClauseContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitHasClause(AtlasDSLParser.HasClauseContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitCountClause(AtlasDSLParser.CountClauseContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitMaxClause(AtlasDSLParser.MaxClauseContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitMinClause(AtlasDSLParser.MinClauseContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitSumClause(AtlasDSLParser.SumClauseContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitExprRight(AtlasDSLParser.ExprRightContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitCompE(AtlasDSLParser.CompEContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitExpr(AtlasDSLParser.ExprContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitLimitOffset(AtlasDSLParser.LimitOffsetContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitSelectExpression(AtlasDSLParser.SelectExpressionContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitSelectExpr(AtlasDSLParser.SelectExprContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitAliasExpr(AtlasDSLParser.AliasExprContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitOrderByExpr(AtlasDSLParser.OrderByExprContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitFromSrc(AtlasDSLParser.FromSrcContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitWhereClause(AtlasDSLParser.WhereClauseContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitFromExpression(AtlasDSLParser.FromExpressionContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitFromClause(AtlasDSLParser.FromClauseContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitSelectClause(AtlasDSLParser.SelectClauseContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitSingleQrySrc(AtlasDSLParser.SingleQrySrcContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitLoopExpression(AtlasDSLParser.LoopExpressionContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitGroupByExpression(AtlasDSLParser.GroupByExpressionContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitCommaDelimitedQueries(AtlasDSLParser.CommaDelimitedQueriesContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitSpaceDelimitedQueries(AtlasDSLParser.SpaceDelimitedQueriesContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitQuerySrc(AtlasDSLParser.QuerySrcContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitQuery(AtlasDSLParser.QueryContext ctx) { return visitChildren(ctx); }
	/** {@inheritDoc} <p>The default implementation returns the result of calling {@link #visitChildren} on {@code ctx}.</p> */
	@Override public T visitQueryWithPath(AtlasDSLParser.QueryWithPathContext ctx) { return visitChildren(ctx); }
}
\ No newline at end of file
// Generated from AtlasDSLParser.g4 by ANTLR 4.7
package org.apache.atlas.query.antlr4;
import org.antlr.v4.runtime.tree.ParseTreeVisitor;
/**
 * This interface defines a complete generic visitor for a parse tree produced
 * by {@link AtlasDSLParser}.
 *
 * <p>Generated by ANTLR from AtlasDSLParser.g4 — do not edit by hand; one
 * method per grammar rule. Each method takes the rule's parse-tree context
 * ({@code ctx}) and returns the visitor result.</p>
 *
 * @param <T> The return type of the visit operation. Use {@link Void} for
 * operations with no return type.
 */
public interface AtlasDSLParserVisitor<T> extends ParseTreeVisitor<T> {
	/** Visit a parse tree produced by {@link AtlasDSLParser#identifier}. */
	T visitIdentifier(AtlasDSLParser.IdentifierContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#operator}. */
	T visitOperator(AtlasDSLParser.OperatorContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#sortOrder}. */
	T visitSortOrder(AtlasDSLParser.SortOrderContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#valueArray}. */
	T visitValueArray(AtlasDSLParser.ValueArrayContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#literal}. */
	T visitLiteral(AtlasDSLParser.LiteralContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#limitClause}. */
	T visitLimitClause(AtlasDSLParser.LimitClauseContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#offsetClause}. */
	T visitOffsetClause(AtlasDSLParser.OffsetClauseContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#atomE}. */
	T visitAtomE(AtlasDSLParser.AtomEContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#multiERight}. */
	T visitMultiERight(AtlasDSLParser.MultiERightContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#multiE}. */
	T visitMultiE(AtlasDSLParser.MultiEContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#arithERight}. */
	T visitArithERight(AtlasDSLParser.ArithERightContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#arithE}. */
	T visitArithE(AtlasDSLParser.ArithEContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#comparisonClause}. */
	T visitComparisonClause(AtlasDSLParser.ComparisonClauseContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#isClause}. */
	T visitIsClause(AtlasDSLParser.IsClauseContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#hasClause}. */
	T visitHasClause(AtlasDSLParser.HasClauseContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#countClause}. */
	T visitCountClause(AtlasDSLParser.CountClauseContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#maxClause}. */
	T visitMaxClause(AtlasDSLParser.MaxClauseContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#minClause}. */
	T visitMinClause(AtlasDSLParser.MinClauseContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#sumClause}. */
	T visitSumClause(AtlasDSLParser.SumClauseContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#exprRight}. */
	T visitExprRight(AtlasDSLParser.ExprRightContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#compE}. */
	T visitCompE(AtlasDSLParser.CompEContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#expr}. */
	T visitExpr(AtlasDSLParser.ExprContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#limitOffset}. */
	T visitLimitOffset(AtlasDSLParser.LimitOffsetContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#selectExpression}. */
	T visitSelectExpression(AtlasDSLParser.SelectExpressionContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#selectExpr}. */
	T visitSelectExpr(AtlasDSLParser.SelectExprContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#aliasExpr}. */
	T visitAliasExpr(AtlasDSLParser.AliasExprContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#orderByExpr}. */
	T visitOrderByExpr(AtlasDSLParser.OrderByExprContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#fromSrc}. */
	T visitFromSrc(AtlasDSLParser.FromSrcContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#whereClause}. */
	T visitWhereClause(AtlasDSLParser.WhereClauseContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#fromExpression}. */
	T visitFromExpression(AtlasDSLParser.FromExpressionContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#fromClause}. */
	T visitFromClause(AtlasDSLParser.FromClauseContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#selectClause}. */
	T visitSelectClause(AtlasDSLParser.SelectClauseContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#singleQrySrc}. */
	T visitSingleQrySrc(AtlasDSLParser.SingleQrySrcContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#loopExpression}. */
	T visitLoopExpression(AtlasDSLParser.LoopExpressionContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#groupByExpression}. */
	T visitGroupByExpression(AtlasDSLParser.GroupByExpressionContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#commaDelimitedQueries}. */
	T visitCommaDelimitedQueries(AtlasDSLParser.CommaDelimitedQueriesContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#spaceDelimitedQueries}. */
	T visitSpaceDelimitedQueries(AtlasDSLParser.SpaceDelimitedQueriesContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#querySrc}. */
	T visitQuerySrc(AtlasDSLParser.QuerySrcContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#query}. */
	T visitQuery(AtlasDSLParser.QueryContext ctx);
	/** Visit a parse tree produced by {@link AtlasDSLParser#queryWithPath}. */
	T visitQueryWithPath(AtlasDSLParser.QueryWithPathContext ctx);
}
\ No newline at end of file
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.query;
import com.google.common.collect.ImmutableList;
import org.apache.atlas.AtlasClient;
import org.apache.atlas.TestUtilsV2;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.instance.AtlasClassification;
import org.apache.atlas.model.instance.AtlasEntity;
import org.apache.atlas.model.typedef.AtlasTypesDef;
import org.apache.atlas.repository.store.graph.AtlasEntityStore;
import org.apache.atlas.repository.store.graph.v1.AtlasEntityStream;
import org.apache.atlas.store.AtlasTypeDefStore;
import org.apache.atlas.type.AtlasTypeRegistry;
import javax.inject.Inject;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Date;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.apache.atlas.repository.impexp.ZipFileResourceTestUtils.loadModelFromJson;
import static org.testng.Assert.fail;
public abstract class BasicTestSetup {
    // Hive type names used when building the test fixture entities.
    protected static final String DATABASE_TYPE = "hive_db";
    protected static final String HIVE_TABLE_TYPE = "hive_table";
    private static final String COLUMN_TYPE = "hive_column";
    private static final String HIVE_PROCESS_TYPE = "hive_process";
    private static final String STORAGE_DESC_TYPE = "StorageDesc";
    private static final String VIEW_TYPE = "View";
    private static final String PARTITION_TYPE = "hive_partition";
    protected static final String DATASET_SUBTYPE = "dataset_subtype";
    // Stores injected by the DI container: type registry/def store for
    // registering type models, entity store for persisting test entities.
    @Inject
    protected AtlasTypeRegistry atlasTypeRegistry;
    @Inject
    protected AtlasTypeDefStore atlasTypeDefStore;
    @Inject
    protected AtlasEntityStore atlasEntityStore;
    // Guards against re-loading the base type model from each dataset loader.
    private boolean baseLoaded = false;
protected void setupTestData() {
loadBaseModels();
loadHiveDataset();
loadEmployeeDataset();
}
private void loadBaseModels() {
// Load all base models
try {
loadModelFromJson("0000-Area0/0010-base_model.json", atlasTypeDefStore, atlasTypeRegistry);
baseLoaded = true;
} catch (IOException | AtlasBaseException e) {
fail("Base model setup is required for test to run");
}
}
protected void loadHiveDataset() {
if (!baseLoaded) {
loadBaseModels();
}
try {
loadModelFromJson("1000-Hadoop/1030-hive_model.json", atlasTypeDefStore, atlasTypeRegistry);
} catch (IOException | AtlasBaseException e) {
fail("Hive model setup is required for test to run");
}
AtlasEntity.AtlasEntitiesWithExtInfo hiveTestEntities = hiveTestEntities();
try {
atlasEntityStore.createOrUpdate(new AtlasEntityStream(hiveTestEntities), false);
} catch (AtlasBaseException e) {
fail("Hive instance setup is needed for test to run");
}
}
protected void loadEmployeeDataset() {
if (!baseLoaded) {
loadBaseModels();
}
// Define employee dataset types
AtlasTypesDef employeeTypes = TestUtilsV2.defineDeptEmployeeTypes();
try {
atlasTypeDefStore.createTypesDef(employeeTypes);
} catch (AtlasBaseException e) {
fail("Employee Type setup is required");
}
// Define entities for department
AtlasEntity.AtlasEntitiesWithExtInfo deptEg2 = TestUtilsV2.createDeptEg2();
try {
atlasEntityStore.createOrUpdate(new AtlasEntityStream(deptEg2), false);
} catch (AtlasBaseException e) {
fail("Employee entity setup should've passed");
}
}
    /**
     * Builds the full hive fixture graph the DSL query tests assert against:
     * three databases (Sales, Reporting, Logging), fact/dimension tables with
     * classified columns, lineage processes (including a circular lineage pair),
     * views, one partition and one dataset subtype instance.
     * NOTE(review): the expected counts hard-coded in the DSL data providers
     * (e.g. 3 hive_db, 10 hive_table, 37 hive_column) derive directly from what
     * is added here — keep them in sync when editing this fixture.
     */
    public AtlasEntity.AtlasEntitiesWithExtInfo hiveTestEntities() {
        List<AtlasEntity> entities = new ArrayList<>();
        // --- Sales database with a shared storage descriptor ---
        AtlasEntity salesDB = database("Sales", "Sales Database", "John ETL", "hdfs://host:8000/apps/warehouse/sales");
        entities.add(salesDB);
        AtlasEntity sd =
                storageDescriptor("hdfs://host:8000/apps/warehouse/sales", "TextInputFormat", "TextOutputFormat", true, ImmutableList.of(
                        column("time_id", "int", "time id")));
        entities.add(sd);
        // --- Fact and dimension tables in Sales ---
        List<AtlasEntity> salesFactColumns = ImmutableList
                                                     .of(column("time_id", "int", "time id"),
                                                             column("product_id", "int", "product id"),
                                                             column("customer_id", "int", "customer id", "PII"),
                                                             column("sales", "double", "product id", "Metric"));
        entities.addAll(salesFactColumns);
        AtlasEntity salesFact = table("sales_fact", "sales fact table", salesDB, sd, "Joe", "Managed", salesFactColumns, "Fact");
        entities.add(salesFact);
        List<AtlasEntity> logFactColumns = ImmutableList
                                                   .of(column("time_id", "int", "time id"), column("app_id", "int", "app id"),
                                                           column("machine_id", "int", "machine id"), column("log", "string", "log data", "Log Data"));
        entities.addAll(logFactColumns);
        List<AtlasEntity> timeDimColumns = ImmutableList
                                                   .of(column("time_id", "int", "time id"),
                                                           column("dayOfYear", "int", "day Of Year"),
                                                           column("weekDay", "int", "week Day"));
        entities.addAll(timeDimColumns);
        AtlasEntity timeDim = table("time_dim", "time dimension table", salesDB, sd, "John Doe", "External", timeDimColumns,
                "Dimension");
        entities.add(timeDim);
        // --- Reporting database, materialized views and circular lineage ---
        AtlasEntity reportingDB =
                database("Reporting", "reporting database", "Jane BI", "hdfs://host:8000/apps/warehouse/reporting");
        entities.add(reportingDB);
        AtlasEntity salesFactDaily =
                table("sales_fact_daily_mv", "sales fact daily materialized view", reportingDB, sd, "Joe BI", "Managed",
                        salesFactColumns, "Metric");
        entities.add(salesFactDaily);
        AtlasEntity circularLineageTable1 = table("table1", "", reportingDB, sd, "Vimal", "Managed", salesFactColumns, "Metric");
        entities.add(circularLineageTable1);
        AtlasEntity circularLineageTable2 = table("table2", "", reportingDB, sd, "Vimal", "Managed", salesFactColumns, "Metric");
        entities.add(circularLineageTable2);
        // Two processes feeding each other's table form a lineage cycle (table1 -> table2 -> table1)
        AtlasEntity circularLineage1Process = loadProcess("circularLineage1", "hive query for daily summary", "John ETL", ImmutableList.of(circularLineageTable1),
                ImmutableList.of(circularLineageTable2), "create table as select ", "plan", "id", "graph", "ETL");
        entities.add(circularLineage1Process);
        AtlasEntity circularLineage2Process = loadProcess("circularLineage2", "hive query for daily summary", "John ETL", ImmutableList.of(circularLineageTable2),
                ImmutableList.of(circularLineageTable1), "create table as select ", "plan", "id", "graph", "ETL");
        entities.add(circularLineage2Process);
        AtlasEntity loadSalesDaily = loadProcess("loadSalesDaily", "hive query for daily summary", "John ETL", ImmutableList.of(salesFact, timeDim),
                ImmutableList.of(salesFactDaily), "create table as select ", "plan", "id", "graph", "ETL");
        entities.add(loadSalesDaily);
        // --- Logging database and its materialized views ---
        AtlasEntity logDB = database("Logging", "logging database", "Tim ETL", "hdfs://host:8000/apps/warehouse/logging");
        entities.add(logDB);
        AtlasEntity loggingFactDaily =
                table("log_fact_daily_mv", "log fact daily materialized view", logDB, sd, "Tim ETL", "Managed",
                        logFactColumns, "Log Data");
        entities.add(loggingFactDaily);
        // --- Dimension tables and JdbcAccess views ---
        List<AtlasEntity> productDimColumns = ImmutableList
                                                      .of(column("product_id", "int", "product id"),
                                                              column("product_name", "string", "product name"),
                                                              column("brand_name", "int", "brand name"));
        entities.addAll(productDimColumns);
        AtlasEntity productDim =
                table("product_dim", "product dimension table", salesDB, sd, "John Doe", "Managed", productDimColumns,
                        "Dimension");
        entities.add(productDim);
        AtlasEntity productDimView = view("product_dim_view", reportingDB, ImmutableList.of(productDim), "Dimension", "JdbcAccess");
        entities.add(productDimView);
        List<AtlasEntity> customerDimColumns = ImmutableList.of(
                column("customer_id", "int", "customer id", "PII"),
                column("name", "string", "customer name", "PII"),
                column("address", "string", "customer address", "PII"));
        entities.addAll(customerDimColumns);
        AtlasEntity customerDim =
                table("customer_dim", "customer dimension table", salesDB, sd, "fetl", "External", customerDimColumns,
                        "Dimension");
        entities.add(customerDim);
        AtlasEntity customerDimView = view("customer_dim_view", reportingDB, ImmutableList.of(customerDim), "Dimension", "JdbcAccess");
        entities.add(customerDimView);
        // --- Monthly roll-ups and their lineage ---
        AtlasEntity salesFactMonthly =
                table("sales_fact_monthly_mv", "sales fact monthly materialized view", reportingDB, sd, "Jane BI",
                        "Managed", salesFactColumns, "Metric");
        entities.add(salesFactMonthly);
        AtlasEntity loadSalesMonthly = loadProcess("loadSalesMonthly", "hive query for monthly summary", "John ETL", ImmutableList.of(salesFactDaily),
                ImmutableList.of(salesFactMonthly), "create table as select ", "plan", "id", "graph", "ETL");
        entities.add(loadSalesMonthly);
        AtlasEntity loggingFactMonthly =
                table("logging_fact_monthly_mv", "logging fact monthly materialized view", logDB, sd, "Tim ETL",
                        "Managed", logFactColumns, "Log Data");
        entities.add(loggingFactMonthly);
        AtlasEntity loadLogsMonthly = loadProcess("loadLogsMonthly", "hive query for monthly summary", "Tim ETL", ImmutableList.of(loggingFactDaily),
                ImmutableList.of(loggingFactMonthly), "create table as select ", "plan", "id", "graph", "ETL");
        entities.add(loadLogsMonthly);
        // --- Single partition on the daily sales MV, and a DataSet subtype instance ---
        AtlasEntity partition = partition(new ArrayList() {{
            add("2015-01-01");
        }}, salesFactDaily);
        entities.add(partition);
        AtlasEntity datasetSubType = datasetSubType("dataSetSubTypeInst1", "testOwner");
        entities.add(datasetSubType);
        return new AtlasEntity.AtlasEntitiesWithExtInfo(entities);
    }
AtlasEntity database(String name, String description, String owner, String locationUri, String... traitNames) {
AtlasEntity database = new AtlasEntity(DATABASE_TYPE);
database.setAttribute("name", name);
database.setAttribute("description", description);
database.setAttribute("owner", owner);
database.setAttribute("locationUri", locationUri);
database.setAttribute("createTime", System.currentTimeMillis());
database.setClassifications(Stream.of(traitNames).map(AtlasClassification::new).collect(Collectors.toList()));
return database;
}
protected AtlasEntity storageDescriptor(String location, String inputFormat, String outputFormat, boolean compressed, List<AtlasEntity> columns) {
AtlasEntity storageDescriptor = new AtlasEntity(STORAGE_DESC_TYPE);
storageDescriptor.setAttribute("location", location);
storageDescriptor.setAttribute("inputFormat", inputFormat);
storageDescriptor.setAttribute("outputFormat", outputFormat);
storageDescriptor.setAttribute("compressed", compressed);
storageDescriptor.setAttribute("cols", columns);
return storageDescriptor;
}
protected AtlasEntity column(String name, String dataType, String comment, String... traitNames) {
AtlasEntity column = new AtlasEntity(COLUMN_TYPE);
column.setAttribute("name", name);
column.setAttribute("dataType", dataType);
column.setAttribute("comment", comment);
column.setClassifications(Stream.of(traitNames).map(AtlasClassification::new).collect(Collectors.toList()));
return column;
}
protected AtlasEntity table(String name, String description, AtlasEntity db, AtlasEntity sd, String owner, String tableType,
List<AtlasEntity> columns, String... traitNames) {
AtlasEntity table = new AtlasEntity(HIVE_TABLE_TYPE);
table.setAttribute("name", name);
table.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, "qualified:" + name);
table.setAttribute("description", description);
table.setAttribute("owner", owner);
table.setAttribute("tableType", tableType);
table.setAttribute("temporary", false);
table.setAttribute("createTime", new Date(System.currentTimeMillis()));
table.setAttribute("lastAccessTime", System.currentTimeMillis());
table.setAttribute("retention", System.currentTimeMillis());
table.setAttribute("db", db);
// todo - uncomment this, something is broken
table.setAttribute("sd", sd);
table.setAttribute("columns", columns);
table.setClassifications(Stream.of(traitNames).map(AtlasClassification::new).collect(Collectors.toList()));
return table;
}
protected AtlasEntity loadProcess(String name, String description, String user, List<AtlasEntity> inputTables, List<AtlasEntity> outputTables,
String queryText, String queryPlan, String queryId, String queryGraph, String... traitNames) {
AtlasEntity process = new AtlasEntity(HIVE_PROCESS_TYPE);
process.setAttribute("name", name);
process.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
process.setAttribute("description", description);
process.setAttribute("user", user);
process.setAttribute("startTime", System.currentTimeMillis());
process.setAttribute("endTime", System.currentTimeMillis() + 10000);
process.setAttribute("inputs", inputTables);
process.setAttribute("outputs", outputTables);
process.setAttribute("queryText", queryText);
process.setAttribute("queryPlan", queryPlan);
process.setAttribute("queryId", queryId);
process.setAttribute("queryGraph", queryGraph);
process.setClassifications(Stream.of(traitNames).map(AtlasClassification::new).collect(Collectors.toList()));
return process;
}
AtlasEntity view(String name, AtlasEntity dbId, List<AtlasEntity> inputTables, String... traitNames) {
AtlasEntity view = new AtlasEntity(VIEW_TYPE);
view.setAttribute("name", name);
view.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
view.setAttribute("db", dbId);
view.setAttribute("inputTables", inputTables);
view.setClassifications(Stream.of(traitNames).map(AtlasClassification::new).collect(Collectors.toList()));
return view;
}
AtlasEntity partition(List<String> values, AtlasEntity table, String... traitNames) {
AtlasEntity partition = new AtlasEntity(PARTITION_TYPE);
partition.setAttribute("values", values);
partition.setAttribute("table", table);
partition.setClassifications(Stream.of(traitNames).map(AtlasClassification::new).collect(Collectors.toList()));
return partition;
}
AtlasEntity datasetSubType(final String name, String owner) {
AtlasEntity datasetSubType = new AtlasEntity(DATASET_SUBTYPE);
datasetSubType.setAttribute(AtlasClient.REFERENCEABLE_ATTRIBUTE_NAME, name);
datasetSubType.setAttribute(AtlasClient.NAME, name);
datasetSubType.setAttribute("owner", owner);
return datasetSubType;
}
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.query;
import org.apache.atlas.TestModules;
import org.apache.atlas.discovery.EntityDiscoveryService;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.discovery.AtlasSearchResult;
import org.apache.atlas.runner.LocalSolrRunner;
import org.testng.annotations.AfterClass;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
import javax.inject.Inject;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
@Guice(modules = TestModules.TestOnlyModule.class)
public class DSLQueriesTest extends BasicTestSetup {
    // Service under test; executes DSL queries (injected by the Guice test module)
    @Inject
    private EntityDiscoveryService discoveryService;
    /**
     * Starts the embedded Solr runner, then loads the shared test datasets.
     * Solr must be up before entities are written so they get indexed.
     */
    @BeforeClass
    public void setup() throws Exception {
        LocalSolrRunner.start();
        setupTestData();
    }
    /** Stops the embedded Solr runner started in {@link #setup()}. */
    @AfterClass
    public void teardown() throws Exception {
        LocalSolrRunner.stop();
    }
    /**
     * Rows of { dslQuery, expectedEntityCount } exercising the DSL comparison
     * operators (&lt;, &lt;=, &gt;, &gt;=, =, !=) across the attribute types of the
     * employee dataset (date, boolean, and the numeric types). The expected
     * counts derive from the entities created by TestUtilsV2.createDeptEg2().
     */
    @DataProvider(name = "comparisonQueriesProvider")
    private Object[][] createComparisonQueries() {
        //create queries that exercise the comparison logic for
        //all of the different supported data types
        return new Object[][] {
            {"Person where (birthday < \"1950-01-01T02:35:58.440Z\" )", 0},
            {"Person where (birthday > \"1975-01-01T02:35:58.440Z\" )", 2},
            {"Person where (birthday >= \"1975-01-01T02:35:58.440Z\" )", 2},
            {"Person where (birthday <= \"1950-01-01T02:35:58.440Z\" )", 0},
            {"Person where (birthday = \"1975-01-01T02:35:58.440Z\" )", 0},
            {"Person where (birthday != \"1975-01-01T02:35:58.440Z\" )", 4},
            {"Person where (hasPets = true)", 2},
            {"Person where (hasPets = false)", 2},
            {"Person where (hasPets != false)", 2},
            {"Person where (hasPets != true)", 2},
            {"Person where (numberOfCars > 0)", 2},
            {"Person where (numberOfCars > 1)", 1},
            {"Person where (numberOfCars >= 1)", 2},
            {"Person where (numberOfCars < 2)", 3},
            {"Person where (numberOfCars <= 2)", 4},
            {"Person where (numberOfCars = 2)", 1},
            {"Person where (numberOfCars != 2)", 3},
            {"Person where (houseNumber > 0)", 2},
            {"Person where (houseNumber > 17)", 1},
            {"Person where (houseNumber >= 17)", 2},
            {"Person where (houseNumber < 153)", 3},
            {"Person where (houseNumber <= 153)", 4},
            {"Person where (houseNumber = 17)", 1},
            {"Person where (houseNumber != 17)", 3},
            {"Person where (carMileage > 0)", 2},
            {"Person where (carMileage > 13)", 1},
            {"Person where (carMileage >= 13)", 2},
            {"Person where (carMileage < 13364)", 3},
            {"Person where (carMileage <= 13364)", 4},
            {"Person where (carMileage = 13)", 1},
            {"Person where (carMileage != 13)", 3},
            {"Person where (shares > 0)", 2},
            {"Person where (shares > 13)", 2},
            {"Person where (shares >= 16000)", 1},
            {"Person where (shares < 13364)", 2},
            {"Person where (shares <= 15000)", 3},
            {"Person where (shares = 15000)", 1},
            {"Person where (shares != 1)", 4},
            {"Person where (salary > 0)", 2},
            {"Person where (salary > 100000)", 2},
            {"Person where (salary >= 200000)", 1},
            {"Person where (salary < 13364)", 2},
            {"Person where (salary <= 150000)", 3},
            {"Person where (salary = 12334)", 0},
            {"Person where (salary != 12344)", 4},
            {"Person where (age > 36)", 1},
            {"Person where (age > 49)", 1},
            {"Person where (age >= 49)", 1},
            {"Person where (age < 50)", 3},
            {"Person where (age <= 35)", 2},
            {"Person where (age = 35)", 0},
            {"Person where (age != 35)", 4}
        };
    }
@Test(dataProvider = "comparisonQueriesProvider")
public void testComparisonQueries(String query, int expected) throws AtlasBaseException {
AtlasSearchResult searchResult = discoveryService.searchUsingDslQuery(query, 25, 0);
assertNotNull(searchResult.getEntities());
assertEquals(searchResult.getEntities().size(), expected);
}
    /**
     * Rows of { dslQuery, expectedEntityCount } covering the basic DSL grammar:
     * from/where/select clauses, aliases, has, isa/is, trait searches, joins
     * and supertype lookups against the hive fixture dataset. Queries known to
     * be broken reference their JIRA in a trailing comment.
     */
    @DataProvider(name = "dslQueriesProvider")
    private Object[][] createDSLQueries() {
        return new Object[][]{
                {"hive_db as inst where inst.name=\"Reporting\" select inst as id, inst.name", 1},
                {"from hive_db as h select h as id", 3},
                {"from hive_db", 3},
                {"hive_db", 3},
                {"hive_db where hive_db.name=\"Reporting\"", 1},
                {"hive_db hive_db.name = \"Reporting\"", 1},
                {"hive_db where hive_db.name=\"Reporting\" select name, owner", 1},
                {"hive_db has name", 3},
                {"hive_db, hive_table", 10},
                {"View is JdbcAccess", 2},
                {"hive_db as db1, hive_table where db1.name = \"Reporting\"", 0}, //Not working - ATLAS-145
                // - Final working query -> discoveryService.searchByGremlin("L:{_var_0 = [] as Set;g.V().has(\"__typeName\", \"hive_db\").fill(_var_0);g.V().has(\"__superTypeNames\", \"hive_db\").fill(_var_0);_var_0._().as(\"db1\").in(\"__hive_table.db\").back(\"db1\").and(_().has(\"hive_db.name\", T.eq, \"Reporting\")).toList()}")
                /*
                {"hive_db, hive_process has name"}, //Invalid query
                {"hive_db where hive_db.name=\"Reporting\" and hive_db.createTime < " + System.currentTimeMillis()}
                */
                {"from hive_table", 10},
                {"hive_table", 10},
                {"hive_table isa Dimension", 3},
                {"hive_column where hive_column isa PII", 8},
                {"View is Dimension" , 2},
                // {"hive_column where hive_column isa PII select hive_column.name", 6}, //Not working - ATLAS-175
                {"hive_column select hive_column.name", 37},
                {"hive_column select name",37},
                {"hive_column where hive_column.name=\"customer_id\"", 6},
                {"from hive_table select hive_table.name", 10},
                {"hive_db where (name = \"Reporting\")", 1},
                {"hive_db where (name = \"Reporting\") select name as _col_0, owner as _col_1", 1},
                {"hive_db where hive_db is JdbcAccess", 0}, //Not supposed to work
                {"hive_db hive_table", 10},
                {"hive_db where hive_db has name", 3},
                {"hive_db as db1 hive_table where (db1.name = \"Reporting\")", 0}, //Not working -> ATLAS-145
                {"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 ", 1},
                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 ", 1},
                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 ", 1},
                /*
                todo: does not work - ATLAS-146
                {"hive_db where (name = \"Reporting\") and ((createTime + 1) > 0)"},
                {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") select db1.name
                as dbName, tab.name as tabName"},
                {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) or (db1.name = \"Reporting\") select db1.name
                as dbName, tab.name as tabName"},
                {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner
                select db1.name as dbName, tab.name as tabName"},
                {"hive_db as db1 hive_table as tab where ((db1.createTime + 1) > 0) and (db1.name = \"Reporting\") or db1 has owner
                select db1.name as dbName, tab.name as tabName"},
                */
                // trait searches
                {"Dimension", 5},
                {"JdbcAccess", 2},
                {"ETL", 5},
                {"Metric", 9},
                {"PII", 8},
                {"`Log Data`", 4},
                // Not sure what the expected rows should be, but since we didn't assign or do anything with the created
                // I assume it'll be zero
                {"`isa`", 0},
                /* Lineage queries are fired through ClosureQuery and are tested through HiveLineageJerseyResourceIt in webapp module.
                   Commenting out the below queries since DSL to Gremlin parsing/translation fails with lineage queries when there are array types
                   used within loop expressions which is the case with DataSet.inputs and outputs.`
                // Lineage
                {"Table LoadProcess outputTable"}, {"Table loop (LoadProcess outputTable)"},
                {"Table as _loop0 loop (LoadProcess outputTable) withPath"},
                {"Table as src loop (LoadProcess outputTable) as dest select src.name as srcTable, dest.name as "
                        + "destTable withPath"},
                */
                // {"hive_table as t, sd, hive_column as c where t.name=\"sales_fact\" select c.name as colName, c.dataType as "
                //        + "colType", 0}, //Not working - ATLAS-145 and ATLAS-166
                {"hive_table where name='sales_fact', db where name='Sales'", 1},
                {"hive_table where name='sales_fact', db where name='Reporting'", 0},
                {"hive_partition as p where values = ['2015-01-01']", 1},
                // {"StorageDesc select cols", 6} //Not working since loading of lists needs to be fixed yet
                //check supertypeNames
                {"DataSet where name='sales_fact'", 1},
                {"Asset where name='sales_fact'", 1}
        };
    }
@Test(dataProvider = "dslQueriesProvider")
public void testBasicDSL(String query, int expected) throws AtlasBaseException {
AtlasSearchResult searchResult = discoveryService.searchUsingDslQuery(query, 25, 0);
assertNotNull(searchResult.getEntities());
assertEquals(searchResult.getEntities().size(), expected);
}
    /**
     * Rows of { dslQuery, expectedEntityCount, limitParam, offsetParam }
     * verifying how limit/offset inside the query interact with the limit and
     * offset passed as API parameters (the more restrictive limit wins;
     * offsets add up).
     */
    @DataProvider(name = "dslExplicitLimitQueriesProvider")
    private Object[][] createDSLQueriesWithExplicitLimit() {
        return new Object[][]{
                {"hive_column", 37, 40, 0},//with higher limit all rows returned
                {"hive_column limit 10", 10, 50, 0},//lower limit in query
                {"hive_column select hive_column.name limit 10", 5, 5, 0},//lower limit in query param
                {"hive_column select hive_column.name withPath", 20, 20, 0},//limit only in params
                //with offset, only remaining rows returned
                {"hive_column select hive_column.name limit 40 withPath", 17, 40, 20},
                //with higher offset, no rows returned
                {"hive_column select hive_column.name limit 40 withPath", 0, 40, 40},
                //offset used from query
                {"hive_column select hive_column.name limit 40 offset 10", 27, 40, 0},
                //offsets in query and parameter added up
                {"hive_column select hive_column.name limit 40 offset 10", 17, 40, 10},
                //works with where clause
                {"hive_db where name = 'Reporting' limit 10 offset 0", 1, 40, 0},
                //works with joins
                {"hive_db, hive_table where db.name = 'Reporting' limit 10", 1, 1, 0},
                {"hive_column limit 25", 5, 10, 20}, //last page should return records limited by limit in query
                {"hive_column limit 25", 0, 10, 30}, //offset > limit returns 0 rows
        };
    }
@Test(dataProvider = "dslExplicitLimitQueriesProvider")
public void testExplicitDSL(String query, int expected, int limit, int offset) throws AtlasBaseException {
AtlasSearchResult searchResult = discoveryService.searchUsingDslQuery(query, limit, offset);
assertNotNull(searchResult.getEntities());
assertEquals(searchResult.getEntities().size(), expected);
}
    /**
     * Rows of { dslQuery, expectedEntityCount } exercising limit/offset
     * combinations written inside the DSL query itself (the API parameters are
     * fixed at 25/0 by the test). Counts derive from the hive fixture dataset:
     * 3 hive_db, 10 hive_table, 37 hive_column, etc.
     */
    @DataProvider(name = "dslLimitQueriesProvider")
    private Object[][] createDSLQueriesWithLimit() {
        return new Object[][]{
                {"hive_column limit 10 ", 10},
                {"hive_column select hive_column.name limit 10 ", 10},
                {"hive_column select hive_column.name withPath", 37},
                {"hive_column select hive_column.name limit 10 withPath", 10},
                {"from hive_db", 3},
                {"from hive_db limit 2", 2},
                {"from hive_db limit 2 offset 0", 2},
                {"from hive_db limit 2 offset 1", 2},
                {"from hive_db limit 3 offset 1", 2},
                {"hive_db", 3},
                {"hive_db where hive_db.name=\"Reporting\"", 1},
                {"hive_db where hive_db.name=\"Reporting\" or hive_db.name=\"Sales\" or hive_db.name=\"Logging\" limit 1 offset 1", 1},
                {"hive_db where hive_db.name=\"Reporting\" or hive_db.name=\"Sales\" or hive_db.name=\"Logging\" limit 1 offset 2", 1},
                {"hive_db where hive_db.name=\"Reporting\" or hive_db.name=\"Sales\" or hive_db.name=\"Logging\" limit 2 offset 1", 2},
                {"hive_db where hive_db.name=\"Reporting\" limit 10 ", 1},
                {"hive_db hive_db.name = \"Reporting\"", 1},
                {"hive_db where hive_db.name=\"Reporting\" select name, owner", 1},
                {"hive_db has name", 3},
                {"hive_db has name limit 2 offset 0", 2},
                {"hive_db has name limit 2 offset 1", 2},
                {"hive_db has name limit 10 offset 1", 2},
                {"hive_db has name limit 10 offset 0", 3},
                {"hive_db, hive_table", 10},
                {"hive_db, hive_table limit 5", 5},
                {"hive_db, hive_table limit 5 offset 0", 5},
                {"hive_db, hive_table limit 5 offset 5", 5},
                {"View is JdbcAccess", 2},
                {"View is JdbcAccess limit 1", 1},
                {"View is JdbcAccess limit 2 offset 1", 1},
                {"hive_db as db1, hive_table where db1.name = \"Reporting\"", 0}, //Not working - ATLAS-145
                {"from hive_table", 10},
                {"from hive_table limit 5", 5},
                {"from hive_table limit 5 offset 5", 5},
                {"hive_table", 10},
                {"hive_table limit 5", 5},
                {"hive_table limit 5 offset 5", 5},
                {"hive_table isa Dimension", 3},
                {"hive_table isa Dimension limit 2", 2},
                {"hive_table isa Dimension limit 2 offset 0", 2},
                {"hive_table isa Dimension limit 2 offset 1", 2},
                {"hive_table isa Dimension limit 3 offset 1", 2},
                {"hive_column where hive_column isa PII", 8},
                {"hive_column where hive_column isa PII limit 5", 5},
                {"hive_column where hive_column isa PII limit 5 offset 1", 5},
                {"hive_column where hive_column isa PII limit 5 offset 5", 3},
                {"View is Dimension" , 2},
                {"View is Dimension limit 1" , 1},
                {"View is Dimension limit 1 offset 1" , 1},
                {"View is Dimension limit 10 offset 1" , 1},
                {"hive_column select hive_column.name", 37},
                {"hive_column select hive_column.name limit 5", 5},
                {"hive_column select hive_column.name limit 5 offset 36", 1},
                {"hive_column select name", 37},
                {"hive_column select name limit 5", 5},
                {"hive_column select name limit 5 offset 36 ", 1},
                {"hive_column where hive_column.name=\"customer_id\"", 6},
                {"hive_column where hive_column.name=\"customer_id\" limit 2", 2},
                {"hive_column where hive_column.name=\"customer_id\" limit 2 offset 1", 2},
                {"hive_column where hive_column.name=\"customer_id\" limit 10 offset 3", 3},
                {"from hive_table select hive_table.name", 10},
                {"from hive_table select hive_table.name limit 5", 5},
                {"from hive_table select hive_table.name limit 5 offset 5", 5},
                {"hive_db where (name = \"Reporting\")", 1},
                {"hive_db where (name = \"Reporting\") limit 10", 1},
                {"hive_db where (name = \"Reporting\") select name as _col_0, owner as _col_1", 1},
                {"hive_db where (name = \"Reporting\") select name as _col_0, owner as _col_1 limit 10", 1},
                {"hive_db where hive_db is JdbcAccess", 0}, //Not supposed to work
                {"hive_db hive_table", 10},
                {"hive_db hive_table limit 5", 5},
                {"hive_db hive_table limit 5 offset 5", 5},
                {"hive_db where hive_db has name", 3},
                {"hive_db where hive_db has name limit 5", 3},
                {"hive_db where hive_db has name limit 2 offset 0", 2},
                {"hive_db where hive_db has name limit 2 offset 1", 2},
                {"hive_db as db1 hive_table where (db1.name = \"Reporting\")", 0}, //Not working -> ATLAS-145
                {"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 ", 1},
                {"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 limit 10", 1},
                {"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 limit 10 offset 1", 0},
                {"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 limit 10 offset 0", 1},
                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 ", 1},
                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 ", 1},
                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0", 1},
                {"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 limit 10 offset 5", 0},
                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 ", 1},
                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 0", 1},
                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10 offset 1", 0},
                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 10", 1},
                {"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 limit 0 offset 1", 0},
                // trait searches
                {"Dimension", 5},
                {"Dimension limit 2", 2},
                {"Dimension limit 2 offset 1", 2},
                {"Dimension limit 5 offset 4", 1},
                {"JdbcAccess", 2},
                {"JdbcAccess limit 5 offset 0", 2},
                {"JdbcAccess limit 2 offset 1", 1},
                {"JdbcAccess limit 1", 1},
                {"ETL", 5},
                {"ETL limit 2", 2},
                {"ETL limit 1", 1},
                {"ETL limit 1 offset 0", 1},
                {"ETL limit 2 offset 1", 2},
                {"Metric", 9},
                {"Metric limit 10", 9},
                {"Metric limit 2", 2},
                {"Metric limit 10 offset 1", 8},
                {"PII", 8},
                {"PII limit 10", 8},
                {"PII limit 2", 2},
                {"PII limit 10 offset 1", 7},
                {"`Log Data`", 4},
                {"`Log Data` limit 3", 3},
                {"`Log Data` limit 10 offset 2", 2},
                {"hive_table where name='sales_fact', db where name='Sales'", 1},
                {"hive_table where name='sales_fact', db where name='Sales' limit 10", 1},
                {"hive_table where name='sales_fact', db where name='Sales' limit 10 offset 1", 0},
                {"hive_table where name='sales_fact', db where name='Reporting'", 0},
                {"hive_table where name='sales_fact', db where name='Reporting' limit 10", 0},
                {"hive_table where name='sales_fact', db where name='Reporting' limit 10 offset 1", 0},
                {"hive_partition as p where values = ['2015-01-01']", 1},
                {"hive_partition as p where values = ['2015-01-01'] limit 10", 1},
                {"hive_partition as p where values = ['2015-01-01'] limit 10 offset 1", 0},
        };
    }
@Test(dataProvider = "dslLimitQueriesProvider")
public void testDSLLimitQueries(String query, int expected) throws AtlasBaseException {
AtlasSearchResult searchResult = discoveryService.searchUsingDslQuery(query, 25, 0);
assertNotNull(searchResult.getEntities());
assertEquals(searchResult.getEntities().size(), expected);
}
@DataProvider(name = "dslOrderByQueriesProvider")
private Object[][] createDSLQueriesWithOrderBy() {
return new Object[][]{
//test with alias
// {"from hive_db select hive_db.name as 'o' orderby o limit 3", 3, "name", isAscending},
{"from hive_db as h orderby h.owner limit 3", 3, "owner", true},
{"hive_column as c select c.name orderby hive_column.name ", 37, "c.name", true},
{"hive_column as c select c.name orderby hive_column.name limit 5", 5, "c.name", true},
{"hive_column as c select c.name orderby hive_column.name desc limit 5", 5, "c.name", false},
{"from hive_db orderby hive_db.owner limit 3", 3, "owner", true},
{"hive_column select hive_column.name orderby hive_column.name ", 37, "hive_column.name", true},
{"hive_column select hive_column.name orderby hive_column.name limit 5", 5, "hive_column.name", true},
{"hive_column select hive_column.name orderby hive_column.name desc limit 5", 5, "hive_column.name", false},
{"from hive_db orderby owner limit 3", 3, "owner", true},
{"hive_column select hive_column.name orderby name ", 37, "hive_column.name", true},
{"hive_column select hive_column.name orderby name limit 5", 5, "hive_column.name", true},
{"hive_column select hive_column.name orderby name desc limit 5", 5, "hive_column.name", false},
//Not working, the problem is in server code not figuring out how to sort. not sure if it is valid use case.
// {"hive_db hive_table orderby 'hive_db.owner'", 10, "owner", isAscending},
// {"hive_db hive_table orderby 'hive_db.owner' limit 5", 5, "owner", isAscending},
// {"hive_db hive_table orderby 'hive_db.owner' limit 5 offset 5", 3, "owner", isAscending},
{"hive_db select hive_db.description orderby hive_db.description limit 10 withPath", 3, "hive_db.description", true},
{"hive_db select hive_db.description orderby hive_db.description desc limit 10 withPath", 3, "hive_db.description", false},
{"hive_column select hive_column.name orderby hive_column.name limit 10 withPath", 10, "hive_column.name", true},
{"hive_column select hive_column.name orderby hive_column.name asc limit 10 withPath", 10, "hive_column.name", true},
{"hive_column select hive_column.name orderby hive_column.name desc limit 10 withPath", 10, "hive_column.name", false},
{"from hive_db orderby hive_db.owner limit 3", 3, "owner", true},
{"hive_db where hive_db.name=\"Reporting\" orderby 'owner'", 1, "owner", true},
{"hive_db where hive_db.name=\"Reporting\" orderby hive_db.owner limit 10 ", 1, "owner", true},
{"hive_db where hive_db.name=\"Reporting\" select name, owner orderby hive_db.name ", 1, "name", true},
{"hive_db has name orderby hive_db.owner limit 10 offset 0", 3, "owner", true},
{"from hive_table select hive_table.owner orderby hive_table.owner", 10, "hive_table.owner", true},
{"from hive_table select hive_table.owner orderby hive_table.owner limit 8", 8, "hive_table.owner", true},
{"hive_table orderby hive_table.name", 10, "name", true},
{"hive_table orderby hive_table.owner", 10, "owner", true},
{"hive_table orderby hive_table.owner limit 8", 8, "owner", true},
{"hive_table orderby hive_table.owner limit 8 offset 0", 8, "owner", true},
{"hive_table orderby hive_table.owner desc limit 8 offset 0", 8, "owner", false},
//Not working because of existing bug Atlas-175
// {"hive_table isa Dimension orderby hive_table.owner", 3, "hive_table.owner", isAscending},//order not working
// {"hive_table isa Dimension orderby hive_table.owner limit 3", 3, "hive_table.owner", isAscending},
// {"hive_table isa Dimension orderby hive_table.owner limit 3 offset 0", 3, "hive_table.owner", isAscending},
// {"hive_table isa Dimension orderby hive_table.owner desc limit 3 offset 0", 3, "hive_table.owner", !isAscending},
//
// {"hive_column where hive_column isa PII orderby hive_column.name", 6, "hive_column.name", isAscending},
// {"hive_column where hive_column isa PII orderby hive_column.name limit 5", 5, "hive_column.name", isAscending},
// {"hive_column where hive_column isa PII orderby hive_column.name limit 5 offset 1", 5, "hive_column.name", isAscending},
// {"hive_column where hive_column isa PII orderby hive_column.name desc limit 5 offset 1", 5, "hive_column.name", !isAscending},
{"hive_column select hive_column.name orderby hive_column.name ", 37, "hive_column.name", true},
{"hive_column select hive_column.name orderby hive_column.name limit 5", 5, "hive_column.name", true},
{"hive_column select hive_column.name orderby hive_column.name desc limit 5", 5, "hive_column.name", false},
{"hive_column select hive_column.name orderby hive_column.name limit 5 offset 28", 5, "hive_column.name", true},
{"hive_column select name orderby hive_column.name", 37, "name", true},
{"hive_column select name orderby hive_column.name limit 5", 5, "name", true},
{"hive_column select name orderby hive_column.name desc", 37, "name", false},
{"hive_column where hive_column.name=\"customer_id\" orderby hive_column.name", 6, "name", true},
{"hive_column where hive_column.name=\"customer_id\" orderby hive_column.name limit 2", 2, "name", true},
{"hive_column where hive_column.name=\"customer_id\" orderby hive_column.name limit 2 offset 1", 2, "name", true},
{"from hive_table select owner orderby hive_table.owner",10, "owner", true},
{"from hive_table select owner orderby hive_table.owner limit 5", 5, "owner", true},
{"from hive_table select owner orderby hive_table.owner desc limit 5", 5, "owner", false},
{"from hive_table select owner orderby hive_table.owner limit 5 offset 5", 5, "owner", true},
{"hive_db where (name = \"Reporting\") orderby hive_db.name", 1, "name", true},
{"hive_db where (name = \"Reporting\") orderby hive_db.name limit 10", 1, "name", true},
{"hive_db where hive_db has name orderby hive_db.owner", 3, "owner", true},
{"hive_db where hive_db has name orderby hive_db.owner limit 5", 3, "owner", true},
{"hive_db where hive_db has name orderby hive_db.owner limit 2 offset 0", 2, "owner", true},
{"hive_db where hive_db has name orderby hive_db.owner limit 2 offset 1", 2, "owner", true},
{"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 orderby '_col_1'", 1, "_col_1", true},
{"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 orderby '_col_1' limit 10", 1, "_col_1", true},
{"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 orderby '_col_1' limit 10 offset 1", 0, "_col_1", true},
{"hive_db where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 orderby '_col_1' limit 10 offset 0", 1, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' ", 1, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 ", 1, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 offset 0", 1, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime > \"2014-01-01\" ) select name as _col_0, createTime as _col_1 orderby '_col_1' limit 10 offset 5", 0, "_col_1", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' ", 1, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10 offset 0", 1, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10 offset 1", 0, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 10", 1, "_col_0", true},
{"hive_table where (name = \"sales_fact\" and createTime >= \"2014-12-11T02:35:58.440Z\" ) select name as _col_0, createTime as _col_1 orderby '_col_0' limit 0 offset 1", 0, "_col_0", true},
{"hive_column select hive_column.name orderby hive_column.name limit 10 withPath", 10, "hive_column.name", true},
{"hive_column select hive_column.name orderby hive_column.name limit 10 withPath", 10, "hive_column.name", true},
{"hive_table orderby 'hive_table.owner_notdefined'", 10, null, true},
};
}
/**
 * Verifies that DSL queries using 'orderby' return the expected number of
 * entities for each row supplied by {@code dslOrderByQueriesProvider}.
 *
 * NOTE(review): the {@code orderBy} and {@code ascending} parameters are not
 * yet consulted — actual sort-order validation is pending (see the TODO
 * below). Until then this only asserts the result-set size.
 */
@Test(dataProvider = "dslOrderByQueriesProvider")
public void testOrderByDSL(String query, int expected, String orderBy, boolean ascending) throws AtlasBaseException {
// Page size 25, offset 0 — large enough to cover every expected count in the provider.
AtlasSearchResult searchResult = discoveryService.searchUsingDslQuery(query, 25, 0);
assertNotNull(searchResult.getEntities());
assertEquals(searchResult.getEntities().size(), expected);
// TODO: Implement order checking here
}
/**
 * Supplies DSL queries exercising the 'like' operator (with '*' and '?'
 * wildcards), each paired with the number of entities the query is expected
 * to match against the test fixture data.
 */
@DataProvider(name = "dslLikeQueriesProvider")
private Object[][] createDslLikeQueries() {
    final Object[][] likeQueries = {
            new Object[]{"hive_table where name like \"sa?es*\"", 3},
            new Object[]{"hive_db where name like \"R*\"", 1},
            // Multiple like-clauses combined with 'or' / 'and'.
            new Object[]{"hive_db where hive_db.name like \"R???rt?*\" or hive_db.name like \"S?l?s\" or hive_db.name like\"Log*\"", 3},
            new Object[]{"hive_db where hive_db.name like \"R???rt?*\" and hive_db.name like \"S?l?s\" and hive_db.name like\"Log*\"", 0},
            // 'like' applied across a relationship (table -> db).
            new Object[]{"hive_table where name like 'sales*', db where name like 'Sa?es'", 1},
            new Object[]{"hive_table where name like 'sales*' and db.name like 'Sa?es'", 1},
            new Object[]{"hive_table where db.name like \"Sa*\"", 4},
            new Object[]{"hive_table where db.name like \"Sa*\" and name like \"*dim\"", 3},
    };
    return likeQueries;
}
/**
 * Verifies that DSL 'like' queries return the expected number of entities.
 *
 * Fix: the test previously referenced {@code "comparisonQueriesProvider"},
 * which belongs to a different test; as a result the rows defined by
 * {@code createDslLikeQueries()} (registered as
 * {@code "dslLikeQueriesProvider"}) were never executed. The provider name
 * now matches the like-query provider declared directly above this test.
 */
@Test(dataProvider = "dslLikeQueriesProvider")
public void testLikeQueries(String query, int expected) throws AtlasBaseException {
    // Page size 25, offset 0 — covers every expected count in the provider.
    AtlasSearchResult searchResult = discoveryService.searchUsingDslQuery(query, 25, 0);
    assertNotNull(searchResult.getEntities());
    assertEquals(searchResult.getEntities().size(), expected);
}
// TODO: Implement FieldValidator with new Data types
// @DataProvider(name = "dslGroupByQueriesProvider")
// private Object[][] createDSLGroupByQueries() {
// return new Object[][]{
// { "from Person as p, mentor as m groupby(m.name) select m.name, count()",
// new FieldValueValidator().withFieldNames("m.name", "count()").withExpectedValues("Max", 1)
// .withExpectedValues("Julius", 1) },
//
// // This variant of this query is currently failing. See OMS-335 for details.
// { "from Person as p, mentor groupby(mentor.name) select mentor.name, count()",
// new FieldValueValidator().withFieldNames("mentor.name", "count()").withExpectedValues("Max", 1)
// .withExpectedValues("Julius", 1) },
//
// { "from Person, mentor groupby(mentor.name) select mentor.name, count()",
// new FieldValueValidator().withFieldNames("mentor.name", "count()").withExpectedValues("Max", 1)
// .withExpectedValues("Julius", 1) },
//
// { "from Person, mentor as m groupby(m.name) select m.name, count()",
// new FieldValueValidator().withFieldNames("m.name", "count()").withExpectedValues("Max", 1)
// .withExpectedValues("Julius", 1) },
//
// { "from Person groupby (isOrganDonor) select count()",
// new FieldValueValidator().withFieldNames("count()").withExpectedValues(2)
// .withExpectedValues(2) },
// { "from Person groupby (isOrganDonor) select Person.isOrganDonor, count()",
// new FieldValueValidator().withFieldNames("Person.isOrganDonor", "count()")
// .withExpectedValues(true, 2).withExpectedValues(false, 2) },
//
// { "from Person groupby (isOrganDonor) select Person.isOrganDonor as 'organDonor', count() as 'count', max(Person.age) as 'max', min(Person.age) as 'min'",
// new FieldValueValidator().withFieldNames("organDonor", "max", "min", "count")
// .withExpectedValues(true, 50, 36, 2).withExpectedValues(false, 0, 0, 2) },
//
// { "from hive_db groupby (owner, name) select count() ", new FieldValueValidator()
// .withFieldNames("count()").withExpectedValues(1).withExpectedValues(1).withExpectedValues(1) },
//
// { "from hive_db groupby (owner, name) select hive_db.owner, hive_db.name, count() ",
// new FieldValueValidator().withFieldNames("hive_db.owner", "hive_db.name", "count()")
// .withExpectedValues("Jane BI", "Reporting", 1)
// .withExpectedValues("Tim ETL", "Logging", 1)
// .withExpectedValues("John ETL", "Sales", 1) },
//
// { "from hive_db groupby (owner) select count() ",
// new FieldValueValidator().withFieldNames("count()").withExpectedValues(1).withExpectedValues(1)
// .withExpectedValues(1) },
//
// { "from hive_db groupby (owner) select hive_db.owner, count() ",
// new FieldValueValidator().withFieldNames("hive_db.owner", "count()")
// .withExpectedValues("Jane BI", 1).withExpectedValues("Tim ETL", 1)
// .withExpectedValues("John ETL", 1) },
//
// { "from hive_db groupby (owner) select hive_db.owner, max(hive_db.name) ",
// new FieldValueValidator().withFieldNames("hive_db.owner", "max(hive_db.name)")
// .withExpectedValues("Tim ETL", "Logging").withExpectedValues("Jane BI", "Reporting")
// .withExpectedValues("John ETL", "Sales") },
//
// { "from hive_db groupby (owner) select max(hive_db.name) ",
// new FieldValueValidator().withFieldNames("max(hive_db.name)").withExpectedValues("Logging")
// .withExpectedValues("Reporting").withExpectedValues("Sales") },
//
// { "from hive_db groupby (owner) select owner, hive_db.name, min(hive_db.name) ",
// new FieldValueValidator().withFieldNames("owner", "hive_db.name", "min(hive_db.name)")
// .withExpectedValues("Tim ETL", "Logging", "Logging")
// .withExpectedValues("Jane BI", "Reporting", "Reporting")
// .withExpectedValues("John ETL", "Sales", "Sales") },
//
// { "from hive_db groupby (owner) select owner, min(hive_db.name) ",
// new FieldValueValidator().withFieldNames("owner", "min(hive_db.name)")
// .withExpectedValues("Tim ETL", "Logging").withExpectedValues("Jane BI", "Reporting")
// .withExpectedValues("John ETL", "Sales") },
//
// { "from hive_db groupby (owner) select min(name) ",
// new FieldValueValidator().withFieldNames("min(name)")
// .withExpectedValues("Reporting").withExpectedValues("Logging")
// .withExpectedValues("Sales") },
//
// { "from hive_db groupby (owner) select min('name') ",
// new FieldValueValidator().withFieldNames("min(\"name\")").withExpectedValues("name")
// .withExpectedValues("name").withExpectedValues("name") }, //finding the minimum of a constant literal expression...
//
// { "from hive_db groupby (owner) select name ",
// new FieldValueValidator().withFieldNames("name").withExpectedValues("Reporting")
// .withExpectedValues("Sales").withExpectedValues("Logging") },
//
// //implied group by
// { "from hive_db select count() ",
// new FieldValueValidator().withFieldNames("count()").withExpectedValues(3) },
// //implied group by
// { "from Person select count() as 'count', max(Person.age) as 'max', min(Person.age) as 'min'",
// new FieldValueValidator().withFieldNames("max", "min", "count").withExpectedValues(50, 0, 4) },
// //Sum
// { "from Person groupby (isOrganDonor) select count() as 'count', sum(Person.age) as 'sum'",
// new FieldValueValidator().withFieldNames("count", "sum").withExpectedValues(2, 0)
// .withExpectedValues(2, 86) },
// { "from Person groupby (isOrganDonor) select Person.isOrganDonor as 'organDonor', count() as 'count', sum(Person.age) as 'sum'",
// new FieldValueValidator().withFieldNames("organDonor", "count", "sum").withExpectedValues(false, 2, 0)
// .withExpectedValues(true, 2, 86) },
// { "from Person select count() as 'count', sum(Person.age) as 'sum'",
// new FieldValueValidator().withFieldNames("count", "sum").withExpectedValues(4, 86) },
// // tests to ensure that group by works with order by and limit
// { "from hive_db groupby (owner) select min(name) orderby name limit 2 ",
// new FieldValueValidator().withFieldNames("min(name)")
// .withExpectedValues("Logging").withExpectedValues("Reporting")
// },
//
// { "from hive_db groupby (owner) select min(name) orderby name desc limit 2 ",
// new FieldValueValidator().withFieldNames("min(name)")
// .withExpectedValues("Reporting").withExpectedValues("Sales")
// },
// };
// }
//
// @DataProvider(name = "dslObjectQueriesReturnIdProvider")
// private Object[][] createDSLObjectIdQueries() {
// return new Object[][] { {
// "from hive_db as h select h as id",
// new FieldValueValidator().withFieldNames("id")
// .withExpectedValues(idType).withExpectedValues(idType)
// .withExpectedValues(idType) }
// };
// }
}
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.atlas.query;
import org.antlr.v4.runtime.CharStreams;
import org.antlr.v4.runtime.CommonTokenStream;
import org.antlr.v4.runtime.TokenStream;
import org.apache.atlas.query.antlr4.AtlasDSLLexer;
import org.apache.atlas.query.antlr4.AtlasDSLParser;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.apache.commons.lang.StringUtils;
import org.testng.annotations.Test;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.mockito.Mockito.mock;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertNotNull;
import static org.testng.Assert.assertNull;
import static org.testng.Assert.assertTrue;
import static org.testng.Assert.fail;
/**
 * Unit tests for the ANTLR-based DSL-to-Gremlin translation.
 *
 * Each test parses a DSL string with the generated {@code AtlasDSLLexer} /
 * {@code AtlasDSLParser}, runs the {@code DSLVisitor} over the parse tree,
 * and compares the Gremlin produced by {@code QueryProcessor} against an
 * expected traversal string. A stubbed {@link QueryProcessor.TypeRegistryLookup}
 * answers type/attribute questions from hard-coded data, so no live type
 * registry or graph is needed.
 *
 * Fixes in this revision:
 * - {@code getParsedQuery} no longer swallows an {@code IOException} with
 *   {@code assertTrue(false)} and then dereferences a null lexer; it now
 *   fails with the query text and the underlying cause.
 * - Query bytes are decoded explicitly as UTF-8 instead of the platform
 *   default charset.
 * - {@code errorList} / {@code asContext} are final; {@code asContext} is
 *   typed to the {@code Map} interface.
 */
public class QueryProcessorTest {
    // Collects errors reported by the lookup during translation; tests assert
    // on the generated Gremlin text, so this list is only consulted indirectly.
    private final List<String> errorList = new ArrayList<>();

    @Test
    public void trait() {
        // A bare trait name translates to a filter on the vertex trait list.
        String expected = "g.V().has('__traitNames', within('PII')).limit(25).toList()";
        verify("PII", expected);
    }

    @Test()
    public void dimension() {
        // 'isa', 'is' and 'where ... is' are all equivalent trait filters.
        String expected = "g.V().has('__typeName', 'Table').has('__traitNames', within('Dimension')).limit(25).toList()";
        verify("Table isa Dimension", expected);
        verify("Table is Dimension", expected);
        verify("Table where Table is Dimension", expected);
    }

    @Test
    public void fromDB() {
        verify("from DB", "g.V().has('__typeName', 'DB').limit(25).toList()");
        // An explicit limit adds an order() step before limiting.
        verify("from DB limit 10", "g.V().has('__typeName', 'DB').order().limit(10).toList()");
    }

    @Test
    public void DBHasName() {
        String expected = "g.V().has('__typeName', 'DB').has('DB.name').limit(25).toList()";
        verify("DB has name", expected);
        verify("DB where DB has name", expected);
    }

    @Test
    public void DBasD() {
        // 'as' introduces a step alias.
        verify("DB as d", "g.V().has('__typeName', 'DB').as('d').limit(25).toList()");
    }

    @Test
    public void tableSelectColumns() {
        // Selecting a relationship attribute walks the edge and selects the step.
        verify("Table select Columns limit 10", "g.V().has('__typeName', 'Table').out('__Table.columns').as('s0').select('s0').order().limit(10).toList()");
    }

    @Test
    public void DBasDSelect() {
        String expected = "g.V().has('__typeName', 'DB').as('d').valueMap('DB.name', 'DB.owner')";
        verify("DB as d select d.name, d.owner", expected + ".limit(25).toList()");
        verify("DB as d select d.name, d.owner limit 10", expected + ".order().limit(10).toList()");
    }

    @Test
    public void DBTableFrom() {
        // 'DB, Table' walks the relationship edge between the two types.
        verify("DB, Table", "g.V().has('__typeName', 'DB').out('__DB.Table').limit(25).toList()");
    }

    @Test
    public void DBAsDSelectLimit() {
        verify("from DB limit 5", "g.V().has('__typeName', 'DB').order().limit(5).toList()");
        // offset translates to range(offset, offset + limit).
        verify("from DB limit 5 offset 2", "g.V().has('__typeName', 'DB').order().range(2, 2 + 5).limit(25).toList()");
    }

    @Test
    public void DBOrderBy() {
        String expected = "g.V().has('__typeName', 'DB').order().by('DB.name').limit(25).toList()";
        // verify("DB orderby name", expected);
        verify("from DB orderby name", expected);
    }

    @Test
    public void fromDBOrderByNameDesc() {
        // Descending order maps to the 'decr' comparator.
        verify("from DB orderby name DESC", "g.V().has('__typeName', 'DB').order().by('DB.name', decr).limit(25).toList()");
    }

    @Test
    public void fromDBSelect() {
        verify("from DB select DB.name, DB.owner", "g.V().has('__typeName', 'DB').valueMap('DB.name', 'DB.owner').limit(25).toList()");
    }

    @Test
    public void fromDBSelectGroupBy() {
        verify("from DB groupby (DB.owner)", "g.V().has('__typeName', 'DB').group().by('DB.owner').limit(25).toList()");
    }

    @Test
    public void whereClauseTextContains() {
        String expected = "g.V().has('__typeName', 'DB').has('DB.name', eq(\"Reporting\")).valueMap('DB.name', 'DB.owner').limit(25).toList()";
        verify("from DB where name = \"Reporting\" select name, owner)", expected);
        // 'like' wildcards become a regex passed to JanusGraph text search.
        verify("Table where Asset.name like \"Tab*\"",
                "g.V().has('__typeName', 'Table').has('Asset.name', org.janusgraph.core.attribute.Text.textContainsRegex(\"Tab.*\")).limit(25).toList()");
        verify("from DB where (name = \"Reporting\") select name, owner", expected);
        verify("from DB as db1 Table where (db1.name = \"Reporting\") select name, owner",
                "g.V().has('__typeName', 'DB').as('db1').out('__DB.Table').has('DB.name', eq(\"Reporting\")).valueMap('Column.name', 'Column.owner').limit(25).toList()");
    }

    @Test
    public void whereClauseWithAsTextContains() {
        verify("Table as t where t.name = \"testtable_1\" select t.name, t.owner)",
                "g.V().has('__typeName', 'Table').as('t').has('Table.name', eq(\"testtable_1\")).valueMap('Table.name', 'Table.owner').limit(25).toList()");
    }

    @Test
    public void multipleWhereClauses() {
        verify("Table where name=\"sales_fact\", columns as c select c.owner, c.name, c.dataType",
                "g.V().has('__typeName', 'Table').has('Table.name', eq(\"sales_fact\")).out('__Table.columns').as('c').valueMap('Column.owner', 'Column.name', 'Column.dataType').limit(25).toList()");
    }

    @Test
    public void subType() {
        // Querying a supertype includes its subtypes via within(...).
        verify("Asset select name, owner",
                "g.V().has('__typeName', within('Asset','Table')).valueMap('Asset.name', 'Asset.owner').limit(25).toList()");
    }

    @Test
    public void TraitWithSpace() {
        // Back-quoted identifiers allow spaces in trait names.
        verify("`Log Data`", "g.V().has('__typeName', 'Log Data').limit(25).toList()");
    }

    /**
     * Parses the DSL, translates it to Gremlin, and compares against the
     * expected traversal text.
     */
    private void verify(String dsl, String expectedGremlin) {
        AtlasDSLParser.QueryContext queryContext = getParsedQuery(dsl);
        String actualGremlin = getGremlinQuery(queryContext);
        assertEquals(actualGremlin, expectedGremlin);
    }

    /**
     * Lexes and parses the given DSL query, asserting that a parse tree was
     * produced without a recorded syntax exception.
     */
    private AtlasDSLParser.QueryContext getParsedQuery(String query) {
        AtlasDSLLexer lexer = null;
        InputStream stream = new ByteArrayInputStream(query.getBytes(StandardCharsets.UTF_8));
        try {
            lexer = new AtlasDSLLexer(CharStreams.fromStream(stream));
        } catch (IOException e) {
            // fail() throws, so a null lexer can never reach the token stream
            // below (previously assertTrue(false) here swallowed the cause).
            fail("Unable to read DSL query: " + query, e);
        }

        TokenStream inputTokenStream = new CommonTokenStream(lexer);
        AtlasDSLParser parser = new AtlasDSLParser(inputTokenStream);
        AtlasDSLParser.QueryContext queryContext = parser.query();

        assertNotNull(queryContext);
        assertNull(queryContext.exception);
        return queryContext;
    }

    /**
     * Runs the visitor-based translation over a parse tree and returns the
     * generated Gremlin; close() finalizes the traversal text.
     */
    private String getGremlinQuery(AtlasDSLParser.QueryContext queryContext) {
        QueryProcessor queryProcessor = new QueryProcessor(new TestTypeRegistryLookup(errorList, mock(AtlasTypeRegistry.class)));
        DSLVisitor qv = new DSLVisitor(queryProcessor);

        qv.visit(queryContext);
        queryProcessor.close();

        assertTrue(StringUtils.isNotEmpty(queryProcessor.getText()));
        return queryProcessor.getText();
    }

    /**
     * Stubbed lookup that answers type, attribute, and relationship questions
     * from hard-coded test data so translation can be exercised without a
     * live {@link AtlasTypeRegistry}.
     */
    private static class TestTypeRegistryLookup extends QueryProcessor.TypeRegistryLookup {
        // The type currently in scope for the query being translated.
        private String activeType;
        // Maps an alias introduced via 'as <name>' to the type that was
        // active when the alias was registered.
        private final Map<String, String> asContext = new HashMap<>();

        public TestTypeRegistryLookup(List<String> errorList, AtlasTypeRegistry typeRegistry) {
            super(errorList, typeRegistry);
        }

        public void registerActive(String typeName) {
            activeType = typeName;
        }

        public boolean hasActiveType() {
            return !StringUtils.isEmpty(activeType);
        }

        public void registerStepType(String stepName) {
            // Each alias may be registered only once per query.
            if (!asContext.containsKey(stepName)) {
                asContext.put(stepName, activeType);
            } else {
                addError(String.format("Multiple steps with same name detected: %s", stepName));
            }
        }

        public String getRelationshipEdgeLabelForActiveType(String item) {
            // Only two relationships exist in the stubbed model.
            if (item.equalsIgnoreCase("columns")) {
                return "__Table.columns";
            } else {
                return "__DB.Table";
            }
        }

        public String getQualifiedAttributeName(String item) {
            if (item.contains(".")) {
                String[] keyValue = StringUtils.split(item, ".");

                if (!asContext.containsKey(keyValue[0])) {
                    // Prefix is not a registered alias; treat the whole item
                    // as already qualified.
                    return item;
                } else {
                    // Re-qualify the remainder with the type the alias stands for.
                    String s = getStitchedString(keyValue, 1, keyValue.length - 1);
                    return getDefaultQualifiedAttributeNameFromType(asContext.get(keyValue[0]), s);
                }
            }

            return getDefaultQualifiedAttributeNameFromType(activeType, item);
        }

        public String getDefaultQualifiedAttributeNameFromType(String s, String item) {
            return StringUtils.isEmpty(s) ? item : String.format("%s.%s", s, item);
        }

        @Override
        public String getTypeFromEdge(String item) {
            return "Column";
        }

        @Override
        public boolean isAttributePrimitiveTypeForActiveType(String s) {
            return s.equalsIgnoreCase("name") || s.equalsIgnoreCase("owner");
        }

        @Override
        public boolean isTypeTrait(String name) {
            return name.equalsIgnoreCase("PII");
        }

        public boolean doesActiveTypeHaveSubTypes() {
            // NOTE(review): assumes registerActive was called first — NPE if
            // activeType is still null; acceptable for this stub.
            return activeType.equalsIgnoreCase("Asset");
        }

        public String getActiveTypeAndSubTypes() {
            String[] str = new String[]{"'Asset'", "'Table'"};
            return StringUtils.join(str, ",");
        }

        @Override
        public boolean isSameAsActive(String typeName) {
            return (activeType != null) && activeType.equalsIgnoreCase(typeName);
        }
    }
}
...@@ -15,16 +15,42 @@ ...@@ -15,16 +15,42 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.atlas.query;
import org.apache.atlas.query.Expressions.Expression; package org.apache.atlas.services;
import org.apache.atlas.query.Expressions.SelectExpression;
import com.google.inject.Inject;
import org.apache.atlas.TestModules;
import org.apache.atlas.discovery.EntityDiscoveryService;
import org.apache.atlas.exception.AtlasBaseException;
import org.apache.atlas.model.discovery.AtlasSearchResult;
import org.apache.atlas.repository.graphdb.AtlasGraph;
import org.apache.atlas.store.AtlasTypeDefStore;
import org.apache.atlas.type.AtlasTypeRegistry;
import org.junit.Assert;
import org.testng.annotations.Guice;
import org.testng.annotations.Test;
public class SelectExpressionHelper { @Guice(modules = TestModules.TestOnlyModule.class)
public static SelectExpression extractSelectExpression(Expression expr) { public class EntityDiscoveryServiceTest {
SelectExpression ret = null; @Inject
AtlasTypeRegistry typeRegistry;
return ret; @Inject
private AtlasTypeDefStore typeDefStore;
@Inject
private AtlasGraph atlasGraph;
@Inject
EntityDiscoveryService entityDiscoveryService;
@Test
public void dslTest() throws AtlasBaseException {
//String dslQuery = "DB where name = \"Reporting\"";
String dslQuery = "hive_table where Asset.name = \"testtable_x_0\"";
AtlasSearchResult result = entityDiscoveryService.searchUsingDslQuery(dslQuery, 20 , 0);
Assert.assertNotNull(result);
} }
} }
...@@ -471,7 +471,7 @@ public class QuickStartV2 { ...@@ -471,7 +471,7 @@ public class QuickStartV2 {
"DB where DB.name=\"Reporting\" select name, owner", "DB where DB.name=\"Reporting\" select name, owner",
"DB has name", "DB has name",
"DB where DB has name", "DB where DB has name",
"DB, Table", //--TODO: Fix "DB, Table", // Table, db; Table db works
"DB is JdbcAccess", "DB is JdbcAccess",
"from Table", "from Table",
"Table", "Table",
...@@ -483,12 +483,12 @@ public class QuickStartV2 { ...@@ -483,12 +483,12 @@ public class QuickStartV2 {
"Column where Column.name=\"customer_id\"", "Column where Column.name=\"customer_id\"",
"from Table select Table.name", "from Table select Table.name",
"DB where (name = \"Reporting\")", "DB where (name = \"Reporting\")",
"DB where (name = \"Reporting\") select name as _col_0, owner as _col_1", //--TODO: Fix "DB where (name = \"Reporting\") select name as _col_0, owner as _col_1",
"DB where DB is JdbcAccess", "DB where DB is JdbcAccess",
"DB where DB has name", "DB where DB has name",
"DB Table", //--TODO: Fix "DB Table",
"DB as db1 Table where (db1.name = \"Reporting\")", "DB as db1 Table where (db1.name = \"Reporting\")",
"DB where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 ", //--TODO: Fix "DB where (name = \"Reporting\") select name as _col_0, (createTime + 1) as _col_1 ", // N
DIMENSION_CLASSIFICATION, DIMENSION_CLASSIFICATION,
JDBC_CLASSIFICATION, JDBC_CLASSIFICATION,
ETL_CLASSIFICATION, ETL_CLASSIFICATION,
...@@ -505,6 +505,7 @@ public class QuickStartV2 { ...@@ -505,6 +505,7 @@ public class QuickStartV2 {
System.out.println("\nSample DSL Queries: "); System.out.println("\nSample DSL Queries: ");
for (String dslQuery : getDSLQueries()) { for (String dslQuery : getDSLQueries()) {
try {
AtlasSearchResult results = atlasClientV2.dslSearchWithParams(dslQuery, 10, 0); AtlasSearchResult results = atlasClientV2.dslSearchWithParams(dslQuery, 10, 0);
if (results != null) { if (results != null) {
...@@ -522,6 +523,9 @@ public class QuickStartV2 { ...@@ -522,6 +523,9 @@ public class QuickStartV2 {
} else { } else {
System.out.println("query [" + dslQuery + "] failed, results:" + results); System.out.println("query [" + dslQuery + "] failed, results:" + results);
} }
} catch (Exception e) {
System.out.println("query [" + dslQuery + "] execution failed!");
}
} }
} }
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment