fix: correct the grammar usage, especially in the parts targeting javascript (#109)
* build: ignore gen folder
* fix: correct the grammar when targeting TypeScript
* fix: move base Lexer and fix JavaScript syntax
* fix: correct the usage of JavaScript in grammar
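For context, the regenerated files are plain ANTLR TypeScript targets, so after this commit they are wired together in the usual lexer -> token stream -> parser pipeline. A minimal usage sketch (not part of the commit), assuming the antlr4 npm runtime and the generated PostgreSQL files shown in the diff below; the SQL text and the CharStreams.fromString call are illustrative:

import { CharStreams, CommonTokenStream } from 'antlr4';
import PostgreSQLLexer from './PostgreSQLLexer';
import PostgreSQLParser from './PostgreSQLParser';

// The lexer takes a CharStream and the parser takes a TokenStream (see the constructors in the diff).
const chars = CharStreams.fromString('SELECT 1;');
const lexer = new PostgreSQLLexer(chars);
const tokens = new CommonTokenStream(lexer);
const parser = new PostgreSQLParser(tokens);
// ...then invoke the grammar's start rule on parser.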
@@ -1,10 +1,11 @@
// dt-sql-parser/src/grammar/pgsql/PostgreSQLLexer.g4 by ANTLR 4.12.0
// Generated from /Users/ziv/github.com/dt-sql-parser/src/grammar/pgsql/PostgreSQLLexer.g4 by ANTLR 4.12.0
// noinspection ES6UnusedImports,JSUnusedGlobalSymbols,JSUnusedLocalSymbols
import {
ATN,
ATNDeserializer,
CharStream,
DecisionState, DFA,
Lexer,
LexerATNSimulator,
RuleContext,
PredictionContextCache,
@@ -12,7 +13,7 @@ import {
} from "antlr4";

import PostgreSQLLexerBase from './base/PostgreSQLLexerBase';
import PostgreSQLLexerBase from './PostgreSQLLexerBase';

export default class PostgreSQLLexer extends PostgreSQLLexerBase {
public static readonly Dollar = 1;
@@ -1195,9 +1196,11 @@ export default class PostgreSQLLexer extends PostgreSQLLexerBase {
"EndDollarStringConstant",
];

/* This field stores the tags which are used to detect the end of a dollar-quoted string literal.
*/

constructor(input: CharStream) {
super(input);
this._interp = new LexerATNSimulator(this, PostgreSQLLexer._ATN, PostgreSQLLexer.DecisionsToDFA, new PredictionContextCache());
@@ -1244,7 +1247,9 @@ export default class PostgreSQLLexer extends PostgreSQLLexerBase {
private Operator_action(localctx: RuleContext, actionIndex: number): void {
switch (actionIndex) {
case 0:
this.HandleLessLessGreaterGreater();
break;
}
}
@@ -1266,7 +1271,7 @@ export default class PostgreSQLLexer extends PostgreSQLLexerBase {
switch (actionIndex) {
case 3:
this.UnterminatedBlockCommentDebugAssert();
this.UnterminatedBlockCommentDebugAssert();
break;
}
@@ -1,19 +1,26 @@
// dt-sql-parser/src/grammar/pgsql/PostgreSQLParser.g4 by ANTLR 4.12.0
// Generated from /Users/ziv/github.com/dt-sql-parser/src/grammar/pgsql/PostgreSQLParser.g4 by ANTLR 4.12.0
// noinspection ES6UnusedImports,JSUnusedGlobalSymbols,JSUnusedLocalSymbols

import {
ATN,
ATNDeserializer, DecisionState, DFA, FailedPredicateException,
RecognitionException, NoViableAltException,
ParserATNSimulator,
RuleContext, ParserRuleContext, PredictionContextCache,
TerminalNode,
RecognitionException, NoViableAltException, BailErrorStrategy,
Parser, ParserATNSimulator,
RuleContext, ParserRuleContext, PredictionMode, PredictionContextCache,
TerminalNode, RuleNode,
Token, TokenStream,
Interval, IntervalSet
} from 'antlr4';
import PostgreSQLParserListener from "./PostgreSQLParserListener.js";
import PostgreSQLParserVisitor from "./PostgreSQLParserVisitor.js";

import PostgreSQLParserBase from './base/PostgreSQLParserBase';
// for running tests with parameters, TODO: discuss strategy for typed parameters in CI
// eslint-disable-next-line no-unused-vars
type int = number;

import PostgreSQLParserBase from './PostgreSQLParserBase';

export default class PostgreSQLParser extends PostgreSQLParserBase {
public static readonly Dollar = 1;
@@ -30836,7 +30843,9 @@ export default class PostgreSQLParser extends PostgreSQLParserBase {
this._errHandler.sync(this);
_alt = this._interp.adaptivePredict(this._input, 283, this._ctx);
} while (_alt !== 2 && _alt !== ATN.INVALID_ALT_NUMBER);
this.ParseRoutineBody(localctx);
this.ParseRoutineBody(localctx);
}
}
catch (re) {
@@ -55874,7 +55883,6 @@ export default class PostgreSQLParser extends PostgreSQLParserBase {
let _parentctx: ParserRuleContext = this._ctx;
let _parentState: number = this.state;
let localctx: B_exprContext = new B_exprContext(this, this._ctx, _parentState);
// @ts-ignore
let _prevctx: B_exprContext = localctx;
let _startState: number = 1192;
this.enterRecursionRule(localctx, 1192, PostgreSQLParser.RULE_b_expr, _p);
@@ -97208,10 +97216,6 @@ export class AltertsconfigurationstmtContext extends ParserRuleContext {
public CONFIGURATION(): TerminalNode {
return this.getToken(PostgreSQLParser.CONFIGURATION, 0);
}
// @ts-ignore
public any_name_list(): Any_nameContext[] {
return this.getTypedRuleContexts(Any_nameContext) as Any_nameContext[];
}
public any_name(i: number): Any_nameContext {
return this.getTypedRuleContext(Any_nameContext, i) as Any_nameContext;
}
@@ -97230,7 +97234,6 @@ export class AltertsconfigurationstmtContext extends ParserRuleContext {
public any_with(): Any_withContext {
return this.getTypedRuleContext(Any_withContext, 0) as Any_withContext;
}
// @ts-ignore
public any_name_list(): Any_name_listContext {
return this.getTypedRuleContext(Any_name_listContext, 0) as Any_name_listContext;
}
@@ -1,7 +1,7 @@
/* eslint-disable new-cap,camelcase */
import { Parser, CharStreams, CommonTokenStream } from 'antlr4';
import PostgreSQLLexer from '../PostgreSQLLexer';
import PostgreSQLParser from '../PostgreSQLParser';
import PostgreSQLLexer from './PostgreSQLLexer';
import PostgreSQLParser from './PostgreSQLParser';

export default class PostgreSQLParserBase extends Parser {
@@ -1,4 +1,4 @@
// dt-sql-parser/src/grammar/pgsql/PostgreSQLParser.g4 by ANTLR 4.12.0
// Generated from /Users/ziv/github.com/dt-sql-parser/src/grammar/pgsql/PostgreSQLParser.g4 by ANTLR 4.12.0

import {ParseTreeListener} from "antlr4";
@@ -1,4 +1,4 @@
// dt-sql-parser/src/grammar/pgsql/PostgreSQLParser.g4 by ANTLR 4.12.0
// Generated from /Users/ziv/github.com/dt-sql-parser/src/grammar/pgsql/PostgreSQLParser.g4 by ANTLR 4.12.0

import {ParseTreeVisitor} from 'antlr4';
@@ -1,18 +1,17 @@
// dt-sql-parser/src/grammar/plsql/PlSqlLexer.g4 by ANTLR 4.12.0
// Generated from /Users/ziv/github.com/dt-sql-parser/src/grammar/plsql/PlSqlLexer.g4 by ANTLR 4.12.0
// noinspection ES6UnusedImports,JSUnusedGlobalSymbols,JSUnusedLocalSymbols
import {
ATN,
ATNDeserializer,
CharStream,
DecisionState, DFA,
Lexer,
LexerATNSimulator,
RuleContext,
PredictionContextCache,
Token
} from "antlr4";

import PlSqlBaseLexer from './base/PlSqlBaseLexer';
import PlSqlBaseLexer from './PlSqlBaseLexer';

export default class PlSqlLexer extends PlSqlBaseLexer {
public static readonly ABORT = 1;
@@ -1,20 +1,24 @@
// dt-sql-parser/src/grammar/plsql/PlSqlParser.g4 by ANTLR 4.12.0
// Generated from /Users/ziv/github.com/dt-sql-parser/src/grammar/plsql/PlSqlParser.g4 by ANTLR 4.12.0
// noinspection ES6UnusedImports,JSUnusedGlobalSymbols,JSUnusedLocalSymbols

import {
ATN,
ATNDeserializer, DecisionState, DFA, FailedPredicateException,
RecognitionException, NoViableAltException, ParserATNSimulator,
RuleContext, ParserRuleContext, PredictionContextCache,
TerminalNode,
Token, CommonTokenStream,
RecognitionException, NoViableAltException, BailErrorStrategy,
Parser, ParserATNSimulator,
RuleContext, ParserRuleContext, PredictionMode, PredictionContextCache,
TerminalNode, RuleNode,
Token, TokenStream,
Interval, IntervalSet
} from 'antlr4';

import PlSqlParserListener from "./PlSqlParserListener.js";
import PlSqlParserVisitor from "./PlSqlParserVisitor.js";

import PlSqlBaseParser from './base/PlSqlBaseParser';
// for running tests with parameters, TODO: discuss strategy for typed parameters in CI
// eslint-disable-next-line no-unused-vars
type int = number;

import PlSqlBaseParser from './PlSqlBaseParser';

export default class PlSqlParser extends PlSqlBaseParser {
public static readonly ABORT = 1;
@@ -6984,7 +6988,7 @@ export default class PlSqlParser extends PlSqlBaseParser {
return new FailedPredicateException(this, predicate, message);
}

constructor(input: CommonTokenStream) {
constructor(input: TokenStream) {
super(input);
this._interp = new ParserATNSimulator(this, PlSqlParser._ATN, PlSqlParser.DecisionsToDFA, new PredictionContextCache());
}
@@ -55509,7 +55513,7 @@ export default class PlSqlParser extends PlSqlBaseParser {
{
this.state = 6733;
if (!(this.isVersion10())) {
throw this.createFailedPredicateException("isVersion10()");
throw this.createFailedPredicateException("this.isVersion10()");
}
this.state = 6734;
this.match(PlSqlParser.STANDBY);
@@ -100098,7 +100102,6 @@ export default class PlSqlParser extends PlSqlBaseParser {
let _parentctx: ParserRuleContext = this._ctx;
let _parentState: number = this.state;
let localctx: Logical_expressionContext = new Logical_expressionContext(this, this._ctx, _parentState);
// @ts-ignore
let _prevctx: Logical_expressionContext = localctx;
let _startState: number = 1238;
this.enterRecursionRule(localctx, 1238, PlSqlParser.RULE_logical_expression, _p);
@@ -100424,7 +100427,6 @@ export default class PlSqlParser extends PlSqlBaseParser {
let _parentctx: ParserRuleContext = this._ctx;
let _parentState: number = this.state;
let localctx: Relational_expressionContext = new Relational_expressionContext(this, this._ctx, _parentState);
// @ts-ignore
let _prevctx: Relational_expressionContext = localctx;
let _startState: number = 1246;
this.enterRecursionRule(localctx, 1246, PlSqlParser.RULE_relational_expression, _p);
@@ -100800,7 +100802,6 @@ export default class PlSqlParser extends PlSqlBaseParser {
let _parentctx: ParserRuleContext = this._ctx;
let _parentState: number = this.state;
let localctx: ConcatenationContext = new ConcatenationContext(this, this._ctx, _parentState);
// @ts-ignore
let _prevctx: ConcatenationContext = localctx;
let _startState: number = 1256;
this.enterRecursionRule(localctx, 1256, PlSqlParser.RULE_concatenation, _p);
@@ -1,4 +1,4 @@
// dt-sql-parser/src/grammar/plsql/PlSqlParser.g4 by ANTLR 4.12.0
// Generated from /Users/ziv/github.com/dt-sql-parser/src/grammar/plsql/PlSqlParser.g4 by ANTLR 4.12.0

import {ParseTreeListener} from "antlr4";
@@ -1,4 +1,4 @@
// dt-sql-parser/src/grammar/plsql/PlSqlParser.g4 by ANTLR 4.12.0
// Generated from /Users/ziv/github.com/dt-sql-parser/src/grammar/plsql/PlSqlParser.g4 by ANTLR 4.12.0

import {ParseTreeVisitor} from 'antlr4';
@@ -1,19 +1,17 @@
// dt-sql-parser/src/grammar/spark/SparkSql.g4 by ANTLR 4.12.0
// Generated from /Users/ziv/github.com/dt-sql-parser/src/grammar/spark/SparkSql.g4 by ANTLR 4.12.0
// noinspection ES6UnusedImports,JSUnusedGlobalSymbols,JSUnusedLocalSymbols
import {
ATN,
ATNDeserializer,
CharStream,
DecisionState, DFA,
Lexer,
LexerATNSimulator,
RuleContext,
PredictionContextCache,
Token
} from "antlr4";

import SparkSqlBaseLexer from "./base/SparkSqlBaseLexer";

export default class SparkSqlLexer extends SparkSqlBaseLexer {
export default class SparkSqlLexer extends Lexer {
public static readonly T__0 = 1;
public static readonly T__1 = 2;
public static readonly T__2 = 3;
@@ -607,6 +605,41 @@ export default class SparkSqlLexer extends SparkSqlBaseLexer {
"WS", "UNRECOGNIZED",
];

/**
* Verify whether current token is a valid decimal token (which contains dot).
* Returns true if the character that follows the token is not a digit or letter or underscore.
*
* For example:
* For char stream "2.3", "2." is not a valid decimal token, because it is followed by digit '3'.
* For char stream "2.3_", "2.3" is not a valid decimal token, because it is followed by '_'.
* For char stream "2.3W", "2.3" is not a valid decimal token, because it is followed by 'W'.
* For char stream "12.0D 34.E2+0.12 " 12.0D is a valid decimal token because it is followed
* by a space. 34.E2 is a valid decimal token because it is followed by symbol '+'
* which is not a digit or letter or underscore.
*/
isValidDecimal() {
let nextChar = this.fromCodePoint(this._input.LA(1));
return !(nextChar >= 'A' && nextChar <= 'Z' || nextChar >= '0' && nextChar <= '9' || nextChar == '_')
}

/**
* This method will be called when we see '/*' and try to match it as a bracketed comment.
* If the next character is '+', it should be parsed as hint later, and we cannot match
* it as a bracketed comment.
*
* Returns true if the next character is '+'.
*/
isHint() {
let nextChar = this.fromCodePoint(this._input.LA(1));
return nextChar == '+'
}

fromCodePoint(codePoint) {
return String.fromCodePoint(codePoint);
}

constructor(input: CharStream) {
super(input);
this._interp = new LexerATNSimulator(this, SparkSqlLexer._ATN, SparkSqlLexer.DecisionsToDFA, new PredictionContextCache());
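The lookahead check used by isValidDecimal() and isHint() above can be exercised on its own. A small standalone TypeScript sketch of the same character test (not the generated lexer; sample characters taken from the doc comment):

// Mirrors the predicate above: the character after the candidate token must not be
// an uppercase letter, a digit, or an underscore.
function followsValidDecimal(nextChar: string): boolean {
    return !((nextChar >= 'A' && nextChar <= 'Z') || (nextChar >= '0' && nextChar <= '9') || nextChar === '_');
}

console.log(followsValidDecimal('3')); // false: "2." followed by the digit '3' is not a valid decimal token
console.log(followsValidDecimal('_')); // false: "2.3_" is rejected the same way
console.log(followsValidDecimal('+')); // true: "34.E2" followed by '+' is accepted
console.log(followsValidDecimal(' ')); // true: "12.0D" followed by a space is accepted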
@@ -1,4 +1,4 @@
// dt-sql-parser/src/grammar/spark/SparkSql.g4 by ANTLR 4.12.0
// Generated from /Users/ziv/github.com/dt-sql-parser/src/grammar/spark/SparkSql.g4 by ANTLR 4.12.0

import {ParseTreeListener} from "antlr4";
@@ -1,24 +1,22 @@
// dt-sql-parser/src/grammar/spark/SparkSql.g4 by ANTLR 4.12.0
// Generated from /Users/ziv/github.com/dt-sql-parser/src/grammar/spark/SparkSql.g4 by ANTLR 4.12.0
// noinspection ES6UnusedImports,JSUnusedGlobalSymbols,JSUnusedLocalSymbols

import {
ATN,
ATNDeserializer, DecisionState, DFA, FailedPredicateException,
RecognitionException, NoViableAltException,
RecognitionException, NoViableAltException, BailErrorStrategy,
Parser, ParserATNSimulator,
RuleContext, ParserRuleContext, PredictionContextCache,
TerminalNode,
RuleContext, ParserRuleContext, PredictionMode, PredictionContextCache,
TerminalNode, RuleNode,
Token, TokenStream,
Interval, IntervalSet
} from 'antlr4';
import SparkSqlListener from "./SparkSqlListener.js";
import SparkSqlVisitor from "./SparkSqlVisitor.js";

// for running tests with parameters, TODO: discuss strategy for typed parameters in CI
// eslint-disable-next-line no-unused-vars

const legacy_setops_precedence_enbled = false;
const legacy_exponent_literal_as_decimal_enabled = false;
const SQL_standard_keyword_behavior = false;
type int = number;

export default class SparkSqlParser extends Parser {
public static readonly T__0 = 1;
@@ -882,6 +880,22 @@ export default class SparkSqlParser extends Parser {
return new FailedPredicateException(this, predicate, message);
}

/**
* When false, INTERSECT is given the greater precedence over the other set
* operations (UNION, EXCEPT and MINUS) as per the SQL standard.
*/
public legacy_setops_precedence_enbled = false;
/**
* When false, a literal with an exponent would be converted into
* double type rather than decimal type.
*/
public legacy_exponent_literal_as_decimal_enabled = false;
/**
* When true, the behavior of keywords follows ANSI SQL standard.
*/
public SQL_standard_keyword_behavior = false;

constructor(input: TokenStream) {
super(input);
this._interp = new ParserATNSimulator(this, SparkSqlParser._ATN, SparkSqlParser.DecisionsToDFA, new PredictionContextCache());
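These three options are now public instance fields rather than module-level constants, so they can be toggled per parser instance before a start rule is invoked, and the semantic predicates read them through this. A minimal sketch (not part of the commit), assuming the generated SparkSqlLexer and SparkSqlParser from this diff and the antlr4 npm runtime; the SQL text and the CharStreams.fromString call are illustrative:

import { CharStreams, CommonTokenStream } from 'antlr4';
import SparkSqlLexer from './SparkSqlLexer';
import SparkSqlParser from './SparkSqlParser';

const lexer = new SparkSqlLexer(CharStreams.fromString('SELECT 1 UNION SELECT 2;'));
const parser = new SparkSqlParser(new CommonTokenStream(lexer));
// identifier_sempred, number_sempred, etc. now consult these fields on the instance.
parser.SQL_standard_keyword_behavior = true;
parser.legacy_exponent_literal_as_decimal_enabled = false;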
@@ -6339,8 +6353,8 @@ export default class SparkSqlParser extends Parser {
throw this.createFailedPredicateException("this.precpred(this._ctx, 3)");
}
this.state = 1614;
if (!(legacy_setops_precedence_enbled)) {
throw this.createFailedPredicateException("legacy_setops_precedence_enbled");
if (!(this.legacy_setops_precedence_enbled)) {
throw this.createFailedPredicateException("this.legacy_setops_precedence_enbled");
}
this.state = 1615;
(localctx as SetOperationContext)._operator = this._input.LT(1);
@@ -6376,8 +6390,8 @@ export default class SparkSqlParser extends Parser {
throw this.createFailedPredicateException("this.precpred(this._ctx, 2)");
}
this.state = 1621;
if (!(!legacy_setops_precedence_enbled)) {
throw this.createFailedPredicateException("!legacy_setops_precedence_enbled");
if (!(!this.legacy_setops_precedence_enbled)) {
throw this.createFailedPredicateException("!this.legacy_setops_precedence_enbled");
}
this.state = 1622;
(localctx as SetOperationContext)._operator = this.match(SparkSqlParser.INTERSECT);
@@ -6405,8 +6419,8 @@ export default class SparkSqlParser extends Parser {
throw this.createFailedPredicateException("this.precpred(this._ctx, 1)");
}
this.state = 1628;
if (!(!legacy_setops_precedence_enbled)) {
throw this.createFailedPredicateException("!legacy_setops_precedence_enbled");
if (!(!this.legacy_setops_precedence_enbled)) {
throw this.createFailedPredicateException("!this.legacy_setops_precedence_enbled");
}
this.state = 1629;
(localctx as SetOperationContext)._operator = this._input.LT(1);
@@ -12272,7 +12286,7 @@ export default class SparkSqlParser extends Parser {
this.state = 2894;
localctx._frameType = this.match(SparkSqlParser.RANGE);
this.state = 2895;
localctx._start = this.frameBound();
localctx._frameStart = this.frameBound();
}
break;
case 2:
@@ -12281,7 +12295,7 @@ export default class SparkSqlParser extends Parser {
this.state = 2896;
localctx._frameType = this.match(SparkSqlParser.ROWS);
this.state = 2897;
localctx._start = this.frameBound();
localctx._frameStart = this.frameBound();
}
break;
case 3:
@@ -12292,7 +12306,7 @@ export default class SparkSqlParser extends Parser {
this.state = 2899;
this.match(SparkSqlParser.BETWEEN);
this.state = 2900;
localctx._start = this.frameBound();
localctx._frameStart = this.frameBound();
this.state = 2901;
this.match(SparkSqlParser.AND);
this.state = 2902;
@@ -12307,7 +12321,7 @@ export default class SparkSqlParser extends Parser {
this.state = 2905;
this.match(SparkSqlParser.BETWEEN);
this.state = 2906;
localctx._start = this.frameBound();
localctx._frameStart = this.frameBound();
this.state = 2907;
this.match(SparkSqlParser.AND);
this.state = 2908;
@@ -12642,8 +12656,8 @@ export default class SparkSqlParser extends Parser {
this.enterOuterAlt(localctx, 2);
{
this.state = 2956;
if (!(!SQL_standard_keyword_behavior)) {
throw this.createFailedPredicateException("!SQL_standard_keyword_behavior");
if (!(!this.SQL_standard_keyword_behavior)) {
throw this.createFailedPredicateException("!this.SQL_standard_keyword_behavior");
}
this.state = 2957;
this.strictNonReserved();
@@ -12694,8 +12708,8 @@ export default class SparkSqlParser extends Parser {
this.enterOuterAlt(localctx, 3);
{
this.state = 2962;
if (!(SQL_standard_keyword_behavior)) {
throw this.createFailedPredicateException("SQL_standard_keyword_behavior");
if (!(this.SQL_standard_keyword_behavior)) {
throw this.createFailedPredicateException("this.SQL_standard_keyword_behavior");
}
this.state = 2963;
this.ansiNonReserved();
@@ -12706,8 +12720,8 @@ export default class SparkSqlParser extends Parser {
this.enterOuterAlt(localctx, 4);
{
this.state = 2964;
if (!(!SQL_standard_keyword_behavior)) {
throw this.createFailedPredicateException("!SQL_standard_keyword_behavior");
if (!(!this.SQL_standard_keyword_behavior)) {
throw this.createFailedPredicateException("!this.SQL_standard_keyword_behavior");
}
this.state = 2965;
this.nonReserved();
@@ -12768,8 +12782,8 @@ export default class SparkSqlParser extends Parser {
this.enterOuterAlt(localctx, 1);
{
this.state = 2970;
if (!(!legacy_exponent_literal_as_decimal_enabled)) {
throw this.createFailedPredicateException("!legacy_exponent_literal_as_decimal_enabled");
if (!(!this.legacy_exponent_literal_as_decimal_enabled)) {
throw this.createFailedPredicateException("!this.legacy_exponent_literal_as_decimal_enabled");
}
this.state = 2972;
this._errHandler.sync(this);
@@ -12790,8 +12804,8 @@ export default class SparkSqlParser extends Parser {
this.enterOuterAlt(localctx, 2);
{
this.state = 2975;
if (!(!legacy_exponent_literal_as_decimal_enabled)) {
throw this.createFailedPredicateException("!legacy_exponent_literal_as_decimal_enabled");
if (!(!this.legacy_exponent_literal_as_decimal_enabled)) {
throw this.createFailedPredicateException("!this.legacy_exponent_literal_as_decimal_enabled");
}
this.state = 2977;
this._errHandler.sync(this);
@@ -12812,8 +12826,8 @@ export default class SparkSqlParser extends Parser {
this.enterOuterAlt(localctx, 3);
{
this.state = 2980;
if (!(legacy_exponent_literal_as_decimal_enabled)) {
throw this.createFailedPredicateException("legacy_exponent_literal_as_decimal_enabled");
if (!(this.legacy_exponent_literal_as_decimal_enabled)) {
throw this.createFailedPredicateException("this.legacy_exponent_literal_as_decimal_enabled");
}
this.state = 2982;
this._errHandler.sync(this);
@@ -13173,15 +13187,15 @@ export default class SparkSqlParser extends Parser {
case 0:
return this.precpred(this._ctx, 3);
case 1:
return legacy_setops_precedence_enbled;
return this.legacy_setops_precedence_enbled;
case 2:
return this.precpred(this._ctx, 2);
case 3:
return !legacy_setops_precedence_enbled;
return !this.legacy_setops_precedence_enbled;
case 4:
return this.precpred(this._ctx, 1);
case 5:
return !legacy_setops_precedence_enbled;
return !this.legacy_setops_precedence_enbled;
}
return true;
}
@@ -13223,27 +13237,27 @@ export default class SparkSqlParser extends Parser {
private identifier_sempred(localctx: IdentifierContext, predIndex: number): boolean {
switch (predIndex) {
case 16:
return !SQL_standard_keyword_behavior;
return !this.SQL_standard_keyword_behavior;
}
return true;
}
private strictIdentifier_sempred(localctx: StrictIdentifierContext, predIndex: number): boolean {
switch (predIndex) {
case 17:
return SQL_standard_keyword_behavior;
return this.SQL_standard_keyword_behavior;
case 18:
return !SQL_standard_keyword_behavior;
return !this.SQL_standard_keyword_behavior;
}
return true;
}
private number_sempred(localctx: NumberContext, predIndex: number): boolean {
switch (predIndex) {
case 19:
return !legacy_exponent_literal_as_decimal_enabled;
return !this.legacy_exponent_literal_as_decimal_enabled;
case 20:
return !legacy_exponent_literal_as_decimal_enabled;
return !this.legacy_exponent_literal_as_decimal_enabled;
case 21:
return legacy_exponent_literal_as_decimal_enabled;
return this.legacy_exponent_literal_as_decimal_enabled;
}
return true;
}
@@ -24999,7 +25013,7 @@ export class WindowDefContext extends WindowSpecContext {

export class WindowFrameContext extends ParserRuleContext {
public _frameType!: Token;
public _start!: FrameBoundContext;
public _frameStart!: FrameBoundContext;
public _end!: FrameBoundContext;
constructor(parser?: SparkSqlParser, parent?: ParserRuleContext, invokingState?: number) {
super(parent, invokingState);
@@ -1,4 +1,4 @@
// dt-sql-parser/src/grammar/spark/SparkSql.g4 by ANTLR 4.12.0
// Generated from /Users/ziv/github.com/dt-sql-parser/src/grammar/spark/SparkSql.g4 by ANTLR 4.12.0

import {ParseTreeVisitor} from 'antlr4';
@@ -1,25 +0,0 @@
import { Lexer } from "antlr4";

export default class SparkSqlBaseLexer extends Lexer {

isValidDecimal() {
let nextChar = this.fromCodePoint(this._input.LA(1));
return !(nextChar >= 'A' && nextChar <= 'Z' || nextChar >= '0' && nextChar <= '9' || nextChar == '_')
}

/**
* This method will be called when we see '/*' and try to match it as a bracketed comment.
* If the next character is '+', it should be parsed as hint later, and we cannot match
* it as a bracketed comment.
*
* Returns true if the next character is '+'.
*/
isHint() {
let nextChar = this.fromCodePoint(this._input.LA(1));
return nextChar == '+'
}

fromCodePoint(codePoint) {
return String.fromCodePoint(codePoint);
}
}