refactor: migrate antlr4 v4.12.0 to antlr4ts(4.9.0) (#106)
* build: ignore gen folder
* refactor: remove useless code
* fix: correct the Javascript usage in grammar
* refactor: move to antlr4ts
* fix: remove useless
* fix: update grammars for javascript target
* refactor: migrate to antlr4ts
* refactor: migrate to antlr4ts
* refactor: implements ParserErrorListener
* fix: rename the start reserved word
* refactor: remove unused import
* refactor: migrate to antlr4ts
* test: update the expects of test cases
* refactor: migrate hive to antlr4ts
* refactor: update the incompatible syntax for antlr4ts
* refactor: migrate pgsql grammar to antlr4ts, increasing tests
* refactor: migrate the plsql to antlr4ts
* build: remove unused config
* build: migrate to antlr4ts
* build: migrate ts-jest to @swc/jest
* refactor: migrate to antlr4ts
* build: migrate ts-jest to @swc/jest
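The core of the change is that the generated parsers and the hand-written base classes now target the antlr4ts runtime instead of the antlr4 JavaScript runtime, and the Jest transform moves from ts-jest to @swc/jest. For orientation, here is a minimal sketch of the antlr4ts parsing pipeline the migrated PostgreSQL base classes rely on; it mirrors the getPostgreSQLParser helper in the diff further down, and the import paths and the program() entry rule are taken from that diff rather than verified against the published package.

import { CharStreams, CommonTokenStream } from 'antlr4ts';
import { PostgreSQLLexer } from './PostgreSQLLexer';
import { PostgreSQLParser } from './PostgreSQLParser';

// Minimal antlr4ts pipeline: string -> CharStream -> Lexer -> TokenStream -> Parser.
// This is a sketch for orientation, not code from this commit.
function createPostgreSQLParser(sql: string): PostgreSQLParser {
    const charStream = CharStreams.fromString(sql);
    const lexer = new PostgreSQLLexer(charStream);
    const tokens = new CommonTokenStream(lexer);
    return new PostgreSQLParser(tokens);
}

// program() is the entry rule used by GetParsedSqlTree in the diff below.
const tree = createPostgreSQLParser('SELECT 1;').program();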
File diff suppressed because one or more lines are too long
@@ -548,11 +548,9 @@ EscapeStringConstant=547
 UnterminatedEscapeStringConstant=548
 InvalidEscapeStringConstant=549
 InvalidUnterminatedEscapeStringConstant=550
-AfterEscapeStringConstantMode_NotContinued=551
-AfterEscapeStringConstantWithNewlineMode_NotContinued=552
-DollarText=553
-EndDollarStringConstant=554
-AfterEscapeStringConstantWithNewlineMode_Continued=555
+DollarText=551
+EndDollarStringConstant=552
+AfterEscapeStringConstantWithNewlineMode_Continued=553
 '$'=1
 '('=2
 ')'=3
@@ -1063,4 +1061,4 @@ AfterEscapeStringConstantWithNewlineMode_Continued=555
 'LOOP'=510
 'OPEN'=511
 '\\\\'=545
-'\''=555
+'\''=553
File diff suppressed because it is too large
@@ -1,13 +1,60 @@
-import { Lexer } from 'antlr4';
+import { Lexer } from "antlr4ts/Lexer";
 
 function isLetter(str) {
     return str.length === 1 && str.match(/[a-z]/i);
 }
 
-export default class PostgreSQLLexerBase extends Lexer {
+export default abstract class PostgreSQLLexerBase extends Lexer {
 
     tags: string[] = [];
+    _interp: any;
+
+    constructor(input) {
+        super(input);
+    }
+
+    pushTag() {
+        this.tags.push(this.text);
+    }
+
+    isTag() {
+        return this.text === this.tags[this.tags.length - 1];
+    }
+
+    popTag() {
+        this.tags.pop();
+    }
+
+    getInputStream() {
+        return this._input;
+    }
+
+    checkLA( c) {
+        // eslint-disable-next-line new-cap
+        return this.getInputStream().LA(1) !== c;
+    }
+
+    charIsLetter() {
+        // eslint-disable-next-line new-cap
+        return isLetter(this.getInputStream().LA(-1));
+    }
+
+    HandleNumericFail() {
+        this.getInputStream().seek(this.getInputStream().index - 2);
+        const Integral = 535;
+        this.type = Integral;
+    }
+
+    HandleLessLessGreaterGreater() {
+        const LESS_LESS = 18;
+        const GREATER_GREATER = 19;
+        if (this.text === '<<') this.type = LESS_LESS;
+        if (this.text === '>>') this.type = GREATER_GREATER;
+    }
+
+    UnterminatedBlockCommentDebugAssert() {
+        // Debug.Assert(InputStream.LA(1) == -1 /*EOF*/);
+    }
+
     CheckIfUtf32Letter() {
         // eslint-disable-next-line new-cap
@@ -21,52 +68,4 @@ export default class PostgreSQLLexerBase extends Lexer {
         }
         return isLetter(c[0]);
     }
-
-    UnterminatedBlockCommentDebugAssert() {
-        // Debug.Assert(InputStream.LA(1) == -1 /*EOF*/);
-    }
-
-    HandleLessLessGreaterGreater() {
-        const LESS_LESS = 18;
-        const GREATER_GREATER = 19;
-        if (this.text === '<<') {
-            this._type = LESS_LESS;
-        }
-        if (this.text === '>>') {
-            this._type = GREATER_GREATER;
-        }
-    }
-
-    HandleNumericFail() {
-        this.getInputStream().seek(this.getInputStream().index - 2);
-        const Integral = 535;
-        this._type = Integral;
-    }
-
-    charIsLetter() {
-        // eslint-disable-next-line new-cap
-        return isLetter(this.getInputStream().LA(-1));
-    }
-
-    pushTag() {
-        this.tags.push(this.text);
-    };
-
-    isTag() {
-        return this.text === this.tags.pop();
-    }
-
-    popTag() {
-        this.tags.pop();
-    }
-
-    getInputStream() {
-        return this._input;
-    }
-
-    checkLA(c) {
-        // eslint-disable-next-line new-cap
-        return this.getInputStream().LA(1) !== c;
-    }
-
 }
File diff suppressed because one or more lines are too long
@@ -548,11 +548,9 @@ EscapeStringConstant=547
 UnterminatedEscapeStringConstant=548
 InvalidEscapeStringConstant=549
 InvalidUnterminatedEscapeStringConstant=550
-AfterEscapeStringConstantMode_NotContinued=551
-AfterEscapeStringConstantWithNewlineMode_NotContinued=552
-DollarText=553
-EndDollarStringConstant=554
-AfterEscapeStringConstantWithNewlineMode_Continued=555
+DollarText=551
+EndDollarStringConstant=552
+AfterEscapeStringConstantWithNewlineMode_Continued=553
 '$'=1
 '('=2
 ')'=3
@@ -1063,4 +1061,4 @@ AfterEscapeStringConstantWithNewlineMode_Continued=555
 'LOOP'=510
 'OPEN'=511
 '\\\\'=545
-'\''=555
+'\''=553
File diff suppressed because one or more lines are too long
@@ -1,24 +1,20 @@
 /* eslint-disable new-cap,camelcase */
-import { Parser, CharStreams, CommonTokenStream } from 'antlr4';
-import PostgreSQLLexer from './PostgreSQLLexer';
-import PostgreSQLParser from './PostgreSQLParser';
+import { CharStreams, CommonTokenStream, Parser } from 'antlr4ts';
+import { PostgreSQLLexer } from './PostgreSQLLexer';
+import { PostgreSQLParser } from './PostgreSQLParser';
 
-export default class PostgreSQLParserBase extends Parser {
-
-    getPostgreSQLParser(script) {
-        const charStream = CharStreams.fromString(script);
-        const lexer = new PostgreSQLLexer(charStream);
-        const tokens = new CommonTokenStream(lexer);
-        const parser = new PostgreSQLParser(tokens);
-        return parser;
-    }
+// @ts-ignore
+export default abstract class PostgreSQLParserBase extends Parser {
+    constructor( input) {
+        super(input);
+    }
 
-    GetParsedSqlTree(script, line) {
+    GetParsedSqlTree( script, line) {
         const ph = this.getPostgreSQLParser(script);
         return ph.program();
     }
 
-    ParseRoutineBody(_localctx) {
+    ParseRoutineBody( _localctx) {
         let lang = null;
         for (let _i = 0, _a = _localctx.createfunc_opt_item(); _i < _a.length; _i++) {
             const coi = _a[_i];
@@ -35,13 +31,10 @@ export default class PostgreSQLParserBase extends Parser {
                 }
             }
         }
-        if (!lang) {
-            return;
-        }
+        if (!lang) return;
         // eslint-disable-next-line camelcase
         let func_as = null;
-        for (let _b = 0, _c = _localctx.createfunc_opt_item(); _b < _c.length; _b++) {
-            const a = _c[_b];
+        for (const a of _localctx.createfunc_opt_item()) {
             if (!a.func_as()) {
                 // eslint-disable-next-line camelcase
                 func_as = a;
@@ -49,9 +42,8 @@ export default class PostgreSQLParserBase extends Parser {
             }
         }
         // eslint-disable-next-line camelcase
-        if (!!func_as) {
+        if (!func_as) {
            const txt = this.GetRoutineBodyString(func_as.func_as().sconst(0));
-            // @ts-ignore
            const line = func_as.func_as().sconst(0).start.getLine();
            const ph = this.getPostgreSQLParser(txt);
            switch (lang) {
@@ -65,46 +57,47 @@ export default class PostgreSQLParserBase extends Parser {
         }
     }
 
-    TrimQuotes(s: string) {
-        return (!s) ? s : s.substring(1, s.length - 1);
+    TrimQuotes( s) {
+        return (!s) ? s : s.substring(1, s.length() - 1);
     }
 
-    unquote(s: string) {
-        const slength = s.length;
+    unquote( s) {
+        const slength = s.length();
         let r = '';
         let i = 0;
         while (i < slength) {
             const c = s.charAt(i);
             r = r.concat(c);
-            if (c === '\'' && i < slength - 1 && (s.charAt(i + 1) === '\'')) {
-                i++;
-            }
+            if (c === '\'' && i < slength - 1 && (s.charAt(i + 1) === '\'')) i++;
             i++;
         }
         return r.toString();
-    };
+    }
 
-    GetRoutineBodyString(rule) {
+    GetRoutineBodyString( rule) {
         const anysconst = rule.anysconst();
         // eslint-disable-next-line new-cap
         const StringConstant = anysconst.StringConstant();
-        if (!!StringConstant) {
-            return this.unquote(this.TrimQuotes(StringConstant.getText()));
-        }
+        if (null !== StringConstant) return this.unquote(this.TrimQuotes(StringConstant.getText()));
         const UnicodeEscapeStringConstant = anysconst.UnicodeEscapeStringConstant();
-        if (!!UnicodeEscapeStringConstant) {
-            return this.TrimQuotes(UnicodeEscapeStringConstant.getText());
-        }
+        if (null !== UnicodeEscapeStringConstant) return this.TrimQuotes(UnicodeEscapeStringConstant.getText());
         const EscapeStringConstant = anysconst.EscapeStringConstant();
-        if (!!EscapeStringConstant) {
-            return this.TrimQuotes(EscapeStringConstant.getText());
-        }
+        if (null !== EscapeStringConstant) return this.TrimQuotes(EscapeStringConstant.getText());
         let result = '';
         const dollartext = anysconst.DollarText();
-        for (let _i = 0, dollartext_1 = dollartext; _i < dollartext_1.length; _i++) {
-            const s = dollartext_1[_i];
+        for (const s of dollartext) {
             result += s.getText();
         }
         return result;
     }
+
+    getPostgreSQLParser( script) {
+        const charStream = CharStreams.fromString(script);
+        const lexer = new PostgreSQLLexer(charStream);
+        const tokens = new CommonTokenStream(lexer);
+        const parser = new PostgreSQLParser(tokens);
+        // lexer.removeErrorListeners();
+        // parser.removeErrorListeners();
+        return parser;
+    }
 }
File diff suppressed because it is too large
File diff suppressed because it is too large