feat: upgrade antlr4 to 4.12.0 (#88)
This commit is contained in:
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
Load Diff
3235
src/lib/pgsql/PostgreSQLLexer.ts
Normal file
3235
src/lib/pgsql/PostgreSQLLexer.ts
Normal file
File diff suppressed because it is too large
Load Diff
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
115014
src/lib/pgsql/PostgreSQLParser.ts
Normal file
115014
src/lib/pgsql/PostgreSQLParser.ts
Normal file
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
8946
src/lib/pgsql/PostgreSQLParserListener.ts
Normal file
8946
src/lib/pgsql/PostgreSQLParserListener.ts
Normal file
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
5699
src/lib/pgsql/PostgreSQLParserVisitor.ts
Normal file
5699
src/lib/pgsql/PostgreSQLParserVisitor.ts
Normal file
File diff suppressed because it is too large
Load Diff
@ -1,101 +0,0 @@
|
||||
// https://github.com/antlr/grammars-v4/blob/master/sql/postgresql/Java/PostgreSQLLexerBase.java

// Pre-class-syntax inheritance helper (the shape emitted by the TypeScript
// compiler for ES5 targets).
// eslint-disable-next-line no-invalid-this
const __extends = (this && this.__extends) || (function() {
    let extendStatics = function(d, b) {
        extendStatics = Object.setPrototypeOf ||
            ({ __proto__: [] } instanceof Array && function(d, b) {
                d.__proto__ = b;
            }) ||
            function(d, b) {
                for (const p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p];
            };
        return extendStatics(d, b);
    };
    return function(d, b) {
        if (typeof b !== 'function' && b !== null) {
            throw new TypeError('Class extends value ' + String(b) + ' is not a constructor or null');
        }
        extendStatics(d, b);
        function __() {
            this.constructor = d;
        }
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();

const Lexer = require('antlr4').Lexer;

// True when `str` is a single ASCII letter (case-insensitive).
function isLetter(str) {
    return str.length === 1 && str.match(/[a-z]/i);
}

/**
 * Base lexer for the generated PostgreSQL lexer: keeps a stack of open
 * dollar-quote tags and implements the predicates/actions referenced from
 * the grammar.
 */
function PostgreSQLLexerBase(input) {
    const _this = Lexer.call(this, input) || this;
    _this.tags = []; // stack of currently-open dollar-quote tags ($tag$ ... $tag$)
    return _this;
}

__extends(PostgreSQLLexerBase, Lexer);

// Record the current token text as an opening dollar-quote tag.
PostgreSQLLexerBase.prototype.pushTag = function() {
    // BUG FIX: the original called the bare `getText()` (a ReferenceError at
    // runtime); the lexer's own accessor is meant.
    this.tags.push(this.getText());
};

// True when the current token text matches the innermost open tag.
PostgreSQLLexerBase.prototype.isTag = function() {
    // BUG FIX: JS strings have no `.equals` and arrays have no `.peek`;
    // compare against the top of the stack without removing it.
    return this.getText() === this.tags[this.tags.length - 1];
};

// Discard the innermost dollar-quote tag.
PostgreSQLLexerBase.prototype.popTag = function() {
    this.tags.pop();
};

PostgreSQLLexerBase.prototype.getInputStream = function() {
    return this._input;
};

// True when the next input character is NOT `c` (a char code).
PostgreSQLLexerBase.prototype.checkLA = function(c) {
    // eslint-disable-next-line new-cap
    return this.getInputStream().LA(1) !== c;
};

// True when the previous input character is an ASCII letter.
PostgreSQLLexerBase.prototype.charIsLetter = function() {
    // BUG FIX: LA() returns a char *code* (a number); convert it to a
    // one-character string before the regex test, otherwise `isLetter`
    // always returned falsy (number.length is undefined).
    // eslint-disable-next-line new-cap
    return isLetter(String.fromCharCode(this.getInputStream().LA(-1)));
};

// A numeric literal failed to lex as a float: back up two characters and
// re-type the token as an integral literal.
PostgreSQLLexerBase.prototype.HandleNumericFail = function() {
    this.getInputStream().seek(this.getInputStream().index() - 2);
    const Integral = 535; // token type of the Integral rule in the generated lexer
    this.setType(Integral);
};

// Disambiguate '<<' / '>>' lexed via a generic operator rule.
PostgreSQLLexerBase.prototype.HandleLessLessGreaterGreater = function() {
    const LESS_LESS = 18;
    const GREATER_GREATER = 19;
    if (this.getText() === '<<') {
        this.setType(LESS_LESS);
    }
    if (this.getText() === '>>') {
        this.setType(GREATER_GREATER);
    }
};

PostgreSQLLexerBase.prototype.UnterminatedBlockCommentDebugAssert = function() {
    // Debug.Assert(InputStream.LA(1) == -1 /*EOF*/);
};

// True when the code point formed from the two previous input units is a letter.
PostgreSQLLexerBase.prototype.CheckIfUtf32Letter = function() {
    // BUG FIX: `+` binds tighter than `<<`, so the original computed
    // LA(-2) << (8 + LA(-1)); the intent is (LA(-2) << 8) + LA(-1).
    // eslint-disable-next-line new-cap
    let codePoint = (this.getInputStream().LA(-2) << 8) + this.getInputStream().LA(-1);
    let c;
    if (codePoint < 0x10000) {
        c = String.fromCharCode(codePoint);
    } else {
        codePoint -= 0x10000;
        // BUG FIX: surrogate halves must be integers (the Java source used
        // integer division); the original passed a fractional value.
        c = String.fromCharCode(Math.floor(codePoint / 0x400) + 0xd800, codePoint % 0x400 + 0xdc00);
    }
    return isLetter(c[0]);
};

exports.PostgreSQLLexerBase = PostgreSQLLexerBase;
|
72
src/lib/pgsql/base/PostgreSQLLexerBase.ts
Normal file
72
src/lib/pgsql/base/PostgreSQLLexerBase.ts
Normal file
@ -0,0 +1,72 @@
|
||||
|
||||
import { Lexer } from 'antlr4';
|
||||
|
||||
function isLetter(str) {
|
||||
return str.length === 1 && str.match(/[a-z]/i);
|
||||
}
|
||||
|
||||
export default class PostgreSQLLexerBase extends Lexer {
|
||||
|
||||
tags: string[] = [];
|
||||
|
||||
CheckIfUtf32Letter() {
|
||||
// eslint-disable-next-line new-cap
|
||||
let codePoint = this.getInputStream().LA(-2) << 8 + this.getInputStream().LA(-1);
|
||||
let c;
|
||||
if (codePoint < 0x10000) {
|
||||
c = String.fromCharCode(codePoint);
|
||||
} else {
|
||||
codePoint -= 0x10000;
|
||||
c = String.fromCharCode(codePoint / 0x400 + 0xd800, codePoint % 0x400 + 0xdc00);
|
||||
}
|
||||
return isLetter(c[0]);
|
||||
}
|
||||
|
||||
UnterminatedBlockCommentDebugAssert() {
|
||||
// Debug.Assert(InputStream.LA(1) == -1 /*EOF*/);
|
||||
}
|
||||
|
||||
HandleLessLessGreaterGreater() {
|
||||
const LESS_LESS = 18;
|
||||
const GREATER_GREATER = 19;
|
||||
if (this.text === '<<') {
|
||||
this._type = LESS_LESS;
|
||||
}
|
||||
if (this.text === '>>') {
|
||||
this._type = GREATER_GREATER;
|
||||
}
|
||||
}
|
||||
|
||||
HandleNumericFail() {
|
||||
this.getInputStream().seek(this.getInputStream().index - 2);
|
||||
const Integral = 535;
|
||||
this._type = Integral;
|
||||
}
|
||||
|
||||
charIsLetter() {
|
||||
// eslint-disable-next-line new-cap
|
||||
return isLetter(this.getInputStream().LA(-1));
|
||||
}
|
||||
|
||||
pushTag() {
|
||||
this.tags.push(this.text);
|
||||
};
|
||||
|
||||
isTag() {
|
||||
return this.text === this.tags.pop();
|
||||
}
|
||||
|
||||
popTag() {
|
||||
this.tags.pop();
|
||||
}
|
||||
|
||||
getInputStream() {
|
||||
return this._input;
|
||||
}
|
||||
|
||||
checkLA(c) {
|
||||
// eslint-disable-next-line new-cap
|
||||
return this.getInputStream().LA(1) !== c;
|
||||
}
|
||||
|
||||
}
|
@ -1,149 +0,0 @@
|
||||
/* eslint-disable new-cap,camelcase */

// https://github.com/antlr/grammars-v4/blob/master/sql/postgresql/Java/PostgreSQLParserBase.java

// Pre-class-syntax inheritance helper (the shape emitted by the TypeScript
// compiler for ES5 targets).
// eslint-disable-next-line no-invalid-this
const __extends = (this && this.__extends) || (function() {
    let extendStatics = function(d, b) {
        extendStatics = Object.setPrototypeOf ||
            ({ __proto__: [] } instanceof Array && function(d, b) {
                d.__proto__ = b;
            }) ||
            function(d, b) {
                for (const p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p];
            };
        return extendStatics(d, b);
    };
    return function(d, b) {
        if (typeof b !== 'function' && b !== null) {
            throw new TypeError('Class extends value ' + String(b) + ' is not a constructor or null');
        }
        extendStatics(d, b);
        function __() {
            this.constructor = d;
        }
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();

const PostgreSQLLexer_1 = require('../PostgreSQLLexer');
const PostgreSQLParser_1 = require('../PostgreSQLParser');

const antlr4 = require('antlr4/index');
const CharStreams = antlr4.CharStreams;
const CommonTokenStream = antlr4.CommonTokenStream;
const Parser = antlr4.Parser;

/**
 * Base parser for the generated PostgreSQL parser: provides helpers that
 * re-parse CREATE FUNCTION routine bodies in their declared LANGUAGE.
 */
function PostgreSQLParserBase(input) {
    return Parser.call(this, input) || this;
}

__extends(PostgreSQLParserBase, Parser);

// Build a fresh lexer/parser pair over `script` with error listeners removed.
// BUG FIX: the original attached this as a *static* function while every call
// site used `this.getPostgreSQLParser(...)`; define it on the prototype so
// those calls resolve.
PostgreSQLParserBase.prototype.getPostgreSQLParser = function(script) {
    const charStream = CharStreams.fromString(script);
    const lexer = new PostgreSQLLexer_1.PostgreSQLLexer(charStream);
    const tokens = new CommonTokenStream(lexer);
    const parser = new PostgreSQLParser_1.PostgreSQLParser(tokens);
    lexer.removeErrorListeners();
    parser.removeErrorListeners();
    // LexerDispatchingErrorListener listener_lexer = new LexerDispatchingErrorListener((Lexer)(((CommonTokenStream)(this.getInputStream())).getTokenSource()));
    // ParserDispatchingErrorListener listener_parser = new ParserDispatchingErrorListener(this);
    // lexer.addErrorListener(listener_lexer);
    // parser.addErrorListener(listener_parser);
    return parser;
};

// Parse `script` as a complete program and return the parse tree.
PostgreSQLParserBase.prototype.GetParsedSqlTree = function(script, line) {
    const ph = this.getPostgreSQLParser(script);
    return ph.program();
};

// After CREATE FUNCTION, re-parse the routine body with the parser for the
// declared LANGUAGE ('plpgsql' or 'sql') and attach the resulting subtree.
PostgreSQLParserBase.prototype.ParseRoutineBody = function(_localctx) {
    // Locate the declared language (LANGUAGE plpgsql / LANGUAGE sql).
    let lang = null;
    for (let _i = 0, _a = _localctx.createfunc_opt_item(); _i < _a.length; _i++) {
        const coi = _a[_i];
        if (!!coi.LANGUAGE()) {
            if (!!coi.nonreservedword_or_sconst()) {
                if (!!coi.nonreservedword_or_sconst().nonreservedword()) {
                    if (!!coi.nonreservedword_or_sconst().nonreservedword().identifier()) {
                        if (!!coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier()) {
                            lang = coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier().getText();
                            break;
                        }
                    }
                }
            }
        }
    }
    if (!lang) {
        return;
    }
    // Locate the AS 'body' option.
    // eslint-disable-next-line camelcase
    let func_as = null;
    for (let _b = 0, _c = _localctx.createfunc_opt_item(); _b < _c.length; _b++) {
        const a = _c[_b];
        // BUG FIX: the condition was inverted (`!a.func_as()`), selecting an
        // option *without* a body and crashing below; the Java source tests
        // `a.func_as() != null`.
        if (!!a.func_as()) {
            // eslint-disable-next-line camelcase
            func_as = a;
            break;
        }
    }
    // eslint-disable-next-line camelcase
    if (!!func_as) {
        const txt = this.GetRoutineBodyString(func_as.func_as().sconst(0));
        const ph = this.getPostgreSQLParser(txt);
        switch (lang) {
            case 'plpgsql':
                func_as.func_as().Definition = ph.plsqlroot();
                break;
            case 'sql':
                func_as.func_as().Definition = ph.program();
                break;
        }
    }
};

// Strip one leading and one trailing quote character.
PostgreSQLParserBase.prototype.TrimQuotes = function(s) {
    // BUG FIX: `length` is a property on JS strings, not a method.
    return (!s) ? s : s.substring(1, s.length - 1);
};

// Collapse doubled single quotes ('') into a single quote.
PostgreSQLParserBase.prototype.unquote = function(s) {
    // BUG FIX: JS strings are immutable and have no `.append`, and the
    // accumulator was declared `const`; build the result by concatenation
    // and read `.length` as a property.
    const slength = s.length;
    let r = '';
    let i = 0;
    while (i < slength) {
        const c = s.charAt(i);
        r += c;
        if (c === '\'' && i < slength - 1 && (s.charAt(i + 1) === '\'')) {
            i++; // skip the second quote of an escaped pair
        }
        i++;
    }
    return r;
};

// Extract the raw routine-body text from an sconst rule context, whichever
// string-constant form was used ('...', U&'...', E'...', or $tag$...$tag$).
PostgreSQLParserBase.prototype.GetRoutineBodyString = function(rule) {
    const anysconst = rule.anysconst();
    const StringConstant = anysconst.StringConstant();
    if (!!StringConstant) {
        return this.unquote(this.TrimQuotes(StringConstant.getText()));
    }
    const UnicodeEscapeStringConstant = anysconst.UnicodeEscapeStringConstant();
    if (!!UnicodeEscapeStringConstant) {
        return this.TrimQuotes(UnicodeEscapeStringConstant.getText());
    }
    const EscapeStringConstant = anysconst.EscapeStringConstant();
    if (!!EscapeStringConstant) {
        return this.TrimQuotes(EscapeStringConstant.getText());
    }
    // Dollar-quoted body: concatenate every DollarText token.
    let result = '';
    const dollartext = anysconst.DollarText();
    for (let _i = 0, dollartext_1 = dollartext; _i < dollartext_1.length; _i++) {
        result += dollartext_1[_i].getText();
    }
    return result;
};

exports.PostgreSQLParserBase = PostgreSQLParserBase;
|
110
src/lib/pgsql/base/PostgreSQLParserBase.ts
Normal file
110
src/lib/pgsql/base/PostgreSQLParserBase.ts
Normal file
@ -0,0 +1,110 @@
|
||||
/* eslint-disable new-cap,camelcase */
|
||||
import { Parser, CharStreams, CommonTokenStream } from 'antlr4';
|
||||
import PostgreSQLLexer from '../PostgreSQLLexer';
|
||||
import PostgreSQLParser from '../PostgreSQLParser';
|
||||
|
||||
export default class PostgreSQLParserBase extends Parser {
|
||||
|
||||
getPostgreSQLParser(script) {
|
||||
const charStream = CharStreams.fromString(script);
|
||||
const lexer = new PostgreSQLLexer(charStream);
|
||||
const tokens = new CommonTokenStream(lexer);
|
||||
const parser = new PostgreSQLParser(tokens);
|
||||
return parser;
|
||||
}
|
||||
|
||||
GetParsedSqlTree(script, line) {
|
||||
const ph = this.getPostgreSQLParser(script);
|
||||
return ph.program();
|
||||
}
|
||||
|
||||
ParseRoutineBody(_localctx) {
|
||||
let lang = null;
|
||||
for (let _i = 0, _a = _localctx.createfunc_opt_item(); _i < _a.length; _i++) {
|
||||
const coi = _a[_i];
|
||||
if (!!coi.LANGUAGE()) {
|
||||
if (!!coi.nonreservedword_or_sconst()) {
|
||||
if (!!coi.nonreservedword_or_sconst().nonreservedword()) {
|
||||
if (!!coi.nonreservedword_or_sconst().nonreservedword().identifier()) {
|
||||
if (!!coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier()) {
|
||||
lang = coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier().getText();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!lang) {
|
||||
return;
|
||||
}
|
||||
// eslint-disable-next-line camelcase
|
||||
let func_as = null;
|
||||
for (let _b = 0, _c = _localctx.createfunc_opt_item(); _b < _c.length; _b++) {
|
||||
const a = _c[_b];
|
||||
if (!a.func_as()) {
|
||||
// eslint-disable-next-line camelcase
|
||||
func_as = a;
|
||||
break;
|
||||
}
|
||||
}
|
||||
// eslint-disable-next-line camelcase
|
||||
if (!!func_as) {
|
||||
const txt = this.GetRoutineBodyString(func_as.func_as().sconst(0));
|
||||
// @ts-ignore
|
||||
const line = func_as.func_as().sconst(0).start.getLine();
|
||||
const ph = this.getPostgreSQLParser(txt);
|
||||
switch (lang) {
|
||||
case 'plpgsql':
|
||||
func_as.func_as().Definition = ph.plsqlroot();
|
||||
break;
|
||||
case 'sql':
|
||||
func_as.func_as().Definition = ph.program();
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
TrimQuotes(s: string) {
|
||||
return (!s) ? s : s.substring(1, s.length - 1);
|
||||
}
|
||||
|
||||
unquote(s: string) {
|
||||
const slength = s.length;
|
||||
let r = '';
|
||||
let i = 0;
|
||||
while (i < slength) {
|
||||
const c = s.charAt(i);
|
||||
r = r.concat(c);
|
||||
if (c === '\'' && i < slength - 1 && (s.charAt(i + 1) === '\'')) {
|
||||
i++;
|
||||
}
|
||||
i++;
|
||||
}
|
||||
return r.toString();
|
||||
};
|
||||
|
||||
GetRoutineBodyString(rule) {
|
||||
const anysconst = rule.anysconst();
|
||||
const StringConstant = anysconst.StringConstant();
|
||||
if (!!StringConstant) {
|
||||
return this.unquote(this.TrimQuotes(StringConstant.getText()));
|
||||
}
|
||||
const UnicodeEscapeStringConstant = anysconst.UnicodeEscapeStringConstant();
|
||||
if (!!UnicodeEscapeStringConstant) {
|
||||
return this.TrimQuotes(UnicodeEscapeStringConstant.getText());
|
||||
}
|
||||
const EscapeStringConstant = anysconst.EscapeStringConstant();
|
||||
if (!!EscapeStringConstant) {
|
||||
return this.TrimQuotes(EscapeStringConstant.getText());
|
||||
}
|
||||
let result = '';
|
||||
const dollartext = anysconst.DollarText();
|
||||
for (let _i = 0, dollartext_1 = dollartext; _i < dollartext_1.length; _i++) {
|
||||
const s = dollartext_1[_i];
|
||||
result += s.getText();
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
}
|
Reference in New Issue
Block a user