convert base to es5 - code clean

parent ea0f061ff9
commit ae767d5815

Removed: LexerDispatchingErrorListener (Java)

@@ -1,55 +0,0 @@
-import java.util.BitSet;
-import org.antlr.v4.runtime.*;
-import org.antlr.v4.runtime.atn.*;
-import org.antlr.v4.runtime.dfa.*;
-import org.antlr.v4.runtime.misc.*;
-
-public class LexerDispatchingErrorListener implements ANTLRErrorListener
-{
-    Lexer _parent;
-
-    public LexerDispatchingErrorListener(Lexer parent)
-    {
-        _parent = parent;
-    }
-
-    public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line, int charPositionInLine, String msg, RecognitionException e)
-    {
-        var foo = new ProxyErrorListener(_parent.getErrorListeners());
-        foo.syntaxError(recognizer, offendingSymbol, line, charPositionInLine, msg, e);
-    }
-
-    public void reportAmbiguity(Parser recognizer,
-                                DFA dfa,
-                                int startIndex,
-                                int stopIndex,
-                                boolean exact,
-                                BitSet ambigAlts,
-                                ATNConfigSet configs)
-    {
-        var foo = new ProxyErrorListener(_parent.getErrorListeners());
-        foo.reportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs);
-    }
-
-    public void reportAttemptingFullContext(Parser recognizer,
-                                            DFA dfa,
-                                            int startIndex,
-                                            int stopIndex,
-                                            BitSet conflictingAlts,
-                                            ATNConfigSet configs)
-    {
-        var foo = new ProxyErrorListener(_parent.getErrorListeners());
-        foo.reportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs);
-    }
-
-    public void reportContextSensitivity(Parser recognizer,
-                                         DFA dfa,
-                                         int startIndex,
-                                         int stopIndex,
-                                         int prediction,
-                                         ATNConfigSet configs)
-    {
-        var foo = new ProxyErrorListener(_parent.getErrorListeners());
-        foo.reportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, configs);
-    }
-}

Removed: ParserDispatchingErrorListener (Java)

@@ -1,55 +0,0 @@
-import java.util.BitSet;
-import org.antlr.v4.runtime.*;
-import org.antlr.v4.runtime.atn.*;
-import org.antlr.v4.runtime.dfa.*;
-import org.antlr.v4.runtime.misc.*;
-
-public class ParserDispatchingErrorListener implements ANTLRErrorListener
-{
-    Parser _parent;
-
-    public ParserDispatchingErrorListener(Parser parent)
-    {
-        _parent = parent;
-    }
-
-    public void syntaxError(Recognizer<?, ?> recognizer, Object offendingSymbol, int line, int charPositionInLine, String msg, RecognitionException e)
-    {
-        var foo = new ProxyErrorListener(_parent.getErrorListeners());
-        foo.syntaxError(recognizer, offendingSymbol, line, charPositionInLine, msg, e);
-    }
-
-    public void reportAmbiguity(Parser recognizer,
-                                DFA dfa,
-                                int startIndex,
-                                int stopIndex,
-                                boolean exact,
-                                BitSet ambigAlts,
-                                ATNConfigSet configs)
-    {
-        var foo = new ProxyErrorListener(_parent.getErrorListeners());
-        foo.reportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs);
-    }
-
-    public void reportAttemptingFullContext(Parser recognizer,
-                                            DFA dfa,
-                                            int startIndex,
-                                            int stopIndex,
-                                            BitSet conflictingAlts,
-                                            ATNConfigSet configs)
-    {
-        var foo = new ProxyErrorListener(_parent.getErrorListeners());
-        foo.reportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs);
-    }
-
-    public void reportContextSensitivity(Parser recognizer,
-                                         DFA dfa,
-                                         int startIndex,
-                                         int stopIndex,
-                                         int prediction,
-                                         ATNConfigSet configs)
-    {
-        var foo = new ProxyErrorListener(_parent.getErrorListeners());
-        foo.reportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, configs);
-    }
-}
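
Note: both removed Java listeners do the same thing: they wrap the parent recognizer's registered listeners in a ProxyErrorListener and forward each callback to it. A minimal JavaScript sketch of the same idea, assuming the antlr4 JS runtime used elsewhere in this commit exposes antlr4.error.ErrorListener and Recognizer.getErrorListenerDispatch() (sketch only, not part of this commit):

    // Sketch only, not part of this commit: forward lexer/parser errors to the
    // listeners registered on a "parent" recognizer, like the Java classes above.
    const antlr4 = require('antlr4/index');

    class DispatchingErrorListener extends antlr4.error.ErrorListener {
        constructor(parent) {
            super();
            this._parent = parent; // recognizer whose listeners should receive events
        }

        syntaxError(recognizer, offendingSymbol, line, column, msg, e) {
            // getErrorListenerDispatch() wraps the parent's listeners in a ProxyErrorListener
            this._parent.getErrorListenerDispatch()
                .syntaxError(recognizer, offendingSymbol, line, column, msg, e);
        }
    }

    // Hypothetical wiring: innerLexer and outerLexer are placeholder names.
    // innerLexer.removeErrorListeners();
    // innerLexer.addErrorListener(new DispatchingErrorListener(outerLexer));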

Removed: PostgreSQLLexerBase (ES class source)

@@ -1,67 +0,0 @@
-const antlr4 = require('antlr4/index');
-const Lexer = antlr4.Lexer;
-function isLetter(str) {
-    return str.length === 1 && str.match(/[a-z]/i);
-}
-export class PostgreSQLLexerBase extends Lexer {
-    tags = [];
-
-    constructor(input) {
-        super(input);
-    }
-
-    pushTag() {
-        this.tags.push(getText());
-    }
-
-    isTag() {
-        return this.getText().equals(this.tags.peek());
-    }
-
-    popTag() {
-        tags.pop();
-    }
-
-    getInputStream() {
-        return this._input;
-    }
-    checkLA( c) {
-        // eslint-disable-next-line new-cap
-        return this.getInputStream().LA(1) !== c;
-    }
-
-    charIsLetter() {
-        // eslint-disable-next-line new-cap
-        return isLetter(this.getInputStream().LA(-1));
-    }
-
-    HandleNumericFail() {
-        this.getInputStream().seek(this.getInputStream().index() - 2);
-        const Integral = 535;
-        this.setType(Integral);
-    }
-
-    HandleLessLessGreaterGreater() {
-        const LESS_LESS = 18;
-        const GREATER_GREATER = 19;
-        if (this.getText() === '<<') this.setType(LESS_LESS);
-        if (this.getText() === '>>') this.setType(GREATER_GREATER);
-    }
-
-    UnterminatedBlockCommentDebugAssert() {
-        // Debug.Assert(InputStream.LA(1) == -1 /*EOF*/);
-    }
-
-    CheckIfUtf32Letter() {
-        // eslint-disable-next-line new-cap
-        let codePoint = this.getInputStream().LA(-2) << 8 + this.getInputStream().LA(-1);
-        let c;
-        if (codePoint < 0x10000) {
-            c = String.fromCharCode(codePoint);
-        } else {
-            codePoint -= 0x10000;
-            c = String.fromCharCode(codePoint / 0x400 + 0xd800, codePoint % 0x400 + 0xdc00);
-        }
-        return isLetter(c[0]);
-    }
-}
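
Note: CheckIfUtf32Letter reassembles a code point from the last two lookback values and, when the result is above 0xFFFF, splits it into a UTF-16 surrogate pair before testing the first code unit. A self-contained sketch of that surrogate math, checked against String.fromCodePoint (sketch only, not part of the commit; plain JS division is floating point, so the high-surrogate term needs an explicit Math.floor):

    // Sketch only: the surrogate-pair split used by CheckIfUtf32Letter above.
    const codePoint = 0x1D11E;           // example astral code point (U+1D11E)
    const cp = codePoint - 0x10000;
    const viaPair = String.fromCharCode(
        Math.floor(cp / 0x400) + 0xd800, // high surrogate
        (cp % 0x400) + 0xdc00            // low surrogate
    );
    console.log(viaPair === String.fromCodePoint(codePoint)); // true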

Removed: PostgreSQLParserBase (ES class source)

@@ -1,114 +0,0 @@
-/* eslint-disable new-cap */
-import { PostgreSQLLexer } from '../PostgreSQLLexer';
-import { PostgreSQLParser } from '../PostgreSQLParser';
-
-
-const antlr4 = require('antlr4/index');
-const CharStreams = antlr4.CharStreams;
-const CommonTokenStream = antlr4.CommonTokenStream;
-
-
-// @ts-ignore
-export class PostgreSQLParserBase extends antlr4.Parser {
-    constructor( input) {
-        super(input);
-    }
-
-    GetParsedSqlTree( script, line) {
-        const ph = this.getPostgreSQLParser(script);
-        return ph.program();
-    }
-
-    ParseRoutineBody( _localctx) {
-        let lang = null;
-        for (const coi of _localctx.createfunc_opt_item()) {
-            // eslint-disable-next-line new-cap
-            if (!coi.LANGUAGE()) {
-                if (!coi.nonreservedword_or_sconst()) {
-                    if (!coi.nonreservedword_or_sconst().nonreservedword()) {
-                        if (!coi.nonreservedword_or_sconst().nonreservedword().identifier()) {
-                            // eslint-disable-next-line new-cap
-                            if (!coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier()) {
-                                // eslint-disable-next-line new-cap
-                                lang = coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier().getText();
-                                break;
-                            }
-                        }
-                    }
-                }
-            }
-        }
-        if (!lang) return;
-        // eslint-disable-next-line camelcase
-        let func_as = null;
-        for (const a of _localctx.createfunc_opt_item()) {
-            if (!a.func_as()) {
-                // eslint-disable-next-line camelcase
-                func_as = a;
-                break;
-            }
-        }
-        // eslint-disable-next-line camelcase
-        if (!func_as) {
-            const txt = this.GetRoutineBodyString(func_as.func_as().sconst(0));
-            const line = func_as.func_as().sconst(0).start.getLine();
-            const ph = this.getPostgreSQLParser(txt);
-            switch (lang) {
-            case 'plpgsql':
-                func_as.func_as().Definition = ph.plsqlroot();
-                break;
-            case 'sql':
-                func_as.func_as().Definition = ph.program();
-                break;
-            }
-        }
-    }
-
-    TrimQuotes( s) {
-        return (!s) ? s : s.substring(1, s.length() - 1);
-    }
-
-    unquote( s) {
-        const slength = s.length();
-        const r = '';
-        let i = 0;
-        while (i < slength) {
-            const c = s.charAt(i);
-            r.append(c);
-            if (c === '\'' && i < slength - 1 && (s.charAt(i + 1) === '\'')) i++;
-            i++;
-        }
-        return r.toString();
-    }
-
-    GetRoutineBodyString( rule) {
-        const anysconst = rule.anysconst();
-        // eslint-disable-next-line new-cap
-        const StringConstant = anysconst.StringConstant();
-        if (null !== StringConstant) return this.unquote(this.TrimQuotes(StringConstant.getText()));
-        const UnicodeEscapeStringConstant = anysconst.UnicodeEscapeStringConstant();
-        if (null !== UnicodeEscapeStringConstant) return this.TrimQuotes(UnicodeEscapeStringConstant.getText());
-        const EscapeStringConstant = anysconst.EscapeStringConstant();
-        if (null !== EscapeStringConstant) return this.TrimQuotes(EscapeStringConstant.getText());
-        let result = '';
-        const dollartext = anysconst.DollarText();
-        for (const s of dollartext) {
-            result += s.getText();
-        }
-        return result;
-    }
-
-    static getPostgreSQLParser( script) {
-        const charStream = CharStreams.fromString(script);
-        const lexer = new PostgreSQLLexer(charStream);
-        const tokens = new CommonTokenStream(lexer);
-        const parser = new PostgreSQLParser(tokens);
-        lexer.removeErrorListeners();
-        parser.removeErrorListeners();
-        // LexerDispatchingErrorListener listener_lexer = new LexerDispatchingErrorListener((Lexer)(((CommonTokenStream)(this.getInputStream())).getTokenSource()));
-        // ParserDispatchingErrorListener listener_parser = new ParserDispatchingErrorListener(this);
-        // lexer.addErrorListener(listener_lexer);
-        // parser.addErrorListener(listener_parser);
-        return parser;
-    }
-}
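
Note: getPostgreSQLParser wires up the usual ANTLR pipeline: a character stream feeding PostgreSQLLexer, a CommonTokenStream, and a PostgreSQLParser with its default console error listeners removed. A small usage sketch, assuming the same antlr4 runtime and relative module paths as the imports above (sketch only, not part of the commit):

    // Sketch only: build the parser the same way getPostgreSQLParser does,
    // then invoke the program() entry rule used by GetParsedSqlTree.
    const antlr4 = require('antlr4/index');
    const { PostgreSQLLexer } = require('../PostgreSQLLexer');
    const { PostgreSQLParser } = require('../PostgreSQLParser');

    function parseSql(script) {
        const lexer = new PostgreSQLLexer(antlr4.CharStreams.fromString(script));
        const parser = new PostgreSQLParser(new antlr4.CommonTokenStream(lexer));
        lexer.removeErrorListeners();  // drop the default console listener
        parser.removeErrorListeners();
        return parser.program();       // parse tree for the whole script
    }

    // const tree = parseSql('SELECT 1;');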

Modified: PostgreSQLLexerBase (ES5)

@@ -1,82 +1,101 @@
-"use strict";
-var __extends = (this && this.__extends) || (function () {
-    var extendStatics = function (d, b) {
+// https://github.com/antlr/grammars-v4/blob/master/sql/postgresql/Java/PostgreSQLLexerBase.java
+// eslint-disable-next-line no-invalid-this
+const __extends = (this && this.__extends) || (function() {
+    let extendStatics = function(d, b) {
         extendStatics = Object.setPrototypeOf ||
-            ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
-            function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
+            ({ __proto__: [] } instanceof Array && function(d, b) {
+                d.__proto__ = b;
+            }) ||
+            function(d, b) {
+                for (const p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p];
+            };
         return extendStatics(d, b);
     };
     return function(d, b) {
-        if (typeof b !== "function" && b !== null)
-            throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
+        if (typeof b !== 'function' && b !== null) {
+            throw new TypeError('Class extends value ' + String(b) + ' is not a constructor or null');
+        }
         extendStatics(d, b);
-        function __() { this.constructor = d; }
+        function __() {
+            this.constructor = d;
+        }
         d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
     };
 })();
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.PostgreSQLLexerBase = void 0;
-var antlr4 = require('antlr4/index');
-var Lexer = antlr4.Lexer;
+const Lexer = require('antlr4').Lexer;
 function isLetter(str) {
     return str.length === 1 && str.match(/[a-z]/i);
 }
-var PostgreSQLLexerBase = /** @class */ (function (_super) {
-    __extends(PostgreSQLLexerBase, _super);
 function PostgreSQLLexerBase(input) {
-    var _this = _super.call(this, input) || this;
+    const _this = Lexer.call(this, input) || this;
     _this.tags = [];
     return _this;
 }
 
+__extends(PostgreSQLLexerBase, Lexer);
 
 PostgreSQLLexerBase.prototype.pushTag = function() {
     this.tags.push(getText());
 };
 
 PostgreSQLLexerBase.prototype.isTag = function() {
     return this.getText().equals(this.tags.peek());
 };
 
 PostgreSQLLexerBase.prototype.popTag = function() {
-    tags.pop();
+    this.tags.pop();
 };
 
 PostgreSQLLexerBase.prototype.getInputStream = function() {
     return this._input;
 };
 
 PostgreSQLLexerBase.prototype.checkLA = function(c) {
     // eslint-disable-next-line new-cap
     return this.getInputStream().LA(1) !== c;
 };
 
 PostgreSQLLexerBase.prototype.charIsLetter = function() {
     // eslint-disable-next-line new-cap
     return isLetter(this.getInputStream().LA(-1));
 };
 
 PostgreSQLLexerBase.prototype.HandleNumericFail = function() {
     this.getInputStream().seek(this.getInputStream().index() - 2);
-    var Integral = 535;
+    const Integral = 535;
     this.setType(Integral);
 };
 
 PostgreSQLLexerBase.prototype.HandleLessLessGreaterGreater = function() {
-    var LESS_LESS = 18;
+    const LESS_LESS = 18;
-    var GREATER_GREATER = 19;
+    const GREATER_GREATER = 19;
-    if (this.getText() === '<<')
+    if (this.getText() === '<<') {
         this.setType(LESS_LESS);
-    if (this.getText() === '>>')
+    }
+    if (this.getText() === '>>') {
         this.setType(GREATER_GREATER);
+    }
 };
 
 PostgreSQLLexerBase.prototype.UnterminatedBlockCommentDebugAssert = function() {
     // Debug.Assert(InputStream.LA(1) == -1 /*EOF*/);
 };
 
 PostgreSQLLexerBase.prototype.CheckIfUtf32Letter = function() {
     // eslint-disable-next-line new-cap
-    var codePoint = this.getInputStream().LA(-2) << 8 + this.getInputStream().LA(-1);
+    let codePoint = this.getInputStream().LA(-2) << 8 + this.getInputStream().LA(-1);
-    var c;
+    let c;
     if (codePoint < 0x10000) {
         c = String.fromCharCode(codePoint);
-    }
-    else {
+    } else {
         codePoint -= 0x10000;
         c = String.fromCharCode(codePoint / 0x400 + 0xd800, codePoint % 0x400 + 0xdc00);
     }
     return isLetter(c[0]);
 };
-    return PostgreSQLLexerBase;
-}(Lexer));
 exports.PostgreSQLLexerBase = PostgreSQLLexerBase;
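
Note: the cleaned-up ES5 file drops the compiler-generated IIFE and the _super alias, calling the parent constructor directly (Lexer.call(this, input)) and linking the prototypes through the __extends helper. A minimal, self-contained sketch of that ES5 inheritance pattern, with illustrative names only (not part of the commit):

    // Sketch only: plain ES5 constructor inheritance, roughly what
    // Lexer.call(this, input) plus __extends(PostgreSQLLexerBase, Lexer) achieve.
    function Parent(name) {
        this.name = name;
    }
    Parent.prototype.greet = function() {
        return 'hello ' + this.name;
    };

    function Child(name) {
        Parent.call(this, name); // run the parent constructor against `this`
        this.tags = [];
    }
    Child.prototype = Object.create(Parent.prototype); // inherit methods
    Child.prototype.constructor = Child;

    console.log(new Child('pg').greet());           // hello pg
    console.log(new Child('pg') instanceof Parent); // true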

Modified: PostgreSQLParserBase (ES5)

@@ -1,49 +1,58 @@
-"use strict";
-var __extends = (this && this.__extends) || (function () {
-    var extendStatics = function (d, b) {
+/* eslint-disable new-cap,camelcase */
+// https://github.com/antlr/grammars-v4/blob/master/sql/postgresql/Java/PostgreSQLParserBase.java
+// eslint-disable-next-line no-invalid-this
+const __extends = (this && this.__extends) || (function() {
+    let extendStatics = function(d, b) {
         extendStatics = Object.setPrototypeOf ||
-            ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
-            function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; };
+            ({ __proto__: [] } instanceof Array && function(d, b) {
+                d.__proto__ = b;
+            }) ||
+            function(d, b) {
+                for (const p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p];
+            };
         return extendStatics(d, b);
     };
     return function(d, b) {
-        if (typeof b !== "function" && b !== null)
-            throw new TypeError("Class extends value " + String(b) + " is not a constructor or null");
+        if (typeof b !== 'function' && b !== null) {
+            throw new TypeError('Class extends value ' + String(b) + ' is not a constructor or null');
+        }
         extendStatics(d, b);
-        function __() { this.constructor = d; }
+        function __() {
+            this.constructor = d;
+        }
         d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
     };
 })();
-Object.defineProperty(exports, "__esModule", { value: true });
-exports.PostgreSQLParserBase = void 0;
-/* eslint-disable new-cap */
-var PostgreSQLLexer_1 = require("../PostgreSQLLexer");
-var PostgreSQLParser_1 = require("../PostgreSQLParser");
-var antlr4 = require('antlr4/index');
-var CharStreams = antlr4.CharStreams;
-var CommonTokenStream = antlr4.CommonTokenStream;
-// @ts-ignore
-var PostgreSQLParserBase = /** @class */ (function (_super) {
-    __extends(PostgreSQLParserBase, _super);
+const PostgreSQLLexer_1 = require('../PostgreSQLLexer');
+const PostgreSQLParser_1 = require('../PostgreSQLParser');
+const antlr4 = require('antlr4/index');
+const CharStreams = antlr4.CharStreams;
+const CommonTokenStream = antlr4.CommonTokenStream;
+const Parser = antlr4.Parser;
+__extends(PostgreSQLParserBase, Parser);
 function PostgreSQLParserBase(input) {
-    return _super.call(this, input) || this;
+    return Parser.call(this, input) || this;
 }
 
 PostgreSQLParserBase.prototype.GetParsedSqlTree = function(script, line) {
-    var ph = this.getPostgreSQLParser(script);
+    const ph = this.getPostgreSQLParser(script);
     return ph.program();
 };
 
 PostgreSQLParserBase.prototype.ParseRoutineBody = function(_localctx) {
-    var lang = null;
+    let lang = null;
-    for (var _i = 0, _a = _localctx.createfunc_opt_item(); _i < _a.length; _i++) {
+    for (let _i = 0, _a = _localctx.createfunc_opt_item(); _i < _a.length; _i++) {
-        var coi = _a[_i];
-        // eslint-disable-next-line new-cap
-        if (!coi.LANGUAGE()) {
-            if (!coi.nonreservedword_or_sconst()) {
-                if (!coi.nonreservedword_or_sconst().nonreservedword()) {
-                    if (!coi.nonreservedword_or_sconst().nonreservedword().identifier()) {
-                        // eslint-disable-next-line new-cap
-                        if (!coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier()) {
-                            // eslint-disable-next-line new-cap
+        const coi = _a[_i];
+        if (!!coi.LANGUAGE()) {
+            if (!!coi.nonreservedword_or_sconst()) {
+                if (!!coi.nonreservedword_or_sconst().nonreservedword()) {
+                    if (!!coi.nonreservedword_or_sconst().nonreservedword().identifier()) {
+                        if (!!coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier()) {
                             lang = coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier().getText();
                             break;
                         }
@@ -52,12 +61,13 @@ var PostgreSQLParserBase = /** @class */ (function (_super) {
             }
         }
     }
-    if (!lang)
+    if (!lang) {
         return;
+    }
     // eslint-disable-next-line camelcase
-    var func_as = null;
+    let func_as = null;
-    for (var _b = 0, _c = _localctx.createfunc_opt_item(); _b < _c.length; _b++) {
+    for (let _b = 0, _c = _localctx.createfunc_opt_item(); _b < _c.length; _b++) {
-        var a = _c[_b];
+        const a = _c[_b];
         if (!a.func_as()) {
             // eslint-disable-next-line camelcase
             func_as = a;
@@ -65,10 +75,10 @@ var PostgreSQLParserBase = /** @class */ (function (_super) {
         }
     }
     // eslint-disable-next-line camelcase
-    if (!func_as) {
+    if (!!func_as) {
-        var txt = this.GetRoutineBodyString(func_as.func_as().sconst(0));
+        const txt = this.GetRoutineBodyString(func_as.func_as().sconst(0));
-        var line = func_as.func_as().sconst(0).start.getLine();
+        const line = func_as.func_as().sconst(0).start.getLine();
-        var ph = this.getPostgreSQLParser(txt);
+        const ph = this.getPostgreSQLParser(txt);
         switch (lang) {
         case 'plpgsql':
             func_as.func_as().Definition = ph.plsqlroot();
@@ -79,47 +89,54 @@ var PostgreSQLParserBase = /** @class */ (function (_super) {
        }
    }
 };
 
 PostgreSQLParserBase.prototype.TrimQuotes = function(s) {
     return (!s) ? s : s.substring(1, s.length() - 1);
 };
 
 PostgreSQLParserBase.prototype.unquote = function(s) {
-    var slength = s.length();
+    const slength = s.length();
-    var r = '';
+    const r = '';
-    var i = 0;
+    let i = 0;
     while (i < slength) {
-        var c = s.charAt(i);
+        const c = s.charAt(i);
         r.append(c);
-        if (c === '\'' && i < slength - 1 && (s.charAt(i + 1) === '\''))
+        if (c === '\'' && i < slength - 1 && (s.charAt(i + 1) === '\'')) {
             i++;
+        }
         i++;
     }
     return r.toString();
 };
 
 PostgreSQLParserBase.prototype.GetRoutineBodyString = function(rule) {
-    var anysconst = rule.anysconst();
+    const anysconst = rule.anysconst();
-    // eslint-disable-next-line new-cap
-    var StringConstant = anysconst.StringConstant();
-    if (null !== StringConstant)
+    const StringConstant = anysconst.StringConstant();
+    if (!!StringConstant) {
         return this.unquote(this.TrimQuotes(StringConstant.getText()));
-    var UnicodeEscapeStringConstant = anysconst.UnicodeEscapeStringConstant();
-    if (null !== UnicodeEscapeStringConstant)
+    }
+    const UnicodeEscapeStringConstant = anysconst.UnicodeEscapeStringConstant();
+    if (!!UnicodeEscapeStringConstant) {
         return this.TrimQuotes(UnicodeEscapeStringConstant.getText());
-    var EscapeStringConstant = anysconst.EscapeStringConstant();
-    if (null !== EscapeStringConstant)
+    }
+    const EscapeStringConstant = anysconst.EscapeStringConstant();
+    if (!!EscapeStringConstant) {
         return this.TrimQuotes(EscapeStringConstant.getText());
-    var result = '';
-    var dollartext = anysconst.DollarText();
-    for (var _i = 0, dollartext_1 = dollartext; _i < dollartext_1.length; _i++) {
-        var s = dollartext_1[_i];
+    }
+    let result = '';
+    const dollartext = anysconst.DollarText();
+    for (let _i = 0, dollartext_1 = dollartext; _i < dollartext_1.length; _i++) {
+        const s = dollartext_1[_i];
         result += s.getText();
     }
     return result;
 };
 
 PostgreSQLParserBase.getPostgreSQLParser = function(script) {
-    var charStream = CharStreams.fromString(script);
+    const charStream = CharStreams.fromString(script);
-    var lexer = new PostgreSQLLexer_1.PostgreSQLLexer(charStream);
+    const lexer = new PostgreSQLLexer_1.PostgreSQLLexer(charStream);
-    var tokens = new CommonTokenStream(lexer);
+    const tokens = new CommonTokenStream(lexer);
-    var parser = new PostgreSQLParser_1.PostgreSQLParser(tokens);
+    const parser = new PostgreSQLParser_1.PostgreSQLParser(tokens);
     lexer.removeErrorListeners();
     parser.removeErrorListeners();
     // LexerDispatchingErrorListener listener_lexer = new LexerDispatchingErrorListener((Lexer)(((CommonTokenStream)(this.getInputStream())).getTokenSource()));
@@ -128,6 +145,5 @@ var PostgreSQLParserBase = /** @class */ (function (_super) {
     // parser.addErrorListener(listener_parser);
     return parser;
 };
-    return PostgreSQLParserBase;
-}(antlr4.Parser));
 exports.PostgreSQLParserBase = PostgreSQLParserBase;
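
Note: unquote is meant to collapse the doubled single quotes that SQL string constants use for escaping ('' becomes '), but the committed body still leans on Java idioms (s.length(), r.append()) that plain JavaScript strings do not have. A plain-JS sketch of the transformation the method appears intended to perform, assuming collapsing doubled quotes is the only behavior needed (not part of the commit):

    // Sketch only: collapse SQL-style doubled single quotes.
    function unquoteSql(body) {
        return body.replace(/''/g, "'");
    }

    console.log(unquoteSql("it''s a test")); // it's a test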