feat: upgrade antlr4 to 4.12.0 (#88)

Ziv
2023-05-04 10:13:05 +08:00
committed by GitHub
parent c0842b3e07
commit c1c72def30
116 changed files with 552721 additions and 609942 deletions


@@ -1,21 +1,27 @@
 import { Token, Lexer } from 'antlr4';
-import { ParseTreeWalker } from 'antlr4/tree';
+import { ParseTreeWalker, CommonTokenStream } from 'antlr4';
+import type { Parser } from 'antlr4/src/antlr4';
 import ParserErrorListener, {
   ParserError,
   ErrorHandler,
   ParserErrorCollector,
 } from './parserErrorListener';
+interface IParser {
+  // Lost in type definition
+  ruleNames: string[];
+  // Customized in our parser
+  program(): any;
+}
 /**
  * Custom Parser class; subclasses need to extend it.
  */
-export default abstract class BasicParser<C = any> {
-  private _parser;
+export default abstract class BasicParser {
+  private _parser: IParser & Parser;
   public parse(
     input: string,
-    errorListener?: ErrorHandler,
+    errorListener?: ErrorHandler<any>,
   ) {
     const parser = this.createParser(input);
     this._parser = parser;
@@ -46,33 +52,31 @@ export default abstract class BasicParser<C = any> {
    * Create antlr4 Lexer object
    * @param input source string
    */
-  public abstract createLexer(input: string): Lexer;
+  public abstract createLexer(input: string);
   /**
    * Create Parser by lexer
    * @param lexer Lexer
    */
-  public abstract createParserFromLexer(lexer: Lexer);
+  public abstract createParserFromLexer(lexer);
   /**
    * Visit parser tree
    * @param parserTree
    */
   // public abstract visit(visitor: any, parserTree: any);
   /**
-   * The source string
+   * Get all Tokens of input string
    * @param input string
    * @returns Token[]
    */
-  public getAllTokens(input: string): Token[] {
-    return this.createLexer(input).getAllTokens();
+  public getAllTokens(input: string): string[] {
+    const lexer = this.createLexer(input);
+    const tokensStream = new CommonTokenStream(lexer);
+    tokensStream.fill();
+    return tokensStream.tokens;
   };
   /**
    * Get Parser instance by input string
    * @param input
    */
-  public createParser(input: string) {
+  public createParser(input: string): IParser & Parser {
     const lexer = this.createLexer(input);
     const parser: any = this.createParserFromLexer(lexer);
     parser.buildParseTrees = true;
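
Taken together, the two hunks above change the public surface of BasicParser. A minimal consumer-side sketch of the upgraded API (not part of the commit; the FlinkSQL subclass appears further down, and the import path and SQL text are illustrative):

import FlinkSQL from '../flinksql'; // path assumed

const sql = new FlinkSQL();
// parse() wires createLexer/createParserFromLexer together and reports
// syntax errors through the now-generic ErrorHandler<any> callback.
sql.parse('SELECT id FROM tb1;', (err) => {
  console.error(`line ${err.startLine}: ${err.message}`);
});
// getAllTokens() now fills a CommonTokenStream instead of calling
// lexer.getAllTokens(), so the trailing EOF token is included.
const tokens = sql.getAllTokens('SELECT id FROM tb1;');
console.log(tokens.length);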


@@ -1,6 +1,4 @@
-import { Token, Recognizer } from 'antlr4';
-import { ErrorListener } from 'antlr4/error';
+import { Token, Recognizer, ErrorListener, RecognitionException } from 'antlr4';
 export interface ParserError {
   startLine: number;
   endLine: number;
@@ -9,8 +7,8 @@ export interface ParserError {
   message: string;
 }
-export interface SyntaxError {
-  recognizer: Recognizer;
+export interface SyntaxError<T> {
+  recognizer: Recognizer<T>;
   offendingSymbol: Token;
   line: number;
   charPositionInLine: number;
@@ -18,9 +16,13 @@ export interface SyntaxError {
   e: any;
 }
-export type ErrorHandler = (err: ParserError, errOption: SyntaxError) => void;
+type ErrorOffendingSymbol = {
+  text: string;
+};
-export class ParserErrorCollector extends ErrorListener {
+export type ErrorHandler<T> = (err: ParserError, errOption: SyntaxError<T>) => void;
+export class ParserErrorCollector extends ErrorListener<ErrorOffendingSymbol> {
   private _errors: ParserError[];
   constructor(error: ParserError[]) {
@@ -29,11 +31,11 @@ export class ParserErrorCollector extends ErrorListener {
   }
   syntaxError(
-    recognizer: Recognizer, offendingSymbol: Token, line: number,
-    charPositionInLine: number, msg: string, e: any,
+    recognizer: Recognizer<ErrorOffendingSymbol>, offendingSymbol: ErrorOffendingSymbol, line: number,
+    charPositionInLine: number, msg: string, e: RecognitionException,
   ) {
     let endCol = charPositionInLine + 1;
-    if (offendingSymbol &&offendingSymbol.text !== null) {
+    if (offendingSymbol && offendingSymbol.text !== null) {
       endCol = charPositionInLine + offendingSymbol.text.length;
     }
     this._errors.push({
@@ -47,20 +49,20 @@ export class ParserErrorCollector extends ErrorListener {
 }
-export default class ParserErrorListener extends ErrorListener {
+export default class ParserErrorListener extends ErrorListener<ErrorOffendingSymbol> {
   private _errorHandler;
-  constructor(errorListener: ErrorHandler) {
+  constructor(errorListener: ErrorHandler<ErrorOffendingSymbol>) {
     super();
     this._errorHandler = errorListener;
   }
   syntaxError(
-    recognizer: Recognizer, offendingSymbol: Token, line: number,
+    recognizer: Recognizer<ErrorOffendingSymbol>, offendingSymbol: ErrorOffendingSymbol, line: number,
     charPositionInLine: number, msg: string, e: any,
   ) {
     let endCol = charPositionInLine + 1;
-    if (offendingSymbol &&offendingSymbol.text !== null) {
+    if (offendingSymbol && offendingSymbol.text !== null) {
       endCol = charPositionInLine + offendingSymbol.text.length;
     }
     if (this._errorHandler) {
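
Both listeners now extend the 4.12 generic ErrorListener<T>. A hedged sketch of attaching them (addErrorListener/removeErrorListeners are standard antlr4 Recognizer methods; import paths assumed):

import FlinkSQL from '../flinksql'; // path assumed
import ParserErrorListener, { ParserError, ParserErrorCollector } from './parserErrorListener';

const errors: ParserError[] = [];
const parser = new FlinkSQL().createParser('SELECT FROM tb1;'); // deliberately invalid SQL
parser.removeErrorListeners();
// ParserErrorCollector pushes every ParserError into the array it was given...
parser.addErrorListener(new ParserErrorCollector(errors));
// ...while ParserErrorListener forwards each error to a single callback.
parser.addErrorListener(new ParserErrorListener((err) => console.error(err.message)));
parser.program();
console.log(errors); // line/column ranges plus messages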


@@ -1,17 +1,17 @@
-import { InputStream, CommonTokenStream, Lexer } from 'antlr4';
-import { FlinkSqlLexer } from '../lib/flinksql/FlinkSqlLexer';
-import { FlinkSqlParser } from '../lib/flinksql/FlinkSqlParser';
+import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
+import FlinkSqlLexer from '../lib/flinksql/FlinkSqlLexer';
+import FlinkSqlParser from '../lib/flinksql/FlinkSqlParser';
 import BasicParser from './common/basicParser';
 export default class FlinkSQL extends BasicParser {
-  public createLexer(input: string): Lexer {
-    const chars = new InputStream(input.toUpperCase()); // some lexers only support uppercase tokens, so transform the input first
-    const lexer = <unknown> new FlinkSqlLexer(chars) as Lexer;
+  public createLexer(input: string): FlinkSqlLexer {
+    const chars = new CharStream(input.toUpperCase()); // some lexers only support uppercase tokens, so transform the input first
+    const lexer = new FlinkSqlLexer(chars);
     return lexer;
   }
-  public createParserFromLexer(lexer: Lexer): any {
-    const tokenStream = new CommonTokenStream(lexer);
-    return new FlinkSqlParser(tokenStream);
+  public createParserFromLexer(lexer: Lexer): FlinkSqlParser {
+    const tokens = new CommonTokenStream(lexer);
+    const parser = new FlinkSqlParser(tokens);
+    return parser;
   }
 }
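
Every dialect file below repeats this same migration: InputStream becomes CharStream, named imports of the generated classes become default imports, and the `<unknown> ... as Lexer` casts disappear. A usage sketch for this dialect (statement text illustrative; program() is the entry rule surfaced by IParser):

const flink = new FlinkSQL();
const lexer = flink.createLexer('select * from tb;'); // uppercased internally
const parser = flink.createParserFromLexer(lexer);
const tree = parser.program(); // parse tree of the whole statement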


@@ -1,15 +1,15 @@
-import { InputStream, CommonTokenStream, Lexer } from 'antlr4';
-import { SqlLexer } from '../lib/generic/SqlLexer';
-import { SqlParser } from '../lib/generic/SqlParser';
+import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
+import SqlLexer from '../lib/generic/SqlLexer';
+import SqlParser from '../lib/generic/SqlParser';
 import BasicParser from './common/basicParser';
 export default class GenericSQL extends BasicParser {
-  public createLexer(input: string): Lexer {
-    const chars = new InputStream(input.toUpperCase()); // some lexers only support uppercase tokens, so transform the input first
-    const lexer = <unknown> new SqlLexer(chars) as Lexer;
+  public createLexer(input: string): SqlLexer {
+    const chars = new CharStream(input.toUpperCase()); // some lexers only support uppercase tokens, so transform the input first
+    const lexer = new SqlLexer(chars);
     return lexer;
   }
-  public createParserFromLexer(lexer: Lexer): any {
+  public createParserFromLexer(lexer: Lexer): SqlParser {
     const tokenStream = new CommonTokenStream(lexer);
     return new SqlParser(tokenStream);
   }


@@ -1,15 +1,15 @@
-import { InputStream, CommonTokenStream, Lexer } from 'antlr4';
-import { HiveSqlLexer } from '../lib/hive/HiveSqlLexer';
-import { HiveSql } from '../lib/hive/HiveSql';
+import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
+import HiveSqlLexer from '../lib/hive/HiveSqlLexer';
+import HiveSql from '../lib/hive/HiveSql';
 import BasicParser from './common/basicParser';
 export default class HiveSQL extends BasicParser {
-  public createLexer(input: string): Lexer {
-    const chars = new InputStream(input);
-    const lexer = <unknown> new HiveSqlLexer(chars) as Lexer;
+  public createLexer(input: string): HiveSqlLexer {
+    const chars = new CharStream(input);
+    const lexer = new HiveSqlLexer(chars);
     return lexer;
   }
-  public createParserFromLexer(lexer: Lexer): any {
+  public createParserFromLexer(lexer: Lexer): HiveSql {
     const tokenStream = new CommonTokenStream(lexer);
     return new HiveSql(tokenStream);
   }
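
HiveSQL is the one dialect that does not uppercase its input, so the caller's casing survives into the token stream. A small sketch of the difference (input text illustrative; assumes both dialect classes are imported):

const hive = new HiveSQL();
const hiveTokens = hive.getAllTokens('select id from tb'); // casing preserved
const flinkTokens = new FlinkSQL().getAllTokens('select id from tb'); // lexed as 'SELECT ID FROM TB'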


@@ -1,13 +1,13 @@
-import { InputStream, CommonTokenStream, Lexer } from 'antlr4';
-import { PostgreSQLLexer } from '../lib/pgsql/PostgreSQLLexer';
-import { PostgreSQLParser } from '../lib/pgsql/PostgreSQLParser';
+import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
 import BasicParser from './common/basicParser';
+import PostgreSQLLexer from '../lib/pgsql/PostgreSQLLexer';
+import PostgreSQLParser from '../lib/pgsql/PostgreSQLParser';
 export default class PostgresSQL extends BasicParser {
-  public createLexer(input: string): Lexer {
-    const chars = new InputStream(input.toUpperCase());
-    const lexer = <unknown> new PostgreSQLLexer(chars) as Lexer;
+  public createLexer(input: string): PostgreSQLLexer {
+    const chars = new CharStream(input.toUpperCase());
+    const lexer = new PostgreSQLLexer(chars);
     return lexer;
   }
-  public createParserFromLexer(lexer: Lexer): any {


@@ -1,16 +1,16 @@
-import { InputStream, CommonTokenStream, Lexer } from 'antlr4';
-import { PlSqlLexer } from '../lib/plsql/PlSqlLexer';
-import { PlSqlParser } from '../lib/plsql/PlSqlParser';
+import { CharStream, CommonTokenStream } from 'antlr4';
 import BasicParser from './common/basicParser';
+import PlSqlLexer from '../lib/plsql/PlSqlLexer';
+import PlSqlParser from '../lib/plsql/PlSqlParser';
 export default class PLSQLParser extends BasicParser {
-  public createLexer(input: string): Lexer {
-    const chars = new InputStream(input.toUpperCase());
-    const lexer = <unknown> new PlSqlLexer(chars) as Lexer;
+  public createLexer(input: string): PlSqlLexer {
+    const chars = new CharStream(input.toUpperCase());
+    const lexer = new PlSqlLexer(chars);
     return lexer;
   }
-  public createParserFromLexer(lexer: Lexer): any {
+  public createParserFromLexer(lexer: PlSqlLexer): PlSqlParser {
     const tokenStream = new CommonTokenStream(lexer);
     return new PlSqlParser(tokenStream);
   }


@@ -1,12 +1,12 @@
-import { InputStream, CommonTokenStream, Lexer } from 'antlr4';
-import { SparkSqlLexer } from '../lib/spark/SparkSqlLexer';
-import { SparkSqlParser } from '../lib/spark/SparkSqlParser';
+import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
 import BasicParser from './common/basicParser';
+import SparkSqlLexer from '../lib/spark/SparkSqlLexer';
+import SparkSqlParser from '../lib/spark/SparkSqlParser';
 export default class SparkSQL extends BasicParser {
   public createLexer(input: string): Lexer {
-    const chars = new InputStream(input.toUpperCase()); // some lexers only support uppercase tokens, so transform the input first
-    const lexer = <unknown> new SparkSqlLexer(chars) as Lexer;
+    const chars = new CharStream(input.toUpperCase()); // some lexers only support uppercase tokens, so transform the input first
+    const lexer = new SparkSqlLexer(chars);
     return lexer;
   }
-  public createParserFromLexer(lexer: Lexer): any {
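
The before/after of the construction pattern, condensed into one standalone sketch (SparkSQL shown; the other dialects change identically):

import { CharStream, CommonTokenStream } from 'antlr4';
import SparkSqlLexer from '../lib/spark/SparkSqlLexer';
import SparkSqlParser from '../lib/spark/SparkSqlParser';

// Old antlr4 JS API: const chars = new InputStream(input.toUpperCase());
//                    const lexer = <unknown> new SparkSqlLexer(chars) as Lexer;
// New antlr4 4.12 TS API: direct construction, no casts.
function createSparkParser(input: string): SparkSqlParser {
  const chars = new CharStream(input.toUpperCase()); // Spark keywords are matched uppercase
  const lexer = new SparkSqlLexer(chars);
  return new SparkSqlParser(new CommonTokenStream(lexer));
}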