refactor: migrate antlr4 v4.12.0 to antlr4ts(4.9.0) (#106)
* build: ignore gen folder
* refactor: remove useless code
* fix: correct the JavaScript usage in grammar
* refactor: move to antlr4ts
* fix: remove useless
* fix: update grammars for javascript target
* refactor: migrate to antlr4ts
* refactor: migrate to antlr4ts
* refactor: implements ParserErrorListener
* fix: rename the start reserved word
* refactor: remove unused import
* refactor: migrate to antlr4ts
* test: update the expects of test cases
* refactor: migrate hive to antlr4ts
* refactor: update the incompatible syntax for antlr4ts
* refactor: migrate pgsql grammar to antlr4ts, increasing tests
* refactor: migrate the plsql to antlr4ts
* build: remove unused config
* build: migrate to antlr4ts
* build: migrate ts-jest to @swc/jest
* refactor: migrate to antlr4ts
* build: migrate ts-jest to @swc/jest
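The heart of the migration is the switch from antlr4's default exports and `new CharStream(...)` to antlr4ts's named exports and the `CharStreams.fromString(...)` factory. A minimal sketch of the new pipeline, assuming a generated lexer/parser pair (the `MySqlLexer`/`MySqlParser` names are hypothetical stand-ins for the generated classes in this repo):

import { CharStreams, CommonTokenStream } from 'antlr4ts';
import { MySqlLexer } from '../lib/MySqlLexer';   // hypothetical generated lexer
import { MySqlParser } from '../lib/MySqlParser'; // hypothetical generated parser

// antlr4:   const chars = new CharStream(input.toUpperCase());
// antlr4ts: char streams are created through the CharStreams factory instead.
function createParser(input: string): MySqlParser {
    const chars = CharStreams.fromString(input.toUpperCase());
    const lexer = new MySqlLexer(chars);
    const tokenStream = new CommonTokenStream(lexer);
    return new MySqlParser(tokenStream);
}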
@@ -1,12 +1,13 @@
-import { ParseTreeWalker, CommonTokenStream } from 'antlr4';
-import type { Parser } from 'antlr4/src/antlr4';
+import { Parser } from 'antlr4ts';
+import { ParseTreeWalker } from 'antlr4ts/tree';
 
 import ParserErrorListener, {
     ParserError,
     ErrorHandler,
     ParserErrorCollector,
 } from './parserErrorListener';
 
-interface IParser {
+interface IParser extends Parser {
+    // Lost in type definition
     ruleNames: string[];
     // Customized in our parser
@@ -17,7 +18,7 @@ interface IParser {
  * Custom Parser class; subclasses need to extend it.
  */
 export default abstract class BasicParser {
-    private _parser: IParser & Parser;
+    private _parser: IParser;
 
     public parse(
         input: string,
@@ -66,16 +67,14 @@ export default abstract class BasicParser {
      */
     public getAllTokens(input: string): string[] {
         const lexer = this.createLexer(input);
-        const tokensStream = new CommonTokenStream(lexer);
-        tokensStream.fill();
-        return tokensStream.tokens;
+        return lexer.getAllTokens().map(token => token.text);
     };
 
     /**
     * Get a Parser instance from an input string
    * @param input
     */
-    public createParser(input: string): IParser & Parser {
+    public createParser(input: string): IParser {
         const lexer = this.createLexer(input);
         const parser: any = this.createParserFromLexer(lexer);
         parser.buildParseTrees = true;
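Note the `getAllTokens` change above: antlr4 required filling a `CommonTokenStream`, while antlr4ts exposes `getAllTokens()` directly on the lexer. A standalone sketch of the new approach; any generated antlr4ts lexer works here:

import { Lexer } from 'antlr4ts';

// Drain a lexer and return the token texts, mirroring BasicParser.getAllTokens.
// Token.text is string | undefined in antlr4ts, hence the fallback.
function tokenTexts(lexer: Lexer): string[] {
    return lexer.getAllTokens().map(token => token.text ?? '');
}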
@@ -1,4 +1,4 @@
-import { Token, Recognizer, ErrorListener, RecognitionException } from 'antlr4';
+import { Token, Recognizer, ParserErrorListener, RecognitionException } from 'antlr4ts';
 export interface ParserError {
     startLine: number;
     endLine: number;
@@ -8,7 +8,7 @@ export interface ParserError {
 }
 
 export interface SyntaxError<T> {
-    recognizer: Recognizer<T>;
+    recognizer: Recognizer<T, any>;
     offendingSymbol: Token;
     line: number;
     charPositionInLine: number;
@@ -22,16 +22,15 @@ type ErrorOffendingSymbol = {
 
 export type ErrorHandler<T> = (err: ParserError, errOption: SyntaxError<T>) => void;
 
-export class ParserErrorCollector extends ErrorListener<ErrorOffendingSymbol> {
+export class ParserErrorCollector implements ParserErrorListener {
     private _errors: ParserError[];
 
     constructor(error: ParserError[]) {
-        super();
         this._errors = error;
     }
 
     syntaxError(
-        recognizer: Recognizer<ErrorOffendingSymbol>, offendingSymbol: ErrorOffendingSymbol, line: number,
+        recognizer: Recognizer<ErrorOffendingSymbol, any>, offendingSymbol: ErrorOffendingSymbol, line: number,
         charPositionInLine: number, msg: string, e: RecognitionException,
     ) {
         let endCol = charPositionInLine + 1;
@@ -49,16 +48,15 @@ export class ParserErrorCollector extends ErrorListener<ErrorOffendingSymbol> {
 }
 
 
-export default class ParserErrorListener extends ErrorListener<ErrorOffendingSymbol> {
+export default class CustomParserErrorListener implements ParserErrorListener {
     private _errorHandler;
 
     constructor(errorListener: ErrorHandler<ErrorOffendingSymbol>) {
-        super();
         this._errorHandler = errorListener;
     }
 
     syntaxError(
-        recognizer: Recognizer<ErrorOffendingSymbol>, offendingSymbol: ErrorOffendingSymbol, line: number,
+        recognizer: Recognizer<ErrorOffendingSymbol, any>, offendingSymbol: ErrorOffendingSymbol, line: number,
         charPositionInLine: number, msg: string, e: any,
     ) {
         let endCol = charPositionInLine + 1;
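antlr4ts models error listeners as an interface (`ParserErrorListener`, an alias of `ANTLRErrorListener<Token>`) rather than a base class, which is why the `extends ErrorListener` / `super()` pattern above becomes `implements` with the constructors' `super()` calls removed. A minimal standalone sketch of the new shape, assuming the antlr4ts listener interface:

import { ParserErrorListener, RecognitionException, Recognizer, Token } from 'antlr4ts';

// Collects syntax errors instead of throwing; mirrors the ParserErrorCollector idea.
class CollectingErrorListener implements ParserErrorListener {
    public errors: string[] = [];

    syntaxError(
        recognizer: Recognizer<Token, any>,
        offendingSymbol: Token | undefined,
        line: number,
        charPositionInLine: number,
        msg: string,
        e: RecognitionException | undefined,
    ): void {
        this.errors.push(`line ${line}:${charPositionInLine} ${msg}`);
    }
}

A listener like this would be attached with `parser.removeErrorListeners()` followed by `parser.addErrorListener(new CollectingErrorListener())`.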
@@ -1,14 +1,14 @@
-import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
-import FlinkSqlLexer from '../lib/flinksql/FlinkSqlLexer';
-import FlinkSqlParser from '../lib/flinksql/FlinkSqlParser';
+import { CharStreams, CommonTokenStream } from 'antlr4ts';
+import { FlinkSqlLexer } from '../lib/flinksql/FlinkSqlLexer';
+import { FlinkSqlParser } from '../lib/flinksql/FlinkSqlParser';
 import BasicParser from './common/basicParser';
 export default class FlinkSQL extends BasicParser {
     public createLexer(input: string): FlinkSqlLexer {
-        const chars = new CharStream(input.toUpperCase()); // Some lexers only support uppercase tokens, so the input is transformed
+        const chars = CharStreams.fromString(input.toUpperCase()); // Some lexers only support uppercase tokens, so the input is transformed
         const lexer = new FlinkSqlLexer(chars);
         return lexer;
     }
-    public createParserFromLexer(lexer: Lexer): FlinkSqlParser {
+    public createParserFromLexer(lexer: FlinkSqlLexer): FlinkSqlParser {
         const tokens = new CommonTokenStream(lexer);
         const parser = new FlinkSqlParser(tokens);
         return parser;
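For reference, a usage sketch of one migrated dialect class; the import path here is hypothetical, and only the methods visible in this diff are used:

import FlinkSQL from './flinkSQL'; // hypothetical path to the class above

const flink = new FlinkSQL();
// getAllTokens returns the token texts, per the BasicParser change above.
const tokenTexts: string[] = flink.getAllTokens('SELECT id FROM tbl');

The remaining dialect files below follow the same pattern: antlr4ts named imports, `CharStreams.fromString`, and narrowed lexer/parser types.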
@@ -1,15 +1,15 @@
-import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
-import SqlLexer from '../lib/generic/SqlLexer';
-import SqlParser from '../lib/generic/SqlParser';
+import { CharStreams, CommonTokenStream } from 'antlr4ts';
+import { SqlLexer } from '../lib/generic/SqlLexer';
+import { SqlParser } from '../lib/generic/SqlParser';
 import BasicParser from './common/basicParser';
 
 export default class GenericSQL extends BasicParser {
     public createLexer(input: string): SqlLexer {
-        const chars = new CharStream(input.toUpperCase()); // Some lexers only support uppercase tokens, so the input is transformed
+        const chars = CharStreams.fromString(input.toUpperCase()); // Some lexers only support uppercase tokens, so the input is transformed
         const lexer = new SqlLexer(chars);
         return lexer;
     }
-    public createParserFromLexer(lexer: Lexer): SqlParser {
+    public createParserFromLexer(lexer: SqlLexer): SqlParser {
         const tokenStream = new CommonTokenStream(lexer);
         return new SqlParser(tokenStream);
     }
@@ -1,15 +1,15 @@
-import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
-import HiveSqlLexer from '../lib/hive/HiveSqlLexer';
-import HiveSql from '../lib/hive/HiveSql';
+import { CharStreams, CommonTokenStream } from 'antlr4ts';
+import { HiveSqlLexer } from '../lib/hive/HiveSqlLexer';
+import { HiveSql } from '../lib/hive/HiveSql';
 import BasicParser from './common/basicParser';
 
 export default class HiveSQL extends BasicParser {
     public createLexer(input: string): HiveSqlLexer {
-        const chars = new CharStream(input);
+        const chars = CharStreams.fromString(input.toUpperCase());
         const lexer = new HiveSqlLexer(chars);
         return lexer;
     }
-    public createParserFromLexer(lexer: Lexer): HiveSql {
+    public createParserFromLexer(lexer: HiveSqlLexer): HiveSql {
         const tokenStream = new CommonTokenStream(lexer);
         return new HiveSql(tokenStream);
     }
@@ -1,16 +1,16 @@
-import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
+import { CharStreams, CommonTokenStream, Lexer } from 'antlr4ts';
 
 import BasicParser from './common/basicParser';
-import PostgreSQLLexer from '../lib/pgsql/PostgreSQLLexer';
-import PostgreSQLParser from '../lib/pgsql/PostgreSQLParser';
+import { PostgreSQLLexer } from '../lib/pgsql/PostgreSQLLexer';
+import { PostgreSQLParser } from '../lib/pgsql/PostgreSQLParser';
 
 export default class PostgresSQL extends BasicParser {
     public createLexer(input: string): PostgreSQLLexer {
-        const chars = new CharStream(input.toUpperCase());
+        const chars = CharStreams.fromString(input.toUpperCase());
         const lexer = new PostgreSQLLexer(chars);
         return lexer;
     }
-    public createParserFromLexer(lexer: Lexer): any {
+    public createParserFromLexer(lexer: Lexer): PostgreSQLParser {
         const tokenStream = new CommonTokenStream(lexer);
         return new PostgreSQLParser(tokenStream);
     }
@@ -1,16 +1,16 @@
-import { CharStream, CommonTokenStream } from 'antlr4';
+import { CharStreams, CommonTokenStream, Lexer } from 'antlr4ts';
 
 import BasicParser from './common/basicParser';
-import PlSqlLexer from '../lib/plsql/PlSqlLexer';
-import PlSqlParser from '../lib/plsql/PlSqlParser';
+import { PlSqlLexer } from '../lib/plsql/PlSqlLexer';
+import { PlSqlParser } from '../lib/plsql/PlSqlParser';
 
 export default class PLSQLParser extends BasicParser {
     public createLexer(input: string): PlSqlLexer {
-        const chars = new CharStream(input.toUpperCase());
+        const chars = CharStreams.fromString(input.toUpperCase());
         const lexer = new PlSqlLexer(chars);
         return lexer;
     }
-    public createParserFromLexer(lexer: PlSqlLexer): PlSqlParser {
+    public createParserFromLexer(lexer: Lexer): PlSqlParser {
         const tokenStream = new CommonTokenStream(lexer);
         return new PlSqlParser(tokenStream);
     }
@@ -1,15 +1,15 @@
-import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
+import { CharStreams, CommonTokenStream } from 'antlr4ts';
 import BasicParser from './common/basicParser';
-import SparkSqlLexer from '../lib/spark/SparkSqlLexer';
-import SparkSqlParser from '../lib/spark/SparkSqlParser';
+import { SparkSqlLexer } from '../lib/spark/SparkSqlLexer';
+import { SparkSqlParser } from '../lib/spark/SparkSqlParser';
 
 export default class SparkSQL extends BasicParser {
-    public createLexer(input: string): Lexer {
-        const chars = new CharStream(input.toUpperCase()); // Some lexers only support uppercase tokens, so the input is transformed
+    public createLexer(input: string): SparkSqlLexer {
+        const chars = CharStreams.fromString(input.toUpperCase()); // Some lexers only support uppercase tokens, so the input is transformed
         const lexer = new SparkSqlLexer(chars);
         return lexer;
     }
-    public createParserFromLexer(lexer: Lexer): any {
+    public createParserFromLexer(lexer: SparkSqlLexer): SparkSqlParser {
         const tokenStream = new CommonTokenStream(lexer);
         return new SparkSqlParser(tokenStream);
     }
@@ -1,10 +1,10 @@
-import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
-import TrinoSqlLexer from '../lib/trinosql/TrinoSqlLexer';
-import TrinoSqlParser from '../lib/trinosql/TrinoSqlParser';
+import { CharStreams, CommonTokenStream, Lexer } from 'antlr4ts';
+import { TrinoSqlLexer } from '../lib/trinosql/TrinoSqlLexer';
+import { TrinoSqlParser } from '../lib/trinosql/TrinoSqlParser';
 import BasicParser from './common/basicParser';
 export default class trinoSQL extends BasicParser {
     public createLexer(input: string): TrinoSqlLexer {
-        const chars = new CharStream(input.toUpperCase()); // Some lexers only support uppercase tokens, so the input is transformed
+        const chars = CharStreams.fromString(input.toUpperCase()); // Some lexers only support uppercase tokens, so the input is transformed
         const lexer = new TrinoSqlLexer(chars);
         return lexer;
     }