build: enable ts strict check (#279)

* feat: skip type checking for the lib dir

* feat: enable strict checking for the src dir

* test: update test tsconfig.json

* feat: remove usages of the any type

* feat: do not export AbstractParseTreeVisitor

* feat: export StmtContextType as an enum

* build: improve antlr4 script
Hayden 2024-03-27 19:04:16 +08:00 committed by GitHub
parent bb0fad1dbe
commit c6615aecac
77 changed files with 439 additions and 349 deletions

View File

@@ -67,9 +67,16 @@ function main() {
         });
     } else if (argv.lang) {
         // compile single: yarn antlr4 --lang=mysql
-        const supportedLanguage = languageEntries.some((language) => language === argv.lang);
-        if (supportedLanguage) {
-            compile(argv.lang);
+        const supportedLanguage = languageEntries.find((language) =>
+            language.startsWith(argv.lang)
+        );
+        if (argv.lang === 'all') {
+            languageEntries.forEach((language) => {
+                compile(language);
+            });
+        } else if (supportedLanguage) {
+            compile(supportedLanguage);
         } else {
             console.error(
                 chalk.bold.red('\n[Invalid language]:'),
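The script now resolves a language by prefix (the first entry that starts with the given --lang value) and treats --lang=all as "compile every grammar". A minimal sketch of the resolution behavior, with an assumed entry list for illustration:

    // Assumed entries for illustration; the real list is derived from src/grammar.
    const languageEntries = ['flink', 'hive', 'impala', 'mysql', 'plsql', 'postgresql', 'spark', 'trino'];

    function resolve(lang: string): string[] {
        if (lang === 'all') return languageEntries; // yarn antlr4 --lang=all
        const match = languageEntries.find((language) => language.startsWith(lang));
        return match ? [match] : []; // empty => the invalid-language error above
    }

    resolve('my');  // ['mysql'] via prefix match
    resolve('all'); // every grammar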

View File

@@ -19,7 +19,9 @@ function processFile(filePath) {
         if (slices.length !== 2) return;
         firstLineContent = `// Generated from dt-sql-parser/src/grammar/` + slices[1];
-        fs.writeFileSync(filePath, firstLineContent + restContent, 'utf-8');
+        const tsNoCheckComment = '\n\n// @ts-nocheck';
+        fs.writeFileSync(filePath, firstLineContent + tsNoCheckComment + restContent, 'utf-8');
     } catch (error) {
         console.error(error);
     }
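With this change, every regenerated file under src/lib starts with the normalized header plus a // @ts-nocheck directive, which exempts the ANTLR-generated code from the newly enabled strict checks while hand-written src code stays checked. The repeated hunks below are this script's output; each generated file now begins like:

    // Generated from dt-sql-parser/src/grammar/flink/FlinkSqlLexer.g4 by ANTLR 4.13.1

    // @ts-nocheck
    import * as antlr from "antlr4ng";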

View File

@@ -1,5 +1,3 @@
-export { AbstractParseTreeVisitor } from 'antlr4ng';
-
 export {
     MySQL,
     FlinkSQL,
@@ -40,13 +38,15 @@ export {
     EntityContextType as SyntaxContextType,
 } from './parser/common/types';
 
+export { StmtContextType } from './parser/common/entityCollector';
 export type { CaretPosition, Suggestions, SyntaxSuggestion } from './parser/common/types';
 export type { WordRange, TextSlice } from './parser/common/textAndWord';
 export type { SyntaxError, ParseError, ErrorListener } from './parser/common/parseErrorListener';
-export type { StmtContextType, StmtContext, EntityContext } from './parser/common/entityCollector';
+export type { StmtContext, EntityContext } from './parser/common/entityCollector';
 
 /**
  * @deprecated Legacy utils will be removed when the stable version is released.
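StmtContextType moves from the type-only export list to a value export, so consumers can now use it as a runtime enum. A hedged usage sketch:

    import { StmtContextType } from 'dt-sql-parser';

    // As a type-only export, member access like this would not have compiled.
    const isCommonStmt = (type: StmtContextType) => type === StmtContextType.COMMON_STMT;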

View File

@@ -12,7 +12,7 @@ export abstract class SQLParserBase<T = antlr.ParserRuleContext> extends antlr.P
     public shouldMatchEmpty () {
         return this.entityCollecting
-            && this.tokenStream.LT(-1).tokenIndex <= this.caretTokenIndex
-            && this.tokenStream.LT(1).tokenIndex >= this.caretTokenIndex
+            && (this.tokenStream.LT(-1)?.tokenIndex ?? Infinity) <= this.caretTokenIndex
+            && (this.tokenStream.LT(1)?.tokenIndex ?? -Infinity) >= this.caretTokenIndex
     }
 }
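Under strictNullChecks, tokenStream.LT() is typed as possibly null, so the lookahead token indexes need fallbacks; Infinity and -Infinity are chosen so that a missing token can never satisfy its bound. A distilled sketch of the guard:

    // A null lookahead falls back to a value that makes its comparison false,
    // so the whole predicate fails safely instead of throwing.
    function caretBetween(
        prev: { tokenIndex: number } | null,
        next: { tokenIndex: number } | null,
        caret: number
    ): boolean {
        return (prev?.tokenIndex ?? Infinity) <= caret && (next?.tokenIndex ?? -Infinity) >= caret;
    }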

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/flink/FlinkSqlLexer.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import * as antlr from "antlr4ng";
 import { Token } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/flink/FlinkSqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import * as antlr from "antlr4ng";
 import { Token } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/flink/FlinkSqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/flink/FlinkSqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import { AbstractParseTreeVisitor } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/hive/HiveSqlLexer.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import * as antlr from "antlr4ng";
 import { Token } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/hive/HiveSqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import * as antlr from "antlr4ng";
 import { Token } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/hive/HiveSqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/hive/HiveSqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import { AbstractParseTreeVisitor } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/impala/ImpalaSqlLexer.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import * as antlr from "antlr4ng";
 import { Token } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/impala/ImpalaSqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import * as antlr from "antlr4ng";
 import { Token } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/impala/ImpalaSqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/impala/ImpalaSqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import { AbstractParseTreeVisitor } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/mysql/MySqlLexer.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import * as antlr from "antlr4ng";
 import { Token } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/mysql/MySqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import * as antlr from "antlr4ng";
 import { Token } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/mysql/MySqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/mysql/MySqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import { AbstractParseTreeVisitor } from "antlr4ng";

View File

@@ -1,9 +1,6 @@
 import { Lexer } from "antlr4ng";
 
 export abstract class PlSqlBaseLexer extends Lexer {
-    _interp: any;
-
     IsNewlineAtPos(pos: number): boolean {
         const la = this._input.LA(pos);
         return la == -1 || String.fromCharCode(la) == '\n';

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/plsql/PlSqlLexer.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import * as antlr from "antlr4ng";
 import { Token } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/plsql/PlSqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import * as antlr from "antlr4ng";
 import { Token } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/plsql/PlSqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/plsql/PlSqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import { AbstractParseTreeVisitor } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/postgresql/PostgreSqlLexer.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import * as antlr from "antlr4ng";
 import { Token } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/postgresql/PostgreSqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import * as antlr from "antlr4ng";
 import { Token } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/postgresql/PostgreSqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/postgresql/PostgreSqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import { AbstractParseTreeVisitor } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/spark/SparkSqlLexer.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import * as antlr from "antlr4ng";
 import { Token } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/spark/SparkSqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import * as antlr from "antlr4ng";
 import { Token } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/spark/SparkSqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/spark/SparkSqlParser.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import { AbstractParseTreeVisitor } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/trino/TrinoSql.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import * as antlr from "antlr4ng";
 import { Token } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/trino/TrinoSql.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/trino/TrinoSql.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import * as antlr from "antlr4ng";
 import { Token } from "antlr4ng";

View File

@@ -1,5 +1,7 @@
 // Generated from dt-sql-parser/src/grammar/trino/TrinoSql.g4 by ANTLR 4.13.1
+
+// @ts-nocheck
 
 import { AbstractParseTreeVisitor } from "antlr4ng";

View File

@@ -33,12 +33,12 @@ export abstract class BasicSQL<
     protected _lexer: L;
     protected _tokenStream: CommonTokenStream;
     protected _parser: P;
-    protected _parseTree: PRC;
-    protected _parsedInput: string = null;
+    protected _parseTree: PRC | null;
+    protected _parsedInput: string;
     protected _parseErrors: ParseError[] = [];
     /** members for cache end */
 
-    private _errorListener: ErrorListener<any> = (error) => {
+    private _errorListener: ErrorListener = (error) => {
         this._parseErrors.push(error);
     };
@@ -90,7 +90,7 @@ export abstract class BasicSQL<
      * Create an antlr4 lexer from input.
      * @param input string
      */
-    public createLexer(input: string, errorListener?: ErrorListener<any>) {
+    public createLexer(input: string, errorListener?: ErrorListener) {
         const charStreams = CharStreams.fromString(input);
         const lexer = this.createLexerFromCharStream(charStreams);
         if (errorListener) {
@@ -104,7 +104,7 @@ export abstract class BasicSQL<
      * Create an antlr4 parser from input.
      * @param input string
      */
-    public createParser(input: string, errorListener?: ErrorListener<any>) {
+    public createParser(input: string, errorListener?: ErrorListener) {
         const lexer = this.createLexer(input, errorListener);
         const tokenStream = new CommonTokenStream(lexer);
         const parser = this.createParserFromTokenStream(tokenStream);
@@ -123,7 +123,7 @@ export abstract class BasicSQL<
     * @param errorListener listen parse errors and lexer errors.
     * @returns parseTree
     */
-    public parse(input: string, errorListener?: ErrorListener<any>) {
+    public parse(input: string, errorListener?: ErrorListener) {
         const parser = this.createParser(input, errorListener);
         parser.buildParseTrees = true;
         parser.errorHandler = new ErrorStrategy();
@@ -168,9 +168,9 @@ export abstract class BasicSQL<
     * @param errorListener listen errors
     * @returns parseTree
     */
-    private parseWithCache(input: string, errorListener?: ErrorListener<any>) {
+    private parseWithCache(input: string, errorListener?: ErrorListener): PRC {
         // Avoid parsing the same input repeatedly.
-        if (this._parsedInput === input && !errorListener) {
+        if (this._parsedInput === input && !errorListener && this._parseTree) {
             return this._parseTree;
         }
         this._parseErrors = [];
@@ -225,9 +225,9 @@ export abstract class BasicSQL<
     * If exist syntax error it will return null.
     * @param input source string
     */
-    public splitSQLByStatement(input): TextSlice[] {
+    public splitSQLByStatement(input: string): TextSlice[] | null {
         const errors = this.validate(input);
-        if (errors.length) {
+        if (errors.length || !this._parseTree) {
             return null;
         }
         const splitListener = this.splitListener;
@@ -236,9 +236,11 @@ export abstract class BasicSQL<
         this.listen(splitListener, this._parseTree);
 
-        const res = splitListener.statementsContext.map((context) => {
-            return ctxToText(context, this._parsedInput);
-        });
+        const res = splitListener.statementsContext
+            .map((context) => {
+                return ctxToText(context, this._parsedInput);
+            })
+            .filter(Boolean) as TextSlice[];
 
         return res;
     }
@@ -258,6 +260,8 @@ export abstract class BasicSQL<
         if (!splitListener) return null;
 
         this.parseWithCache(input);
+        if (!this._parseTree) return null;
+
         let sqlParserIns = this._parser;
         const allTokens = this.getAllTokens(input);
         let caretTokenIndex = findCaretTokenIndex(caretPosition, allTokens);
@@ -281,8 +285,8 @@ export abstract class BasicSQL<
             * The boundaries of this range must be statements with no syntax errors.
             * This can ensure the stable performance of the C3.
             */
-            let startStatement: ParserRuleContext;
-            let stopStatement: ParserRuleContext;
+            let startStatement: ParserRuleContext | null = null;
+            let stopStatement: ParserRuleContext | null = null;
 
             for (let index = 0; index < statementCount; index++) {
                 const ctx = statementsContext[index];
@@ -297,11 +301,16 @@ export abstract class BasicSQL<
                 const isNextCtxValid =
                     index === statementCount - 1 || !statementsContext[index + 1]?.exception;
 
-                if (ctx.stop.tokenIndex < caretTokenIndex && isPrevCtxValid) {
+                if (ctx.stop && ctx.stop.tokenIndex < caretTokenIndex && isPrevCtxValid) {
                     startStatement = ctx;
                 }
 
-                if (!stopStatement && ctx.start.tokenIndex > caretTokenIndex && isNextCtxValid) {
+                if (
+                    ctx.start &&
+                    !stopStatement &&
+                    ctx.start.tokenIndex > caretTokenIndex &&
+                    isNextCtxValid
+                ) {
                     stopStatement = ctx;
                     break;
                 }
@@ -369,7 +378,9 @@ export abstract class BasicSQL<
     public getAllEntities(input: string, caretPosition?: CaretPosition): EntityContext[] | null {
         const allTokens = this.getAllTokens(input);
-        const caretTokenIndex = findCaretTokenIndex(caretPosition, allTokens);
+        const caretTokenIndex = caretPosition
+            ? findCaretTokenIndex(caretPosition, allTokens)
+            : void 0;
 
         const collectListener = this.createEntityCollector(input, caretTokenIndex);
         // TODO: add entityCollector to all sqlParser implements and remove following if
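With the generic parameter removed (see parseErrorListener.ts below), an ErrorListener is now a plain callback over ParseError. A hedged usage sketch against the public API:

    import { FlinkSQL } from 'dt-sql-parser';
    import type { ErrorListener, ParseError } from 'dt-sql-parser';

    const flink = new FlinkSQL();
    const errors: ParseError[] = [];
    const listener: ErrorListener = (parseError) => errors.push(parseError);
    flink.parse('SELECT * FROM tb1', listener); // errors collected without any <T> bookkeeping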

View File

@@ -34,8 +34,10 @@ export function toStmtContext(
     rootStmt: StmtContext | null,
     parentStmt: StmtContext | null,
     isContainCaret?: boolean
-): StmtContext {
-    const { text: _, ...position } = ctxToText(ctx, input);
+): StmtContext | null {
+    const text = ctxToText(ctx, input);
+    if (!text) return null;
+    const { text: _, ...position } = text;
     return {
         stmtContextType: type,
         position,
@@ -72,8 +74,10 @@ export function toEntityContext(
     input: string,
     belongStmt: StmtContext,
     alias?: BaseAliasContext
-): EntityContext {
-    const { text, ...position } = ctxToWord(ctx, input);
+): EntityContext | null {
+    const word = ctxToWord(ctx, input);
+    if (!word) return null;
+    const { text, ...position } = word;
     const finalAlias = Object.assign({}, baseAlias, alias ?? {});
     return {
         entityContextType: type,
@@ -110,7 +114,7 @@ export abstract class EntityCollector {
     * Always point to the first non-commonStmt at the bottom of the _stmtStack,
     * unless there are only commonStmts in the _stmtStack.
     * */
-    private _rootStmt: StmtContext;
+    private _rootStmt: StmtContext | null;
 
     visitTerminal() {}
@@ -132,11 +136,13 @@ export abstract class EntityCollector {
     }
 
     protected pushStmt(ctx: ParserRuleContext, type: StmtContextType) {
-        let isContainCaret;
+        let isContainCaret: boolean | undefined;
         if (this._caretTokenIndex >= 0) {
             isContainCaret =
+                !!ctx.start &&
+                !!ctx.stop &&
                 ctx.start.tokenIndex <= this._caretTokenIndex &&
-                ctx.stop?.tokenIndex >= this._caretTokenIndex;
+                ctx.stop.tokenIndex >= this._caretTokenIndex;
         }
         const stmtContext = toStmtContext(
             ctx,
@@ -146,6 +152,7 @@ export abstract class EntityCollector {
             this._stmtStack.peek(),
             isContainCaret
         );
+        if (stmtContext) {
             if (
                 this._stmtStack.isEmpty() ||
                 this._stmtStack.peek()?.stmtContextType === StmtContextType.COMMON_STMT
@@ -153,13 +160,14 @@ export abstract class EntityCollector {
                 this._rootStmt = stmtContext;
             }
             this._stmtStack.push(stmtContext);
+        }
 
         return stmtContext;
     }
 
     protected popStmt() {
         const stmtContext = this._stmtStack.pop();
-        if (this._rootStmt === stmtContext) {
+        if (stmtContext && this._rootStmt === stmtContext) {
             this._rootStmt = this._stmtStack.peek();
             if (!this._entityStack.isEmpty()) {
                 this.combineEntitiesAndAdd(stmtContext);
@@ -180,12 +188,14 @@ export abstract class EntityCollector {
             this._stmtStack.peek(),
             alias
         );
+        if (entityContext) {
             if (this._stmtStack.isEmpty()) {
                 this._entitiesSet.add(entityContext);
             } else {
                 // If is inside a statement
                 this._entityStack.push(entityContext);
             }
+        }
 
         return entityContext;
     }
@@ -204,12 +214,11 @@ export abstract class EntityCollector {
             entitiesInsideStmt.unshift(this._entityStack.pop());
         }
 
-        let tmpResults = entitiesInsideStmt;
-        tmpResults = this.combineRootStmtEntities(stmtContext, entitiesInsideStmt);
-
-        while (tmpResults.length) {
-            this._entitiesSet.add(tmpResults.shift());
+        const combinedEntities = this.combineRootStmtEntities(stmtContext, entitiesInsideStmt);
+        while (combinedEntities.length) {
+            const entity = combinedEntities.shift();
+            entity && this._entitiesSet.add(entity);
         }
     }
@@ -235,7 +244,7 @@ export abstract class EntityCollector {
     ): EntityContext[] {
         const columns: EntityContext[] = [];
         const relatedEntities: EntityContext[] = [];
-        let mainEntity: EntityContext = null;
+        let mainEntity: EntityContext | null = null;
 
         const finalEntities = entitiesInsideStmt.reduce((result, entity) => {
             if (entity.belongStmt !== stmtContext) {
                 if (
@@ -262,14 +271,14 @@ export abstract class EntityCollector {
                 result.push(entity);
             }
             return result;
-        }, []);
+        }, [] as EntityContext[]);
 
-        if (columns.length) {
-            mainEntity.columns = columns;
+        if (mainEntity && columns.length) {
+            (mainEntity as EntityContext).columns = columns;
         }
 
-        if (relatedEntities.length) {
-            mainEntity.relatedEntities = relatedEntities;
+        if (mainEntity && relatedEntities.length) {
+            (mainEntity as EntityContext).relatedEntities = relatedEntities;
        }
 
         return finalEntities;
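Because toStmtContext and toEntityContext now return null for contexts that lack start or stop tokens, the collector only ever stores fully positioned entities. A hedged sketch of consuming it through the public API (assuming EntityContext exposes text alongside entityContextType, as this file suggests):

    import { MySQL } from 'dt-sql-parser';

    const mysql = new MySQL();
    // Contexts without start/stop tokens are silently skipped during collection.
    const entities = mysql.getAllEntities('SELECT id FROM user_info;') ?? [];
    entities.forEach((entity) => console.log(entity.text, entity.entityContextType));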

View File

@@ -13,10 +13,10 @@
  * The difference is that it assigns exception to the context.exception when it encounters error.
  */
 export class ErrorStrategy extends DefaultErrorStrategy {
-    public recover(recognizer: Parser, e: RecognitionException): void {
+    public override recover(recognizer: Parser, e: RecognitionException): void {
         // Mark the context as an anomaly
         for (
-            let context: ParserRuleContext | undefined = recognizer.context;
+            let context: ParserRuleContext | null = recognizer.context;
             context;
             context = context.parent
         ) {
@@ -40,7 +40,7 @@ export class ErrorStrategy extends DefaultErrorStrategy {
         this.consumeUntil(recognizer, followSet);
     }
 
-    public recoverInline(recognizer: Parser): Token {
+    public override recoverInline(recognizer: Parser): Token {
         let e: RecognitionException;
         if (this.nextTokensContext === undefined) {
             e = new InputMismatchException(recognizer);
@@ -50,7 +50,7 @@ export class ErrorStrategy extends DefaultErrorStrategy {
         // Mark the context as an anomaly
         for (
-            let context: ParserRuleContext | undefined = recognizer.context;
+            let context: ParserRuleContext | null = recognizer.context;
             context;
             context = context.parent
         ) {

View File

@@ -7,7 +7,10 @@ import { CaretPosition } from './types';
 * @param allTokens all the tokens
 * @returns caretTokenIndex
 */
-export function findCaretTokenIndex(caretPosition: CaretPosition, allTokens: Token[]) {
+export function findCaretTokenIndex(
+    caretPosition: CaretPosition,
+    allTokens: Token[]
+): number | undefined {
     const { lineNumber: caretLine, column: caretCol } = caretPosition;
     let left = 0;
     let right = allTokens.length - 1;
@@ -19,12 +22,12 @@
             right = mid - 1;
         } else if (
             token.line < caretLine ||
-            (token.line === caretLine && token.column + token.text.length + 1 < caretCol)
+            (token.line === caretLine && token.column + (token.text?.length ?? 0) + 1 < caretCol)
         ) {
             left = mid + 1;
         } else {
             return allTokens[mid].tokenIndex;
         }
     }
-    return null;
+    return void 0;
 }
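Returning void 0 instead of null aligns the helper with the optional caretTokenIndex?: number parameters it feeds (createEntityCollector in basicSQL.ts above takes exactly that shape), so the result can be passed straight through. Illustration:

    declare function createEntityCollector(input: string, caretTokenIndex?: number): void;

    // number | undefined is assignable to an optional parameter; number | null is not.
    const caretTokenIndex: number | undefined = void 0;
    createEntityCollector('SELECT 1', caretTokenIndex); // compiles under strict checks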

View File

@@ -24,9 +24,9 @@ export interface ParseError {
 /**
  * The type of error resulting from lexical parsing and parsing.
  */
-export interface SyntaxError<T> {
+export interface SyntaxError {
     readonly recognizer: Recognizer<ATNSimulator>;
-    readonly offendingSymbol: Token;
+    readonly offendingSymbol: Token | null;
     readonly line: number;
     readonly charPositionInLine: number;
     readonly msg: string;
@@ -37,12 +37,12 @@
  * ErrorListener will be invoked when it encounters a parsing error.
  * Includes lexical errors and parsing errors.
  */
-export type ErrorListener<T> = (parseError: ParseError, originalError: SyntaxError<T>) => void;
+export type ErrorListener = (parseError: ParseError, originalError: SyntaxError) => void;
 
 export class ParseErrorListener implements ANTLRErrorListener {
-    private _errorListener: ErrorListener<Token>;
+    private _errorListener: ErrorListener;
 
-    constructor(errorListener: ErrorListener<Token>) {
+    constructor(errorListener: ErrorListener) {
         this._errorListener = errorListener;
     }
@@ -54,7 +54,7 @@ export class ParseErrorListener implements ANTLRErrorListener {
     syntaxError(
         recognizer: Recognizer<ATNSimulator>,
-        offendingSymbol,
+        offendingSymbol: Token | null,
         line: number,
         charPositionInLine: number,
         msg: string,

View File

@@ -9,7 +9,7 @@ export class SimpleStack<T> {
     }
 
     pop(): T {
-        return this.stack.pop();
+        return this.stack.pop() as T;
     }
 
     peek(): T {

View File

@@ -43,20 +43,27 @@ export interface TextSlice extends TextPosition {
 export function tokenToWord(token: Token, input: string): WordPosition & { text: string } {
     const startIndex = token.start;
     const endIndex = token.stop;
+    const text = token.text ?? '';
     return {
-        text: token.text,
+        text,
         line: token.line,
         startIndex,
         endIndex,
         startColumn: token.column + 1,
-        endColumn: token.column + 1 + token.text.length,
+        endColumn: token.column + 1 + text.length,
     };
 }
 
 /**
  * Convert ParserRuleContext to Word
  */
-export function ctxToWord(ctx: ParserRuleContext, input: string): WordPosition & { text: string } {
+export function ctxToWord(
+    ctx: ParserRuleContext,
+    input: string
+): (WordPosition & { text: string }) | null {
+    if (!ctx.start || !ctx.stop) {
+        return null;
+    }
     const startIndex = ctx.start.start;
     const endIndex = ctx.stop.stop;
     const text = input.slice(startIndex, endIndex + 1);
@@ -66,14 +73,20 @@
         startIndex,
         endIndex,
         startColumn: ctx.start.column + 1,
-        endColumn: ctx.stop.column + 1 + ctx.stop.text.length,
+        endColumn: ctx.stop.column + 1 + (ctx.stop.text?.length ?? 0),
     };
 }
 
 /**
  * Convert ParserRuleContext to Text
  */
-export function ctxToText(ctx: ParserRuleContext, input: string): TextPosition & { text: string } {
+export function ctxToText(
+    ctx: ParserRuleContext,
+    input: string
+): (TextPosition & { text: string }) | null {
+    if (!ctx.start || !ctx.stop) {
+        return null;
+    }
     const startIndex = ctx.start.start;
     const endIndex = ctx.stop.stop;
     const text = input.slice(startIndex, endIndex + 1);
@@ -84,6 +97,6 @@
         startIndex,
         endIndex,
         startColumn: ctx.start.column + 1,
-        endColumn: ctx.stop.column + 1 + ctx.stop.text.length,
+        endColumn: ctx.stop.column + 1 + (ctx.stop.text?.length ?? 0),
     };
 }
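Callers of ctxToWord and ctxToText must now guard before destructuring, which is exactly the pattern toStmtContext and toEntityContext adopt in entityCollector.ts above. A hedged, self-contained distillation (the relative import path is assumed):

    import { ParserRuleContext } from 'antlr4ng';
    import { ctxToWord } from './textAndWord'; // assumed relative import

    function describeCtx(ctx: ParserRuleContext, input: string): string | null {
        const word = ctxToWord(ctx, input);
        if (!word) return null; // the context had no start/stop token
        const { text, ...position } = word; // safe to destructure after the guard
        return `${text} @ line ${position.line}`;
    }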

View File

@@ -1,4 +1,4 @@
-import { Token } from 'antlr4ng';
+import { CharStream, CommonTokenStream, Token } from 'antlr4ng';
 import { CandidatesCollection } from 'antlr4-c3';
 import { FlinkSqlLexer } from '../../lib/flink/FlinkSqlLexer';
 import { FlinkSqlParser, ProgramContext } from '../../lib/flink/FlinkSqlParser';
@@ -11,11 +11,11 @@ import { FlinkEntityCollector } from './flinkEntityCollector';
 export { FlinkSqlSplitListener, FlinkEntityCollector };
 
 export class FlinkSQL extends BasicSQL<FlinkSqlLexer, ProgramContext, FlinkSqlParser> {
-    protected createLexerFromCharStream(charStreams) {
+    protected createLexerFromCharStream(charStreams: CharStream) {
         return new FlinkSqlLexer(charStreams);
     }
 
-    protected createParserFromTokenStream(tokenStream) {
+    protected createParserFromTokenStream(tokenStream: CommonTokenStream) {
         return new FlinkSqlParser(tokenStream);
     }
@@ -58,7 +58,7 @@ export class FlinkSQL extends BasicSQL<FlinkSqlLexer, ProgramContext, FlinkSqlPa
             caretTokenIndex + tokenIndexOffset + 1
         );
 
-        let syntaxContextType: EntityContextType | StmtContextType;
+        let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
         switch (ruleType) {
             case FlinkSqlParser.RULE_catalogPath: {
                 syntaxContextType = EntityContextType.CATALOG;
@@ -119,7 +119,7 @@ export class FlinkSQL extends BasicSQL<FlinkSqlLexer, ProgramContext, FlinkSqlPa
         for (let candidate of candidates.tokens) {
             const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
             const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
-            if (symbolicName && symbolicName.startsWith('KW_')) {
+            if (displayName && symbolicName && symbolicName.startsWith('KW_')) {
                 const keyword =
                     displayName.startsWith("'") && displayName.endsWith("'")
                         ? displayName.slice(1, -1)
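The syntaxContextType initializer (mirrored in every dialect module below) is what strict definite-assignment analysis demands: the switch assigns it on only some branches, so the declared type must include undefined. A distilled illustration with hypothetical values:

    function classify(ruleType: number): string | undefined {
        // Strict mode rejects reading a `let x: T;` that is not assigned on every
        // path; widening to `T | undefined` with an initializer keeps the partial
        // switch legal.
        let syntaxContextType: string | undefined = void 0;
        switch (ruleType) {
            case 0: // hypothetical rule index for illustration
                syntaxContextType = 'CATALOG';
                break;
            // other rule types intentionally leave it undefined
        }
        return syntaxContextType;
    }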

View File

@@ -11,7 +11,6 @@ import {
     DbSchemaNameCreateContext,
     FromInsertStmtContext,
     FromSelectStmtContext,
-    FromStatementContext,
     FunctionNameCreateContext,
     InsertStmtContext,
     SelectStatementContext,

View File

@@ -1,4 +1,4 @@
-import { Token } from 'antlr4ng';
+import { CharStream, CommonTokenStream, Token } from 'antlr4ng';
 import { CandidatesCollection } from 'antlr4-c3';
 import { HiveSqlLexer } from '../../lib/hive/HiveSqlLexer';
 import { HiveSqlParser, ProgramContext } from '../../lib/hive/HiveSqlParser';
@@ -12,11 +12,11 @@ import { HiveEntityCollector } from './hiveEntityCollector';
 export { HiveEntityCollector, HiveSqlSplitListener };
 
 export class HiveSQL extends BasicSQL<HiveSqlLexer, ProgramContext, HiveSqlParser> {
-    protected createLexerFromCharStream(charStreams) {
+    protected createLexerFromCharStream(charStreams: CharStream) {
         return new HiveSqlLexer(charStreams);
     }
 
-    protected createParserFromTokenStream(tokenStream) {
+    protected createParserFromTokenStream(tokenStream: CommonTokenStream) {
         return new HiveSqlParser(tokenStream);
     }
@@ -58,7 +58,7 @@ export class HiveSQL extends BasicSQL<HiveSqlLexer, ProgramContext, HiveSqlParse
             caretTokenIndex + tokenIndexOffset + 1
         );
 
-        let syntaxContextType: EntityContextType | StmtContextType;
+        let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
         switch (ruleType) {
             case HiveSqlParser.RULE_dbSchemaName: {
                 syntaxContextType = EntityContextType.DATABASE;
@@ -116,7 +116,7 @@ export class HiveSQL extends BasicSQL<HiveSqlLexer, ProgramContext, HiveSqlParse
         for (let candidate of candidates.tokens) {
             const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
             const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
-            if (symbolicName && symbolicName.startsWith('KW_')) {
+            if (displayName && symbolicName && symbolicName.startsWith('KW_')) {
                 const keyword =
                     displayName.startsWith("'") && displayName.endsWith("'")
                         ? displayName.slice(1, -1)

View File

@@ -11,7 +11,6 @@ import {
     DatabaseNameCreateContext,
     DatabaseNamePathContext,
     FunctionNameCreateContext,
-    FunctionNamePathContext,
     InsertStatementContext,
     QueryStatementContext,
     SingleStatementContext,

View File

@@ -1,4 +1,4 @@
-import { Token } from 'antlr4ng';
+import { CharStream, CommonTokenStream, Token } from 'antlr4ng';
 import { CandidatesCollection } from 'antlr4-c3';
 import { ImpalaSqlLexer } from '../../lib/impala/ImpalaSqlLexer';
 import { ImpalaSqlParser, ProgramContext } from '../../lib/impala/ImpalaSqlParser';
@@ -11,11 +11,11 @@ import { ImpalaEntityCollector } from './impalaEntityCollector';
 export { ImpalaEntityCollector, ImpalaSqlSplitListener };
 
 export class ImpalaSQL extends BasicSQL<ImpalaSqlLexer, ProgramContext, ImpalaSqlParser> {
-    protected createLexerFromCharStream(charStreams) {
+    protected createLexerFromCharStream(charStreams: CharStream) {
         return new ImpalaSqlLexer(charStreams);
     }
 
-    protected createParserFromTokenStream(tokenStream) {
+    protected createParserFromTokenStream(tokenStream: CommonTokenStream) {
         return new ImpalaSqlParser(tokenStream);
     }
@@ -56,7 +56,7 @@ export class ImpalaSQL extends BasicSQL<ImpalaSqlLexer, ProgramContext, ImpalaSq
             caretTokenIndex + tokenIndexOffset + 1
         );
 
-        let syntaxContextType: EntityContextType | StmtContextType;
+        let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
         switch (ruleType) {
             case ImpalaSqlParser.RULE_functionNameCreate: {
                 syntaxContextType = EntityContextType.FUNCTION_CREATE;
@@ -112,7 +112,7 @@ export class ImpalaSQL extends BasicSQL<ImpalaSqlLexer, ProgramContext, ImpalaSq
         for (let candidate of candidates.tokens) {
             const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
             const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
-            if (symbolicName && symbolicName.startsWith('KW_')) {
+            if (displayName && symbolicName && symbolicName.startsWith('KW_')) {
                 const keyword =
                     displayName.startsWith("'") && displayName.endsWith("'")
                         ? displayName.slice(1, -1)

View File

@@ -1,4 +1,4 @@
-import { Token } from 'antlr4ng';
+import { CharStream, CommonTokenStream, Token } from 'antlr4ng';
 import { CandidatesCollection } from 'antlr4-c3';
 import { MySqlLexer } from '../../lib/mysql/MySqlLexer';
 import { MySqlParser, ProgramContext } from '../../lib/mysql/MySqlParser';
@@ -11,11 +11,11 @@ import { MySqlEntityCollector } from './mysqlEntityCollector';
 export { MySqlEntityCollector, MysqlSplitListener };
 
 export class MySQL extends BasicSQL<MySqlLexer, ProgramContext, MySqlParser> {
-    protected createLexerFromCharStream(charStreams): MySqlLexer {
+    protected createLexerFromCharStream(charStreams: CharStream): MySqlLexer {
         return new MySqlLexer(charStreams);
     }
 
-    protected createParserFromTokenStream(tokenStream): MySqlParser {
+    protected createParserFromTokenStream(tokenStream: CommonTokenStream): MySqlParser {
         return new MySqlParser(tokenStream);
     }
@@ -57,7 +57,7 @@ export class MySQL extends BasicSQL<MySqlLexer, ProgramContext, MySqlParser> {
             caretTokenIndex + tokenIndexOffset + 1
         );
 
-        let syntaxContextType: EntityContextType | StmtContextType;
+        let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
         switch (ruleType) {
             case MySqlParser.RULE_databaseName: {
                 syntaxContextType = EntityContextType.DATABASE;
@@ -114,7 +114,7 @@ export class MySQL extends BasicSQL<MySqlLexer, ProgramContext, MySqlParser> {
         for (const candidate of candidates.tokens) {
             const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
             const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
-            if (symbolicName && symbolicName.startsWith('KW_')) {
+            if (displayName && symbolicName && symbolicName.startsWith('KW_')) {
                 const keyword =
                     displayName.startsWith("'") && displayName.endsWith("'")
                         ? displayName.slice(1, -1)

View File

@@ -10,7 +10,6 @@ import type {
     FunctionNameCreateContext,
     InsertStatementContext,
     QueryCreateTableContext,
-    SelectExpressionContext,
     SelectStatementContext,
     SingleStatementContext,
     TableNameContext,

View File

@@ -1,4 +1,4 @@
-import { Token } from 'antlr4ng';
+import { CharStream, CommonTokenStream, Token } from 'antlr4ng';
 import { CandidatesCollection } from 'antlr4-c3';
 import { PlSqlLexer } from '../lib/plsql/PlSqlLexer';
 import { PlSqlParser, ProgramContext } from '../lib/plsql/PlSqlParser';
@@ -6,11 +6,11 @@ import { BasicSQL } from './common/basicSQL';
 import { Suggestions } from './common/types';
 
 export class PLSQL extends BasicSQL<PlSqlLexer, ProgramContext, PlSqlParser> {
-    protected createLexerFromCharStream(charStreams) {
+    protected createLexerFromCharStream(charStreams: CharStream) {
         return new PlSqlLexer(charStreams);
     }
 
-    protected createParserFromTokenStream(tokenStream) {
+    protected createParserFromTokenStream(tokenStream: CommonTokenStream) {
         return new PlSqlParser(tokenStream);
     }
@@ -21,7 +21,7 @@ export class PLSQL extends BasicSQL<PlSqlLexer, ProgramContext, PlSqlParser> {
     }
 
     protected createEntityCollector(input: string, caretTokenIndex?: number) {
-        return null;
+        return null as any;
     }
 
     protected processCandidates(
protected processCandidates( protected processCandidates(

View File

@@ -1,5 +1,5 @@
 import { CandidatesCollection } from 'antlr4-c3';
-import { Token } from 'antlr4ng';
+import { CharStream, CommonTokenStream, Token } from 'antlr4ng';
 import { PostgreSqlLexer } from '../../lib/postgresql/PostgreSqlLexer';
 import { PostgreSqlParser, ProgramContext } from '../../lib/postgresql/PostgreSqlParser';
@@ -12,11 +12,11 @@ import { PostgreSqlSplitListener } from './postgreSplitListener';
 export { PostgreSqlEntityCollector, PostgreSqlSplitListener };
 
 export class PostgreSQL extends BasicSQL<PostgreSqlLexer, ProgramContext, PostgreSqlParser> {
-    protected createLexerFromCharStream(charStreams) {
+    protected createLexerFromCharStream(charStreams: CharStream) {
         return new PostgreSqlLexer(charStreams);
     }
 
-    protected createParserFromTokenStream(tokenStream) {
+    protected createParserFromTokenStream(tokenStream: CommonTokenStream) {
         return new PostgreSqlParser(tokenStream);
     }
@@ -61,7 +61,7 @@ export class PostgreSQL extends BasicSQL<PostgreSqlLexer, ProgramContext, Postgr
             caretTokenIndex + tokenIndexOffset + 1
         );
 
-        let syntaxContextType: EntityContextType | StmtContextType;
+        let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
         switch (ruleType) {
             case PostgreSqlParser.RULE_table_name_create: {
                 syntaxContextType = EntityContextType.TABLE_CREATE;
@@ -134,7 +134,7 @@ export class PostgreSQL extends BasicSQL<PostgreSqlLexer, ProgramContext, Postgr
         for (let candidate of candidates.tokens) {
             const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
             const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
-            if (symbolicName && symbolicName.startsWith('KW_')) {
+            if (displayName && symbolicName && symbolicName.startsWith('KW_')) {
                 const keyword =
                     displayName.startsWith("'") && displayName.endsWith("'")
                         ? displayName.slice(1, -1)

View File

@@ -1,4 +1,4 @@
-import { Token } from 'antlr4ng';
+import { CharStream, CommonTokenStream, Token } from 'antlr4ng';
 import { CandidatesCollection } from 'antlr4-c3';
 import { SparkSqlLexer } from '../../lib/spark/SparkSqlLexer';
 import { SparkSqlParser, ProgramContext } from '../../lib/spark/SparkSqlParser';
@@ -11,11 +11,11 @@ import { SparkEntityCollector } from './sparkEntityCollector';
 export { SparkSqlSplitListener, SparkEntityCollector };
 
 export class SparkSQL extends BasicSQL<SparkSqlLexer, ProgramContext, SparkSqlParser> {
-    protected createLexerFromCharStream(charStreams) {
+    protected createLexerFromCharStream(charStreams: CharStream) {
         return new SparkSqlLexer(charStreams);
     }
 
-    protected createParserFromTokenStream(tokenStream) {
+    protected createParserFromTokenStream(tokenStream: CommonTokenStream) {
         return new SparkSqlParser(tokenStream);
     }
@@ -57,7 +57,7 @@ export class SparkSQL extends BasicSQL<SparkSqlLexer, ProgramContext, SparkSqlPa
             caretTokenIndex + tokenIndexOffset + 1
         );
 
-        let syntaxContextType: EntityContextType | StmtContextType;
+        let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
         switch (ruleType) {
             case SparkSqlParser.RULE_namespaceName: {
                 syntaxContextType = EntityContextType.DATABASE;
@@ -114,7 +114,7 @@ export class SparkSQL extends BasicSQL<SparkSqlLexer, ProgramContext, SparkSqlPa
         for (const candidate of candidates.tokens) {
             const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
             const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
-            if (symbolicName && symbolicName.startsWith('KW_')) {
+            if (displayName && symbolicName && symbolicName.startsWith('KW_')) {
                 const keyword =
                     displayName.startsWith("'") && displayName.endsWith("'")
                         ? displayName.slice(1, -1)

View File

@@ -1,4 +1,4 @@
-import { Token } from 'antlr4ng';
+import { CharStream, CommonTokenStream, Token } from 'antlr4ng';
 import { CandidatesCollection } from 'antlr4-c3';
 import { TrinoSqlLexer } from '../../lib/trino/TrinoSqlLexer';
 import { TrinoSqlParser, ProgramContext } from '../../lib/trino/TrinoSqlParser';
@@ -11,11 +11,11 @@ import { TrinoEntityCollector } from './trinoEntityCollector';
 export { TrinoSqlSplitListener, TrinoEntityCollector };
 
 export class TrinoSQL extends BasicSQL<TrinoSqlLexer, ProgramContext, TrinoSqlParser> {
-    protected createLexerFromCharStream(charStreams) {
+    protected createLexerFromCharStream(charStreams: CharStream) {
         return new TrinoSqlLexer(charStreams);
     }
 
-    protected createParserFromTokenStream(tokenStream) {
+    protected createParserFromTokenStream(tokenStream: CommonTokenStream) {
         return new TrinoSqlParser(tokenStream);
     }
@@ -58,7 +58,7 @@ export class TrinoSQL extends BasicSQL<TrinoSqlLexer, ProgramContext, TrinoSqlPa
             caretTokenIndex + tokenIndexOffset + 1
         );
 
-        let syntaxContextType: EntityContextType | StmtContextType;
+        let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
         switch (ruleType) {
             case TrinoSqlParser.RULE_catalogName: {
                 syntaxContextType = EntityContextType.CATALOG;
@@ -115,7 +115,7 @@ export class TrinoSQL extends BasicSQL<TrinoSqlLexer, ProgramContext, TrinoSqlPa
         for (let candidate of candidates.tokens) {
             const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
             const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
-            if (symbolicName && symbolicName.startsWith('KW_')) {
+            if (displayName && symbolicName && symbolicName.startsWith('KW_')) {
                 const keyword =
                     displayName.startsWith("'") && displayName.endsWith("'")
                         ? displayName.slice(1, -1)

View File

@@ -41,7 +41,7 @@ export enum Legacy_TokenType {
 export interface Legacy_Token {
     type: Legacy_TokenType;
     value: string;
-    start: number;
+    start?: number;
     end: number;
     lineNumber: number;
     message?: string;

View File

@@ -15,8 +15,8 @@ describe('BasicSQL unit tests', () => {
     test('Create lexer with errorListener', () => {
         const sql = '袋鼠云数栈UED团队';
-        const errors: any[] = [];
-        const errorListener: ErrorListener<any> = (err) => {
+        const errors = [];
+        const errorListener: ErrorListener = (err) => {
             errors.push(err);
         };
         const lexer = flink.createLexer(sql, errorListener);
@@ -35,8 +35,8 @@ describe('BasicSQL unit tests', () => {
     test('Create parser with errorListener (lexer error)', () => {
         const sql = '袋鼠云数栈UED团队';
-        const errors: any[] = [];
-        const errorListener: ErrorListener<any> = (err) => {
+        const errors = [];
+        const errorListener: ErrorListener = (err) => {
             errors.push(err);
         };
         const parser = flink.createParser(sql, errorListener);
@@ -46,8 +46,8 @@ describe('BasicSQL unit tests', () => {
     test('Create parser with errorListener (parse error)', () => {
         const sql = 'SHOW TA';
-        const errors: any[] = [];
-        const errorListener: ErrorListener<any> = (err) => {
+        const errors = [];
+        const errorListener: ErrorListener = (err) => {
             errors.push(err);
         };
         const parser = flink.createParser(sql, errorListener);
@@ -57,8 +57,8 @@ describe('BasicSQL unit tests', () => {
     test('Parse right input', () => {
         const sql = 'SELECT * FROM tb1';
-        const errors: any[] = [];
-        const errorListener: ErrorListener<any> = (err) => {
+        const errors = [];
+        const errorListener: ErrorListener = (err) => {
             errors.push(err);
         };
         const parseTree = flink.parse(sql, errorListener);
@@ -70,8 +70,8 @@ describe('BasicSQL unit tests', () => {
     test('Parse wrong input', () => {
         const sql = '袋鼠云数栈UED团队';
-        const errors: any[] = [];
-        const errorListener: ErrorListener<any> = (err) => {
+        const errors = [];
+        const errorListener: ErrorListener = (err) => {
             errors.push(err);
         };
         const parseTree = flink.parse(sql, errorListener);
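
These tests track the narrowed ErrorListener type, which no longer takes a type argument. A minimal usage sketch against the public entry point (package import path assumed; the tests above suggest the callback receives a ParseError):

import { FlinkSQL } from 'dt-sql-parser';
import type { ErrorListener, ParseError } from 'dt-sql-parser';

const flink = new FlinkSQL();
const errors: ParseError[] = [];
const errorListener: ErrorListener = (err) => {
    errors.push(err); // collect instead of throwing
};
flink.parse('SELECT * FROM tb1', errorListener);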


@@ -19,7 +19,7 @@ describe('FlinkSQL benchmark tests', () => {
     const reportData: string[] = [];
     test('createTable Over 100 Rows', async () => {
-        const [totalTimes, averageTimes, msg] = benchmark('CreateTable Over 100 Rows', () => {
+        const [totalTimes, averageTimes] = benchmark('CreateTable Over 100 Rows', () => {
             const testSQL = features.createTable[0];
             const res = flink.validate(testSQL);
             expect(res).toEqual([]);
@@ -28,7 +28,7 @@ describe('FlinkSQL benchmark tests', () => {
     });
     test('createTable Over 1000 Rows', async () => {
-        const [totalTimes, averageTimes, msg] = benchmark('CreateTable Over 1000 Rows', () => {
+        const [totalTimes, averageTimes] = benchmark('CreateTable Over 1000 Rows', () => {
             const testSQL = features.createTable[1];
             const res = flink.validate(testSQL);
             expect(res).toEqual([]);
@@ -37,7 +37,7 @@ describe('FlinkSQL benchmark tests', () => {
     });
     test('createTable Over 5000 Rows', async () => {
-        const [totalTimes, averageTimes, msg] = benchmark('CreateTable Over 5000 Rows', () => {
+        const [totalTimes, averageTimes] = benchmark('CreateTable Over 5000 Rows', () => {
             const testSQL = features.createTable[2];
             const res = flink.validate(testSQL);
             expect(res).toEqual([]);
@@ -46,7 +46,7 @@ describe('FlinkSQL benchmark tests', () => {
     });
     test('selectTable Over 100 Rows', async () => {
-        const [totalTimes, averageTimes, msg] = benchmark('SelectTable Over 100 Rows', () => {
+        const [totalTimes, averageTimes] = benchmark('SelectTable Over 100 Rows', () => {
             const testSQL = features.selectTable[0];
             const res = flink.validate(testSQL);
             expect(res).toEqual([]);
@@ -55,7 +55,7 @@ describe('FlinkSQL benchmark tests', () => {
     });
     test('selectTable Over 1000 Rows', async () => {
-        const [totalTimes, averageTimes, msg] = benchmark('SelectTable Over 1000 Rows', () => {
+        const [totalTimes, averageTimes] = benchmark('SelectTable Over 1000 Rows', () => {
             const testSQL = features.selectTable[1];
             const res = flink.validate(testSQL);
             expect(res).toEqual([]);
@@ -64,7 +64,7 @@ describe('FlinkSQL benchmark tests', () => {
     });
     test('selectTable Over 5000 Rows', async () => {
-        const [totalTimes, averageTimes, msg] = benchmark('SelectTable Over 5000 Rows', () => {
+        const [totalTimes, averageTimes] = benchmark('SelectTable Over 5000 Rows', () => {
             const testSQL = features.selectTable[2];
             const res = flink.validate(testSQL);
             expect(res).toEqual([]);
@@ -73,7 +73,7 @@ describe('FlinkSQL benchmark tests', () => {
     });
     test('insertTable Over 100 Rows', async () => {
-        const [totalTimes, averageTimes, msg] = benchmark('InsertTable Over 100 Rows', () => {
+        const [totalTimes, averageTimes] = benchmark('InsertTable Over 100 Rows', () => {
             const testSQL = features.insertTable[0];
             const res = flink.validate(testSQL);
             expect(res).toEqual([]);
@@ -82,7 +82,7 @@ describe('FlinkSQL benchmark tests', () => {
     });
     test('insertTable Over 1000 Rows', async () => {
-        const [totalTimes, averageTimes, msg] = benchmark('InsertTable Over 1000 Rows', () => {
+        const [totalTimes, averageTimes] = benchmark('InsertTable Over 1000 Rows', () => {
             const testSQL = features.insertTable[1];
             const res = flink.validate(testSQL);
             expect(res).toEqual([]);
@@ -91,7 +91,7 @@ describe('FlinkSQL benchmark tests', () => {
     });
     test('insertTable Over 5000 Rows', async () => {
-        const [totalTimes, averageTimes, msg] = benchmark('InsertTable Over 5000 Rows', () => {
+        const [totalTimes, averageTimes] = benchmark('InsertTable Over 5000 Rows', () => {
             const testSQL = features.insertTable[2];
             const res = flink.validate(testSQL);
             expect(res).toEqual([]);
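
Each benchmark now destructures only the two tuple slots it reports. For orientation, a sketch of the shape the benchmark helper appears to have — the real helper lives in the test utilities and also produced the msg slot these tests stopped reading (signature assumed):

function benchmark(name: string, fn: () => void, times = 1): [number, number] {
    const start = performance.now(); // global in modern Node; otherwise from 'perf_hooks'
    for (let i = 0; i < times; i += 1) {
        fn();
    }
    const totalTimes = performance.now() - start;
    const averageTimes = totalTimes / times;
    return [totalTimes, averageTimes];
}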


@@ -1,4 +1,3 @@
-import { ErrorNode, ParserRuleContext, TerminalNode } from 'antlr4ng';
 import { FlinkSQL } from 'src/parser/flink';
 import { FlinkSqlParserListener } from 'src/lib/flink/FlinkSqlParserListener';
 import { TableExpressionContext } from 'src/lib/flink/FlinkSqlParser';
@@ -11,20 +10,17 @@ describe('Flink SQL Listener Tests', () => {
     const parseTree = flink.parse(sql);
     test('Listener enterTableName', async () => {
-        let result = '';
-        class MyListener implements FlinkSqlParserListener {
-            enterTableExpression = (ctx: TableExpressionContext): void => {
-                result = ctx.getText().toLowerCase();
-            };
-            visitTerminal(node: TerminalNode): void {}
-            visitErrorNode(node: ErrorNode): void {}
-            enterEveryRule(node: ParserRuleContext): void {}
-            exitEveryRule(node: ParserRuleContext): void {}
-        }
-        const listenTableName = new MyListener();
-        await flink.listen(listenTableName, parseTree);
-        expect(result).toBe(expectTableName);
+        class MyListener extends FlinkSqlParserListener {
+            result = '';
+            enterTableExpression = (ctx: TableExpressionContext): void => {
+                this.result = ctx.getText().toLowerCase();
+            };
+        }
+        const listener = new MyListener();
+        flink.listen(listener, parseTree);
+        expect(listener.result).toBe(expectTableName);
     });
     test('Split sql listener', async () => {
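
The same rewrite recurs in every listener test below: the generated listener is now a class, so subclassing it inherits the ParseTreeListener plumbing and the four empty stubs can go; mutable state moves from a closure variable onto the instance; and the await is dropped since listen evidently runs synchronously. The resulting pattern in miniature, reusing this file's imports:

class TableCollector extends FlinkSqlParserListener {
    tables: string[] = [];
    // An arrow-function property keeps `this` bound to the listener instance.
    enterTableExpression = (ctx: TableExpressionContext): void => {
        this.tables.push(ctx.getText().toLowerCase());
    };
}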


@@ -23,8 +23,6 @@ describe('Flink SQL Syntax Suggestion with collect entity', () => {
         };
         const sql = commentOtherLine(syntaxSql, pos.lineNumber);
-        const parseTree = flink.parse(sql);
         const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax;
         const suggestion = syntaxes?.find(
             (syn) => syn.syntaxContextType === EntityContextType.COLUMN
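
Worth noting: the deleted flink.parse(sql) call was dead weight — getSuggestionAtCaretPosition evidently parses on its own. Minimal usage sketch (caret values illustrative):

const pos = { lineNumber: 1, column: 9 }; // a CaretPosition
const syntaxes = flink.getSuggestionAtCaretPosition('SELECT  FROM tb1', pos)?.syntax;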


@@ -1,7 +1,6 @@
 import { FlinkSQL } from 'src/parser/flink';
 import { FlinkSqlParserVisitor } from 'src/lib/flink/FlinkSqlParserVisitor';
-import { AbstractParseTreeVisitor } from 'antlr4ng';
-import { TableExpressionContext } from 'src/lib/flink/FlinkSqlParser';
+import { ProgramContext, TableExpressionContext } from 'src/lib/flink/FlinkSqlParser';
 describe('Flink SQL Visitor Tests', () => {
     const expectTableName = 'user1';
@@ -13,20 +12,22 @@ describe('Flink SQL Visitor Tests', () => {
     });
     test('Visitor visitTableName', () => {
-        let result = '';
-        class MyVisitor
-            extends AbstractParseTreeVisitor<any>
-            implements FlinkSqlParserVisitor<any>
-        {
-            protected defaultResult() {
-                return result;
-            }
-            visitTableExpression(ctx: TableExpressionContext) {
-                result = ctx.getText().toLowerCase();
-            }
-        }
-        const visitor: any = new MyVisitor();
-        visitor.visit(parseTree);
+        class MyVisitor extends FlinkSqlParserVisitor<string> {
+            defaultResult(): string {
+                return '';
+            }
+            aggregateResult(aggregate: string, nextResult: string): string {
+                return aggregate + nextResult;
+            }
+            visitProgram = (ctx: ProgramContext) => {
+                return this.visitChildren(ctx);
+            };
+            visitTableExpression = (ctx: TableExpressionContext) => {
+                return ctx.getText().toLowerCase();
+            };
+        }
+        const visitor = new MyVisitor();
+        const result = visitor.visit(parseTree);
         expect(result).toBe(expectTableName);
     });
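
The visitor rewrites (here and in the dialects below) replace mutation with return values: each visitX returns a string, and the base class folds children's results with aggregateResult, seeded by defaultResult. A conceptual sketch of that fold — not the library's actual implementation:

function foldChildren(childResults: string[]): string {
    let aggregate = ''; // defaultResult()
    for (const next of childResults) {
        aggregate = aggregate + next; // aggregateResult(aggregate, next)
    }
    return aggregate;
}

Because every rule except the table expression contributes an empty string, visit(parseTree) evaluates to the lower-cased table name.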


@@ -1,6 +1,5 @@
-import { ParseTreeListener } from 'antlr4ng';
 import { HiveSQL } from 'src/parser/hive';
-import { ProgramContext, SelectItemContext } from 'src/lib/hive/HiveSqlParser';
+import { SelectItemContext } from 'src/lib/hive/HiveSqlParser';
 import { HiveSqlParserListener } from 'src/lib/hive/HiveSqlParserListener';
 describe('HiveSQL Listener Tests', () => {
@@ -10,39 +9,33 @@ describe('HiveSQL Listener Tests', () => {
         const sql = `select ${expectTableName} from tablename where inc_day='20190601' limit 1000;`;
         const parseTree = hive.parse(sql);
-        let result = '';
-        class MyListener implements HiveSqlParserListener {
-            enterSelectItem(ctx: SelectItemContext) {
-                result = ctx.getText();
-            }
-            visitTerminal() {}
-            visitErrorNode() {}
-            enterEveryRule() {}
-            exitEveryRule() {}
-        }
-        const listenTableName = new MyListener();
-        await hive.listen(listenTableName as ParseTreeListener, parseTree as ProgramContext);
-        expect(result).toBe(expectTableName);
+        class MyListener extends HiveSqlParserListener {
+            result = '';
+            enterSelectItem = (ctx: SelectItemContext) => {
+                this.result = ctx.getText();
+            };
+        }
+        const listener = new MyListener();
+        hive.listen(listener, parseTree);
+        expect(listener.result).toBe(expectTableName);
     });
     test('Listener enterCreateTable', async () => {
         const sql = `drop table table_name;`;
         const parseTree = hive.parse(sql);
-        let result = '';
-        class MyListener implements HiveSqlParserListener {
-            enterDropTableStatement(ctx) {
-                result = ctx.getText();
-            }
-            visitTerminal() {}
-            visitErrorNode() {}
-            enterEveryRule() {}
-            exitEveryRule() {}
-        }
-        const listenTableName = new MyListener();
-        await hive.listen(listenTableName as ParseTreeListener, parseTree as ProgramContext);
-        expect(result).toBe('droptabletable_name');
+        class MyListener extends HiveSqlParserListener {
+            result = '';
+            enterDropTableStatement = (ctx) => {
+                this.result = ctx.getText();
+            };
+        }
+        const listener = new MyListener();
+        hive.listen(listener, parseTree);
+        expect(listener.result).toBe('droptabletable_name');
     });
     test('Split sql listener', async () => {


@@ -1,5 +1,3 @@
-import { AbstractParseTreeVisitor } from 'antlr4ng';
 import { HiveSQL } from 'src/parser/hive';
 import { HiveSqlParserVisitor } from 'src/lib/hive/HiveSqlParserVisitor';
 import { ProgramContext, TableNameContext } from 'src/lib/hive/HiveSqlParser';
@@ -14,19 +12,23 @@ describe('HiveSQL Visitor Tests', () => {
     });
     test('Visitor visitTableName', () => {
-        let result = '';
-        class MyVisitor extends AbstractParseTreeVisitor<any> implements HiveSqlParserVisitor<any> {
-            defaultResult() {
-                return result;
-            }
-            visitTableName(ctx: TableNameContext) {
-                result = ctx.getText().toLowerCase();
-            }
-        }
-        const visitor = new MyVisitor();
-        visitor.visit(parseTree as ProgramContext);
+        class MyVisitor extends HiveSqlParserVisitor<string> {
+            defaultResult(): string {
+                return '';
+            }
+            aggregateResult(aggregate: string, nextResult: string): string {
+                return aggregate + nextResult;
+            }
+            visitProgram = (ctx: ProgramContext) => {
+                return this.visitChildren(ctx);
+            };
+            visitTableName = (ctx: TableNameContext) => {
+                return ctx.getText().toLowerCase();
+            };
+        }
+        const visitor = new MyVisitor();
+        const result = visitor.visit(parseTree);
         expect(result).toBe(expectTableName);
     });


@@ -1,6 +1,5 @@
 import { ImpalaSQL } from 'src/parser/impala';
 import { ImpalaSqlParserListener } from 'src/lib/impala/ImpalaSqlParserListener';
-import { ParseTreeListener } from 'antlr4ng';
 describe('impala SQL Listener Tests', () => {
     const expectTableName = 'user1';
@@ -11,19 +10,14 @@ describe('impala SQL Listener Tests', () => {
     test('Listener enterTableNamePath', async () => {
         let result = '';
-        class MyListener implements ImpalaSqlParserListener {
+        class MyListener extends ImpalaSqlParserListener {
             enterTableNamePath = (ctx): void => {
                 result = ctx.getText().toLowerCase();
             };
-            visitTerminal() {}
-            visitErrorNode() {}
-            enterEveryRule() {}
-            exitEveryRule() {}
         }
-        const listenTableName = new MyListener();
-        await impala.listen(listenTableName as ParseTreeListener, parseTree);
+        const listener = new MyListener();
+        impala.listen(listener, parseTree);
         expect(result).toBe(expectTableName);
     });


@@ -1,5 +1,4 @@
 import { ImpalaSQL } from 'src/parser/impala';
-import { AbstractParseTreeVisitor } from 'antlr4ng';
 import { ImpalaSqlParserVisitor } from 'src/lib/impala/ImpalaSqlParserVisitor';
 describe('impala SQL Visitor Tests', () => {
@@ -12,20 +11,22 @@ describe('impala SQL Visitor Tests', () => {
     });
     test('Visitor visitTableNamePath', () => {
-        let result = '';
-        class MyVisitor
-            extends AbstractParseTreeVisitor<any>
-            implements ImpalaSqlParserVisitor<any>
-        {
-            protected defaultResult() {
-                return result;
-            }
-            visitTableNamePath = (ctx): void => {
-                result = ctx.getText().toLowerCase();
-            };
-        }
-        const visitor: any = new MyVisitor();
-        visitor.visit(parseTree);
+        class MyVisitor extends ImpalaSqlParserVisitor<string> {
+            defaultResult(): string {
+                return '';
+            }
+            aggregateResult(aggregate: string, nextResult: string): string {
+                return aggregate + nextResult;
+            }
+            visitProgram = (ctx) => {
+                return this.visitChildren(ctx);
+            };
+            visitTableNamePath = (ctx) => {
+                return ctx.getText().toLowerCase();
+            };
+        }
+        const visitor = new MyVisitor();
+        const result = visitor.visit(parseTree);
         expect(result).toBe(expectTableName);
     });


@@ -1,6 +1,5 @@
 import { MySQL } from 'src/parser/mysql';
 import { MySqlParserListener } from 'src/lib/mysql/MySqlParserListener';
-import { ParseTreeListener } from 'antlr4ng';
 describe('MySQL Listener Tests', () => {
     const expectTableName = 'user1';
@@ -10,20 +9,17 @@ describe('MySQL Listener Tests', () => {
     const parseTree = mysql.parse(sql);
     test('Listener enterTableName', async () => {
-        let result = '';
-        class MyListener implements MySqlParserListener {
-            enterTableName = (ctx): void => {
-                result = ctx.getText().toLowerCase();
-            };
-            visitTerminal() {}
-            visitErrorNode() {}
-            enterEveryRule() {}
-            exitEveryRule() {}
-        }
-        const listenTableName: any = new MyListener();
-        await mysql.listen(listenTableName as ParseTreeListener, parseTree);
-        expect(result).toBe(expectTableName);
+        class MyListener extends MySqlParserListener {
+            result = '';
+            enterTableName = (ctx): void => {
+                this.result = ctx.getText().toLowerCase();
+            };
+        }
+        const listener = new MyListener();
+        mysql.listen(listener, parseTree);
+        expect(listener.result).toBe(expectTableName);
     });
     test('Split sql listener', async () => {


@@ -1,6 +1,5 @@
 import { MySQL } from 'src/parser/mysql';
 import { MySqlParserVisitor } from 'src/lib/mysql/MySqlParserVisitor';
-import { AbstractParseTreeVisitor } from 'antlr4ng';
 describe('MySQL Visitor Tests', () => {
     const expectTableName = 'user1';
@@ -12,18 +11,22 @@ describe('MySQL Visitor Tests', () => {
     });
     test('Visitor visitTableName', () => {
-        let result = '';
-        class MyVisitor extends AbstractParseTreeVisitor<any> implements MySqlParserVisitor<any> {
-            protected defaultResult() {
-                return result;
-            }
-            visitTableName = (ctx): void => {
-                result = ctx.getText().toLowerCase();
-            };
-        }
-        const visitor = new MyVisitor();
-        visitor.visit(parseTree);
+        class MyVisitor extends MySqlParserVisitor<string> {
+            defaultResult(): string {
+                return '';
+            }
+            aggregateResult(aggregate: string, nextResult: string): string {
+                return aggregate + nextResult;
+            }
+            visitProgram = (ctx) => {
+                return this.visitChildren(ctx);
+            };
+            visitTableName = (ctx) => {
+                return ctx.getText().toLowerCase();
+            };
+        }
+        const visitor = new MyVisitor();
+        const result = visitor.visit(parseTree);
         expect(result).toBe(expectTableName);
     });


@@ -1,5 +1,4 @@
 import { PLSQL } from 'src/parser/plsql';
-import { ParseTreeListener } from 'antlr4ng';
 import { PlSqlParserListener } from 'src/lib/plsql/PlSqlParserListener';
 describe('PLSQL Listener Tests', () => {
@@ -10,19 +9,16 @@ describe('PLSQL Listener Tests', () => {
     const parseTree = plsql.parse(sql);
     test('Listener enterTableName', async () => {
-        let result = '';
-        class MyListener implements PlSqlParserListener {
-            enterTable_ref_list = (ctx): void => {
-                result = ctx.getText().toLowerCase();
-            };
-            visitTerminal() {}
-            visitErrorNode() {}
-            enterEveryRule() {}
-            exitEveryRule() {}
-        }
-        const listenTableName = new MyListener();
-        await plsql.listen(listenTableName as ParseTreeListener, parseTree);
-        expect(result).toBe(expectTableName);
+        class MyListener extends PlSqlParserListener {
+            result = '';
+            enterTable_ref_list = (ctx) => {
+                this.result = ctx.getText().toLowerCase();
+            };
+        }
+        const listener = new MyListener();
+        plsql.listen(listener, parseTree);
+        expect(listener.result).toBe(expectTableName);
     });
 });


@@ -1,5 +1,4 @@
 import { PLSQL } from 'src/parser/plsql';
-import { AbstractParseTreeVisitor } from 'antlr4ng';
 import { PlSqlParserVisitor } from 'src/lib/plsql/PlSqlParserVisitor';
 describe('PLSQL Visitor Tests', () => {
@@ -10,17 +9,22 @@ describe('PLSQL Visitor Tests', () => {
     const parseTree = plsql.parse(sql);
     test('Visitor visitTable_ref_list', () => {
-        let result = '';
-        class MyVisitor extends AbstractParseTreeVisitor<any> implements PlSqlParserVisitor<any> {
-            protected defaultResult() {
-                return result;
-            }
-            visitTable_ref_list = (ctx): void => {
-                result = ctx.getText().toLowerCase();
-            };
-        }
-        const visitor: any = new MyVisitor();
-        visitor.visit(parseTree);
+        class MyVisitor extends PlSqlParserVisitor<string> {
+            defaultResult(): string {
+                return '';
+            }
+            aggregateResult(aggregate: string, nextResult: string): string {
+                return aggregate + nextResult;
+            }
+            visitProgram = (ctx) => {
+                return this.visitChildren(ctx);
+            };
+            visitTable_ref_list = (ctx) => {
+                return ctx.getText().toLowerCase();
+            };
+        }
+        const visitor = new MyVisitor();
+        const result = visitor.visit(parseTree);
         expect(result).toBe(expectTableName);
     });


@@ -1,6 +1,5 @@
 import { PostgreSQL } from 'src/parser/postgresql';
 import { PostgreSqlParserListener } from 'src/lib/postgresql/PostgreSqlParserListener';
-import { ParseTreeListener } from 'antlr4ng';
 describe('PostgreSQL Listener Tests', () => {
     const expectTableName = 'user1';
@@ -10,20 +9,16 @@ describe('PostgreSQL Listener Tests', () => {
     const parseTree = postgresql.parse(sql);
     test('Listener enterTableName', async () => {
-        let result = '';
-        class MyListener implements PostgreSqlParserListener {
-            enterTable_ref(ctx) {
-                result = ctx.getText().toLowerCase();
-            }
-            visitTerminal() {}
-            visitErrorNode() {}
-            enterEveryRule() {}
-            exitEveryRule() {}
-        }
-        const listenTableName = new MyListener();
-        await postgresql.listen(listenTableName as ParseTreeListener, parseTree);
-        expect(result).toBe(expectTableName);
+        class MyListener extends PostgreSqlParserListener {
+            result = '';
+            enterTable_ref = (ctx) => {
+                this.result = ctx.getText().toLowerCase();
+            };
+        }
+        const listener = new MyListener();
+        postgresql.listen(listener, parseTree);
+        expect(listener.result).toBe(expectTableName);
     });
     test('Split sql listener', async () => {


@@ -1,5 +1,4 @@
 import { PostgreSQL } from 'src/parser/postgresql';
-import { AbstractParseTreeVisitor } from 'antlr4ng';
 import { PostgreSqlParserVisitor } from 'src/lib/postgresql/PostgreSqlParserVisitor';
 describe('MySQL Visitor Tests', () => {
@@ -12,21 +11,22 @@ describe('MySQL Visitor Tests', () => {
     });
     test('Visitor visitTableName', () => {
-        let result = '';
-        class MyVisitor
-            extends AbstractParseTreeVisitor<any>
-            implements PostgreSqlParserVisitor<any>
-        {
-            protected defaultResult() {
-                return result;
-            }
-            visitTable_ref(ctx) {
-                result = ctx.getText().toLowerCase();
-            }
-        }
-        const visitor: any = new MyVisitor();
-        visitor.visit(parseTree);
+        class MyVisitor extends PostgreSqlParserVisitor<string> {
+            defaultResult(): string {
+                return '';
+            }
+            aggregateResult(aggregate: string, nextResult: string): string {
+                return aggregate + nextResult;
+            }
+            visitProgram = (ctx) => {
+                return this.visitChildren(ctx);
+            };
+            visitTable_ref = (ctx) => {
+                return ctx.getText().toLowerCase();
+            };
+        }
+        const visitor = new MyVisitor();
+        const result = visitor.visit(parseTree);
         expect(result).toBe(expectTableName);
     });


@@ -1,6 +1,5 @@
 import { SparkSQL } from 'src/parser/spark';
 import { SparkSqlParserListener } from 'src/lib/spark/SparkSqlParserListener';
-import { ParseTreeListener } from 'antlr4ng';
 describe('Spark SQL Listener Tests', () => {
     const expectTableName = 'user1';
@@ -10,20 +9,16 @@ describe('Spark SQL Listener Tests', () => {
     const parseTree = spark.parse(sql);
     test('Listener exitTableName', () => {
-        let result = '';
-        class MyListener implements SparkSqlParserListener {
-            exitTableName = (ctx): void => {
-                result = ctx.getText().toLowerCase();
-            };
-            visitTerminal() {}
-            visitErrorNode() {}
-            enterEveryRule() {}
-            exitEveryRule() {}
-        }
-        const listenTableName = new MyListener();
-        spark.listen(listenTableName as ParseTreeListener, parseTree);
-        expect(result).toBe(expectTableName);
+        class MyListener extends SparkSqlParserListener {
+            result = '';
+            exitTableName = (ctx): void => {
+                this.result = ctx.getText().toLowerCase();
+            };
+        }
+        const listener = new MyListener();
+        spark.listen(listener, parseTree);
+        expect(listener.result).toBe(expectTableName);
     });
     test('Split sql listener', async () => {


@@ -1,6 +1,5 @@
 import { SparkSQL } from 'src/parser/spark';
 import { SparkSqlParserVisitor } from 'src/lib/spark/SparkSqlParserVisitor';
-import { AbstractParseTreeVisitor } from 'antlr4ng';
 describe('Spark SQL Visitor Tests', () => {
     const expectTableName = 'user1';
@@ -12,21 +11,23 @@ describe('Spark SQL Visitor Tests', () => {
     });
     test('Visitor visitTableName', () => {
-        class MyVisitor
-            extends AbstractParseTreeVisitor<any>
-            implements SparkSqlParserVisitor<any>
-        {
-            result: string = '';
-            protected defaultResult() {
-                return this.result;
-            }
-            visitTableName = (ctx): void => {
-                this.result = ctx.getText().toLowerCase();
-            };
-        }
-        const visitor = new MyVisitor();
-        visitor.visit(parseTree);
-        expect(visitor.result).toBe(expectTableName);
+        class MyVisitor extends SparkSqlParserVisitor<string> {
+            defaultResult(): string {
+                return '';
+            }
+            aggregateResult(aggregate: string, nextResult: string): string {
+                return aggregate + nextResult;
+            }
+            visitProgram = (ctx) => {
+                return this.visitChildren(ctx);
+            };
+            visitTableName = (ctx) => {
+                return ctx.getText().toLowerCase();
+            };
+        }
+        const visitor = new MyVisitor();
+        const result = visitor.visit(parseTree);
+        expect(result).toBe(expectTableName);
     });
 });


@@ -1,6 +1,5 @@
 import { TrinoSQL } from 'src/parser/trino';
 import { TrinoSqlListener } from 'src/lib/trino/TrinoSqlListener';
-import { ParseTreeListener } from 'antlr4ng';
 describe('trino SQL Listener Tests', () => {
     const expectTableName = 'user1';
@@ -10,20 +9,16 @@ describe('trino SQL Listener Tests', () => {
     const parseTree = trino.parse(sql);
     test('Listener enterTableName', async () => {
-        let result = '';
-        class MyListener implements TrinoSqlListener {
-            enterTableName = (ctx): void => {
-                result = ctx.getText().toLowerCase();
-            };
-            visitTerminal() {}
-            visitErrorNode() {}
-            enterEveryRule() {}
-            exitEveryRule() {}
-        }
-        const listenTableName = new MyListener();
-        await trino.listen(listenTableName as ParseTreeListener, parseTree);
-        expect(result).toBe(expectTableName);
+        class MyListener extends TrinoSqlListener {
+            result = '';
+            enterTableName = (ctx): void => {
+                this.result = ctx.getText().toLowerCase();
+            };
+        }
+        const listener = new MyListener();
+        trino.listen(listener, parseTree);
+        expect(listener.result).toBe(expectTableName);
     });
     test('Split sql listener', async () => {


@@ -1,6 +1,5 @@
 import { TrinoSQL } from 'src/parser/trino';
 import { TrinoSqlVisitor } from 'src/lib/trino/TrinoSqlVisitor';
-import { AbstractParseTreeVisitor } from 'antlr4ng';
 describe('trino SQL Visitor Tests', () => {
     const expectTableName = 'user1';
@@ -12,17 +11,22 @@ describe('trino SQL Visitor Tests', () => {
     });
     test('Visitor visitTableName', () => {
-        let result = '';
-        class MyVisitor extends AbstractParseTreeVisitor<any> implements TrinoSqlVisitor<any> {
-            protected defaultResult() {
-                return result;
-            }
-            visitTableName = (ctx): void => {
-                result = ctx.getText().toLowerCase();
-            };
-        }
-        const visitor: any = new MyVisitor();
-        visitor.visit(parseTree);
+        class MyVisitor extends TrinoSqlVisitor<string> {
+            defaultResult(): string {
+                return '';
+            }
+            aggregateResult(aggregate: string, nextResult: string): string {
+                return aggregate + nextResult;
+            }
+            visitProgram = (ctx) => {
+                return this.visitChildren(ctx);
+            };
+            visitTableName = (ctx) => {
+                return ctx.getText().toLowerCase();
+            };
+        }
+        const visitor = new MyVisitor();
+        const result = visitor.visit(parseTree);
         expect(result).toBe(expectTableName);
     });


@@ -3,6 +3,13 @@
     "compilerOptions": {
         "baseUrl": "../",
         "noEmit": true,
+        "allowSyntheticDefaultImports": true,
+        "strictNullChecks": false,
+        "noUnusedLocals": true,
+        "noImplicitAny": false,
+        "noImplicitOverride": false,
+        "noImplicitReturns": true,
+        "noImplicitThis": true,
         "paths": {
             "src/*": ["src/*"],
             "test/*": ["test/*"]

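Note the asymmetry: the test config above keeps strictNullChecks, noImplicitAny, and noImplicitOverride off, while the src config below turns the full strict set on. A minimal illustration of what the stricter settings start to catch (example code is illustrative, not from the repository):

// With "strictNullChecks": true, a possibly-undefined value must be narrowed:
function firstKeyword(words: string[]): string | undefined {
    return words.find((word) => word.startsWith('KW_'));
}
const keyword = firstKeyword([]);
// keyword.toLowerCase();                   // error: 'keyword' is possibly 'undefined'
const lower = keyword?.toLowerCase() ?? ''; // narrow with ?. and ?? instead

// With "noImplicitThis": true (enabled for tests too), `this` in an arrow-function
// property is typed from the enclosing class — the style the rewritten listeners rely on:
class Collector {
    result = '';
    append = (text: string): void => {
        this.result += text; // `this` is Collector, so `result` is checked
    };
}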

@@ -7,10 +7,17 @@
         "module": "ESNext",
         "moduleResolution": "node",
         "declaration": true,
-        "noUnusedLocals": false,
-        "noUnusedParameters": false,
-        "allowSyntheticDefaultImports": true,
-        "esModuleInterop": true,
+        "strictNullChecks": true,
+        "noUnusedLocals": true,
+        "noImplicitAny": true,
+        "noImplicitOverride": true,
+        "noImplicitReturns": true,
+        "noImplicitThis": true,
+        "forceConsistentCasingInFileNames": true,
+        "lib": [
+            "ESNext",
+            "DOM"
+        ],
         "skipLibCheck": true,
         "types": [
             "node",
@@ -19,7 +26,6 @@
         "typeRoots": [
             "node",
             "node_modules/@types",
-            "./src/typings"
         ]
     },
     "isolatedModules": true,