build: open ts strict check (#279)
* feat: no check lib dir
* feat: open strict check to src dir
* test: update test tsconfig.json
* feat: remove any type
* feat: do not export AbstractParseTreeVisitor
* feat: export StmtContextType as enum
* build: improve antlr4 script
This commit is contained in:
parent
bb0fad1dbe
commit
c6615aecac
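Note on "export StmtContextType as enum": StmtContextType becomes a runtime enum export instead of a type-only export, so downstream code can use it in value positions. A minimal consumer-side sketch, assuming the published entry point re-exports the names shown in the index.ts hunk below (the helper function itself is illustrative):

import { StmtContextType } from 'dt-sql-parser';
import type { StmtContext } from 'dt-sql-parser';

// Hypothetical helper: a type-only import could not be compared at runtime,
// but the enum export can. COMMON_STMT and stmtContextType appear in the diff below.
function isCommonStmt(stmt: StmtContext): boolean {
    return stmt.stmtContextType === StmtContextType.COMMON_STMT;
}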
@@ -67,9 +67,16 @@ function main() {
});
} else if (argv.lang) {
// compile single: yarn antlr4 --lang=mysql
const supportedLanguage = languageEntries.some((language) => language === argv.lang);
if (supportedLanguage) {
compile(argv.lang);
const supportedLanguage = languageEntries.find((language) =>
language.startsWith(argv.lang)
);

if (argv.lang === 'all') {
languageEntries.forEach((language) => {
compile(language);
});
} else if (supportedLanguage) {
compile(supportedLanguage);
} else {
console.error(
chalk.bold.red('\n[Invalid language]:'),
@@ -19,7 +19,9 @@ function processFile(filePath) {
if (slices.length !== 2) return;
firstLineContent = `// Generated from dt-sql-parser/src/grammar/` + slices[1];

fs.writeFileSync(filePath, firstLineContent + restContent, 'utf-8');
const tsNoCheckComment = '\n\n// @ts-nocheck';

fs.writeFileSync(filePath, firstLineContent + tsNoCheckComment + restContent, 'utf-8');
} catch (error) {
console.error(error);
}
@@ -1,5 +1,3 @@
export { AbstractParseTreeVisitor } from 'antlr4ng';

export {
MySQL,
FlinkSQL,
@@ -40,13 +38,15 @@ export {
EntityContextType as SyntaxContextType,
} from './parser/common/types';

export { StmtContextType } from './parser/common/entityCollector';

export type { CaretPosition, Suggestions, SyntaxSuggestion } from './parser/common/types';

export type { WordRange, TextSlice } from './parser/common/textAndWord';

export type { SyntaxError, ParseError, ErrorListener } from './parser/common/parseErrorListener';

export type { StmtContextType, StmtContext, EntityContext } from './parser/common/entityCollector';
export type { StmtContext, EntityContext } from './parser/common/entityCollector';

/**
 * @deprecated Legacy utils will be removed when the stable version is released.
@@ -12,7 +12,7 @@ export abstract class SQLParserBase<T = antlr.ParserRuleContext> extends antlr.P

public shouldMatchEmpty () {
return this.entityCollecting
&& this.tokenStream.LT(-1).tokenIndex <= this.caretTokenIndex
&& this.tokenStream.LT(1).tokenIndex >= this.caretTokenIndex
&& (this.tokenStream.LT(-1)?.tokenIndex ?? Infinity) <= this.caretTokenIndex
&& (this.tokenStream.LT(1)?.tokenIndex ?? -Infinity) >= this.caretTokenIndex
}
}
@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/flink/FlinkSqlLexer.g4 by ANTLR 4.13.1

// @ts-nocheck

import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/flink/FlinkSqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/flink/FlinkSqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/flink/FlinkSqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import { AbstractParseTreeVisitor } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/hive/HiveSqlLexer.g4 by ANTLR 4.13.1

// @ts-nocheck

import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/hive/HiveSqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/hive/HiveSqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/hive/HiveSqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import { AbstractParseTreeVisitor } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/impala/ImpalaSqlLexer.g4 by ANTLR 4.13.1

// @ts-nocheck

import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/impala/ImpalaSqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/impala/ImpalaSqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/impala/ImpalaSqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import { AbstractParseTreeVisitor } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/mysql/MySqlLexer.g4 by ANTLR 4.13.1

// @ts-nocheck

import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/mysql/MySqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/mysql/MySqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/mysql/MySqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import { AbstractParseTreeVisitor } from "antlr4ng";

@@ -1,9 +1,6 @@
import { Lexer } from "antlr4ng";

export abstract class PlSqlBaseLexer extends Lexer {

_interp: any;

IsNewlineAtPos(pos: number): boolean {
const la = this._input.LA(pos);
return la == -1 || String.fromCharCode(la) == '\n';

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/plsql/PlSqlLexer.g4 by ANTLR 4.13.1

// @ts-nocheck

import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/plsql/PlSqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/plsql/PlSqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/plsql/PlSqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import { AbstractParseTreeVisitor } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/postgresql/PostgreSqlLexer.g4 by ANTLR 4.13.1

// @ts-nocheck

import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/postgresql/PostgreSqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/postgresql/PostgreSqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/postgresql/PostgreSqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import { AbstractParseTreeVisitor } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/spark/SparkSqlLexer.g4 by ANTLR 4.13.1

// @ts-nocheck

import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/spark/SparkSqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/spark/SparkSqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/spark/SparkSqlParser.g4 by ANTLR 4.13.1

// @ts-nocheck

import { AbstractParseTreeVisitor } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/trino/TrinoSql.g4 by ANTLR 4.13.1

// @ts-nocheck

import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/trino/TrinoSql.g4 by ANTLR 4.13.1

// @ts-nocheck

import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/trino/TrinoSql.g4 by ANTLR 4.13.1

// @ts-nocheck

import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";

@@ -1,5 +1,7 @@
// Generated from dt-sql-parser/src/grammar/trino/TrinoSql.g4 by ANTLR 4.13.1

// @ts-nocheck

import { AbstractParseTreeVisitor } from "antlr4ng";
@@ -33,12 +33,12 @@ export abstract class BasicSQL<
protected _lexer: L;
protected _tokenStream: CommonTokenStream;
protected _parser: P;
protected _parseTree: PRC;
protected _parsedInput: string = null;
protected _parseTree: PRC | null;
protected _parsedInput: string;
protected _parseErrors: ParseError[] = [];
/** members for cache end */

private _errorListener: ErrorListener<any> = (error) => {
private _errorListener: ErrorListener = (error) => {
this._parseErrors.push(error);
};

@@ -90,7 +90,7 @@ export abstract class BasicSQL<
* Create an antlr4 lexer from input.
* @param input string
*/
public createLexer(input: string, errorListener?: ErrorListener<any>) {
public createLexer(input: string, errorListener?: ErrorListener) {
const charStreams = CharStreams.fromString(input);
const lexer = this.createLexerFromCharStream(charStreams);
if (errorListener) {
@@ -104,7 +104,7 @@ export abstract class BasicSQL<
* Create an antlr4 parser from input.
* @param input string
*/
public createParser(input: string, errorListener?: ErrorListener<any>) {
public createParser(input: string, errorListener?: ErrorListener) {
const lexer = this.createLexer(input, errorListener);
const tokenStream = new CommonTokenStream(lexer);
const parser = this.createParserFromTokenStream(tokenStream);
@@ -123,7 +123,7 @@ export abstract class BasicSQL<
* @param errorListener listen parse errors and lexer errors.
* @returns parseTree
*/
public parse(input: string, errorListener?: ErrorListener<any>) {
public parse(input: string, errorListener?: ErrorListener) {
const parser = this.createParser(input, errorListener);
parser.buildParseTrees = true;
parser.errorHandler = new ErrorStrategy();
@@ -168,9 +168,9 @@ export abstract class BasicSQL<
* @param errorListener listen errors
* @returns parseTree
*/
private parseWithCache(input: string, errorListener?: ErrorListener<any>) {
private parseWithCache(input: string, errorListener?: ErrorListener): PRC {
// Avoid parsing the same input repeatedly.
if (this._parsedInput === input && !errorListener) {
if (this._parsedInput === input && !errorListener && this._parseTree) {
return this._parseTree;
}
this._parseErrors = [];
@@ -225,9 +225,9 @@ export abstract class BasicSQL<
* If exist syntax error it will return null.
* @param input source string
*/
public splitSQLByStatement(input): TextSlice[] {
public splitSQLByStatement(input: string): TextSlice[] | null {
const errors = this.validate(input);
if (errors.length) {
if (errors.length || !this._parseTree) {
return null;
}
const splitListener = this.splitListener;
@@ -236,9 +236,11 @@ export abstract class BasicSQL<

this.listen(splitListener, this._parseTree);

const res = splitListener.statementsContext.map((context) => {
return ctxToText(context, this._parsedInput);
});
const res = splitListener.statementsContext
.map((context) => {
return ctxToText(context, this._parsedInput);
})
.filter(Boolean) as TextSlice[];

return res;
}
@@ -258,6 +260,8 @@ export abstract class BasicSQL<
if (!splitListener) return null;

this.parseWithCache(input);
if (!this._parseTree) return null;

let sqlParserIns = this._parser;
const allTokens = this.getAllTokens(input);
let caretTokenIndex = findCaretTokenIndex(caretPosition, allTokens);
@@ -281,8 +285,8 @@ export abstract class BasicSQL<
* The boundaries of this range must be statements with no syntax errors.
* This can ensure the stable performance of the C3.
*/
let startStatement: ParserRuleContext;
let stopStatement: ParserRuleContext;
let startStatement: ParserRuleContext | null = null;
let stopStatement: ParserRuleContext | null = null;

for (let index = 0; index < statementCount; index++) {
const ctx = statementsContext[index];
@@ -297,11 +301,16 @@ export abstract class BasicSQL<
const isNextCtxValid =
index === statementCount - 1 || !statementsContext[index + 1]?.exception;

if (ctx.stop.tokenIndex < caretTokenIndex && isPrevCtxValid) {
if (ctx.stop && ctx.stop.tokenIndex < caretTokenIndex && isPrevCtxValid) {
startStatement = ctx;
}

if (!stopStatement && ctx.start.tokenIndex > caretTokenIndex && isNextCtxValid) {
if (
ctx.start &&
!stopStatement &&
ctx.start.tokenIndex > caretTokenIndex &&
isNextCtxValid
) {
stopStatement = ctx;
break;
}
@@ -369,7 +378,9 @@ export abstract class BasicSQL<

public getAllEntities(input: string, caretPosition?: CaretPosition): EntityContext[] | null {
const allTokens = this.getAllTokens(input);
const caretTokenIndex = findCaretTokenIndex(caretPosition, allTokens);
const caretTokenIndex = caretPosition
? findCaretTokenIndex(caretPosition, allTokens)
: void 0;

const collectListener = this.createEntityCollector(input, caretTokenIndex);
// TODO: add entityCollector to all sqlParser implements and remove following if
@@ -34,8 +34,10 @@ export function toStmtContext(
rootStmt: StmtContext | null,
parentStmt: StmtContext | null,
isContainCaret?: boolean
): StmtContext {
const { text: _, ...position } = ctxToText(ctx, input);
): StmtContext | null {
const text = ctxToText(ctx, input);
if (!text) return null;
const { text: _, ...position } = text;
return {
stmtContextType: type,
position,
@@ -72,8 +74,10 @@ export function toEntityContext(
input: string,
belongStmt: StmtContext,
alias?: BaseAliasContext
): EntityContext {
const { text, ...position } = ctxToWord(ctx, input);
): EntityContext | null {
const word = ctxToWord(ctx, input);
if (!word) return null;
const { text, ...position } = word;
const finalAlias = Object.assign({}, baseAlias, alias ?? {});
return {
entityContextType: type,
@@ -110,7 +114,7 @@ export abstract class EntityCollector {
* Always point to the first non-commonStmt at the bottom of the _stmtStack,
* unless there are only commonStmts in the _stmtStack.
* */
private _rootStmt: StmtContext;
private _rootStmt: StmtContext | null;

visitTerminal() {}

@@ -132,11 +136,13 @@ export abstract class EntityCollector {
}

protected pushStmt(ctx: ParserRuleContext, type: StmtContextType) {
let isContainCaret;
let isContainCaret: boolean | undefined;
if (this._caretTokenIndex >= 0) {
isContainCaret =
!!ctx.start &&
!!ctx.stop &&
ctx.start.tokenIndex <= this._caretTokenIndex &&
ctx.stop?.tokenIndex >= this._caretTokenIndex;
ctx.stop.tokenIndex >= this._caretTokenIndex;
}
const stmtContext = toStmtContext(
ctx,
@@ -146,20 +152,22 @@ export abstract class EntityCollector {
this._stmtStack.peek(),
isContainCaret
);
if (
this._stmtStack.isEmpty() ||
this._stmtStack.peek()?.stmtContextType === StmtContextType.COMMON_STMT
) {
this._rootStmt = stmtContext;
if (stmtContext) {
if (
this._stmtStack.isEmpty() ||
this._stmtStack.peek()?.stmtContextType === StmtContextType.COMMON_STMT
) {
this._rootStmt = stmtContext;
}
this._stmtStack.push(stmtContext);
}
this._stmtStack.push(stmtContext);

return stmtContext;
}

protected popStmt() {
const stmtContext = this._stmtStack.pop();
if (this._rootStmt === stmtContext) {
if (stmtContext && this._rootStmt === stmtContext) {
this._rootStmt = this._stmtStack.peek();
if (!this._entityStack.isEmpty()) {
this.combineEntitiesAndAdd(stmtContext);
@@ -180,11 +188,13 @@ export abstract class EntityCollector {
this._stmtStack.peek(),
alias
);
if (this._stmtStack.isEmpty()) {
this._entitiesSet.add(entityContext);
} else {
// If is inside a statement
this._entityStack.push(entityContext);
if (entityContext) {
if (this._stmtStack.isEmpty()) {
this._entitiesSet.add(entityContext);
} else {
// If is inside a statement
this._entityStack.push(entityContext);
}
}
return entityContext;
}
@@ -204,12 +214,11 @@ export abstract class EntityCollector {
entitiesInsideStmt.unshift(this._entityStack.pop());
}

let tmpResults = entitiesInsideStmt;
const combinedEntities = this.combineRootStmtEntities(stmtContext, entitiesInsideStmt);

tmpResults = this.combineRootStmtEntities(stmtContext, entitiesInsideStmt);

while (tmpResults.length) {
this._entitiesSet.add(tmpResults.shift());
while (combinedEntities.length) {
const entity = combinedEntities.shift();
entity && this._entitiesSet.add(entity);
}
}

@@ -235,7 +244,7 @@ export abstract class EntityCollector {
): EntityContext[] {
const columns: EntityContext[] = [];
const relatedEntities: EntityContext[] = [];
let mainEntity: EntityContext = null;
let mainEntity: EntityContext | null = null;
const finalEntities = entitiesInsideStmt.reduce((result, entity) => {
if (entity.belongStmt !== stmtContext) {
if (
@@ -262,14 +271,14 @@ export abstract class EntityCollector {
result.push(entity);
}
return result;
}, []);
}, [] as EntityContext[]);

if (columns.length) {
mainEntity.columns = columns;
if (mainEntity && columns.length) {
(mainEntity as EntityContext).columns = columns;
}

if (relatedEntities.length) {
mainEntity.relatedEntities = relatedEntities;
if (mainEntity && relatedEntities.length) {
(mainEntity as EntityContext).relatedEntities = relatedEntities;
}

return finalEntities;
@ -13,10 +13,10 @@ import {
|
||||
* The difference is that it assigns exception to the context.exception when it encounters error.
|
||||
*/
|
||||
export class ErrorStrategy extends DefaultErrorStrategy {
|
||||
public recover(recognizer: Parser, e: RecognitionException): void {
|
||||
public override recover(recognizer: Parser, e: RecognitionException): void {
|
||||
// Mark the context as an anomaly
|
||||
for (
|
||||
let context: ParserRuleContext | undefined = recognizer.context;
|
||||
let context: ParserRuleContext | null = recognizer.context;
|
||||
context;
|
||||
context = context.parent
|
||||
) {
|
||||
@ -40,7 +40,7 @@ export class ErrorStrategy extends DefaultErrorStrategy {
|
||||
this.consumeUntil(recognizer, followSet);
|
||||
}
|
||||
|
||||
public recoverInline(recognizer: Parser): Token {
|
||||
public override recoverInline(recognizer: Parser): Token {
|
||||
let e: RecognitionException;
|
||||
if (this.nextTokensContext === undefined) {
|
||||
e = new InputMismatchException(recognizer);
|
||||
@ -50,7 +50,7 @@ export class ErrorStrategy extends DefaultErrorStrategy {
|
||||
|
||||
// Mark the context as an anomaly
|
||||
for (
|
||||
let context: ParserRuleContext | undefined = recognizer.context;
|
||||
let context: ParserRuleContext | null = recognizer.context;
|
||||
context;
|
||||
context = context.parent
|
||||
) {
|
||||
|
@ -7,7 +7,10 @@ import { CaretPosition } from './types';
|
||||
* @param allTokens all the tokens
|
||||
* @returns caretTokenIndex
|
||||
*/
|
||||
export function findCaretTokenIndex(caretPosition: CaretPosition, allTokens: Token[]) {
|
||||
export function findCaretTokenIndex(
|
||||
caretPosition: CaretPosition,
|
||||
allTokens: Token[]
|
||||
): number | undefined {
|
||||
const { lineNumber: caretLine, column: caretCol } = caretPosition;
|
||||
let left = 0;
|
||||
let right = allTokens.length - 1;
|
||||
@ -19,12 +22,12 @@ export function findCaretTokenIndex(caretPosition: CaretPosition, allTokens: Tok
|
||||
right = mid - 1;
|
||||
} else if (
|
||||
token.line < caretLine ||
|
||||
(token.line === caretLine && token.column + token.text.length + 1 < caretCol)
|
||||
(token.line === caretLine && token.column + (token.text?.length ?? 0) + 1 < caretCol)
|
||||
) {
|
||||
left = mid + 1;
|
||||
} else {
|
||||
return allTokens[mid].tokenIndex;
|
||||
}
|
||||
}
|
||||
return null;
|
||||
return void 0;
|
||||
}
|
||||
|
@ -24,9 +24,9 @@ export interface ParseError {
|
||||
/**
|
||||
* The type of error resulting from lexical parsing and parsing.
|
||||
*/
|
||||
export interface SyntaxError<T> {
|
||||
export interface SyntaxError {
|
||||
readonly recognizer: Recognizer<ATNSimulator>;
|
||||
readonly offendingSymbol: Token;
|
||||
readonly offendingSymbol: Token | null;
|
||||
readonly line: number;
|
||||
readonly charPositionInLine: number;
|
||||
readonly msg: string;
|
||||
@ -37,12 +37,12 @@ export interface SyntaxError<T> {
|
||||
* ErrorListener will be invoked when it encounters a parsing error.
|
||||
* Includes lexical errors and parsing errors.
|
||||
*/
|
||||
export type ErrorListener<T> = (parseError: ParseError, originalError: SyntaxError<T>) => void;
|
||||
export type ErrorListener = (parseError: ParseError, originalError: SyntaxError) => void;
|
||||
|
||||
export class ParseErrorListener implements ANTLRErrorListener {
|
||||
private _errorListener: ErrorListener<Token>;
|
||||
private _errorListener: ErrorListener;
|
||||
|
||||
constructor(errorListener: ErrorListener<Token>) {
|
||||
constructor(errorListener: ErrorListener) {
|
||||
this._errorListener = errorListener;
|
||||
}
|
||||
|
||||
@ -54,7 +54,7 @@ export class ParseErrorListener implements ANTLRErrorListener {
|
||||
|
||||
syntaxError(
|
||||
recognizer: Recognizer<ATNSimulator>,
|
||||
offendingSymbol,
|
||||
offendingSymbol: Token | null,
|
||||
line: number,
|
||||
charPositionInLine: number,
|
||||
msg: string,
|
||||
|
@ -9,7 +9,7 @@ export class SimpleStack<T> {
|
||||
}
|
||||
|
||||
pop(): T {
|
||||
return this.stack.pop();
|
||||
return this.stack.pop() as T;
|
||||
}
|
||||
|
||||
peek(): T {
|
||||
|
@ -43,20 +43,27 @@ export interface TextSlice extends TextPosition {
|
||||
export function tokenToWord(token: Token, input: string): WordPosition & { text: string } {
|
||||
const startIndex = token.start;
|
||||
const endIndex = token.stop;
|
||||
const text = token.text ?? '';
|
||||
return {
|
||||
text: token.text,
|
||||
text,
|
||||
line: token.line,
|
||||
startIndex,
|
||||
endIndex,
|
||||
startColumn: token.column + 1,
|
||||
endColumn: token.column + 1 + token.text.length,
|
||||
endColumn: token.column + 1 + text.length,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert ParserRuleContext to Word
|
||||
*/
|
||||
export function ctxToWord(ctx: ParserRuleContext, input: string): WordPosition & { text: string } {
|
||||
export function ctxToWord(
|
||||
ctx: ParserRuleContext,
|
||||
input: string
|
||||
): (WordPosition & { text: string }) | null {
|
||||
if (!ctx.start || !ctx.stop) {
|
||||
return null;
|
||||
}
|
||||
const startIndex = ctx.start.start;
|
||||
const endIndex = ctx.stop.stop;
|
||||
const text = input.slice(startIndex, endIndex + 1);
|
||||
@ -66,14 +73,20 @@ export function ctxToWord(ctx: ParserRuleContext, input: string): WordPosition &
|
||||
startIndex,
|
||||
endIndex,
|
||||
startColumn: ctx.start.column + 1,
|
||||
endColumn: ctx.stop.column + 1 + ctx.stop.text.length,
|
||||
endColumn: ctx.stop.column + 1 + (ctx.stop.text?.length ?? 0),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Convert ParserRuleContext to Text
|
||||
*/
|
||||
export function ctxToText(ctx: ParserRuleContext, input: string): TextPosition & { text: string } {
|
||||
export function ctxToText(
|
||||
ctx: ParserRuleContext,
|
||||
input: string
|
||||
): (TextPosition & { text: string }) | null {
|
||||
if (!ctx.start || !ctx.stop) {
|
||||
return null;
|
||||
}
|
||||
const startIndex = ctx.start.start;
|
||||
const endIndex = ctx.stop.stop;
|
||||
const text = input.slice(startIndex, endIndex + 1);
|
||||
@ -84,6 +97,6 @@ export function ctxToText(ctx: ParserRuleContext, input: string): TextPosition &
|
||||
startIndex,
|
||||
endIndex,
|
||||
startColumn: ctx.start.column + 1,
|
||||
endColumn: ctx.stop.column + 1 + ctx.stop.text.length,
|
||||
endColumn: ctx.stop.column + 1 + (ctx.stop.text?.length ?? 0),
|
||||
};
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
import { Token } from 'antlr4ng';
|
||||
import { CharStream, CommonTokenStream, Token } from 'antlr4ng';
|
||||
import { CandidatesCollection } from 'antlr4-c3';
|
||||
import { FlinkSqlLexer } from '../../lib/flink/FlinkSqlLexer';
|
||||
import { FlinkSqlParser, ProgramContext } from '../../lib/flink/FlinkSqlParser';
|
||||
@ -11,11 +11,11 @@ import { FlinkEntityCollector } from './flinkEntityCollector';
|
||||
export { FlinkSqlSplitListener, FlinkEntityCollector };
|
||||
|
||||
export class FlinkSQL extends BasicSQL<FlinkSqlLexer, ProgramContext, FlinkSqlParser> {
|
||||
protected createLexerFromCharStream(charStreams) {
|
||||
protected createLexerFromCharStream(charStreams: CharStream) {
|
||||
return new FlinkSqlLexer(charStreams);
|
||||
}
|
||||
|
||||
protected createParserFromTokenStream(tokenStream) {
|
||||
protected createParserFromTokenStream(tokenStream: CommonTokenStream) {
|
||||
return new FlinkSqlParser(tokenStream);
|
||||
}
|
||||
|
||||
@ -58,7 +58,7 @@ export class FlinkSQL extends BasicSQL<FlinkSqlLexer, ProgramContext, FlinkSqlPa
|
||||
caretTokenIndex + tokenIndexOffset + 1
|
||||
);
|
||||
|
||||
let syntaxContextType: EntityContextType | StmtContextType;
|
||||
let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
|
||||
switch (ruleType) {
|
||||
case FlinkSqlParser.RULE_catalogPath: {
|
||||
syntaxContextType = EntityContextType.CATALOG;
|
||||
@ -119,7 +119,7 @@ export class FlinkSQL extends BasicSQL<FlinkSqlLexer, ProgramContext, FlinkSqlPa
|
||||
for (let candidate of candidates.tokens) {
|
||||
const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
|
||||
const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
|
||||
if (symbolicName && symbolicName.startsWith('KW_')) {
|
||||
if (displayName && symbolicName && symbolicName.startsWith('KW_')) {
|
||||
const keyword =
|
||||
displayName.startsWith("'") && displayName.endsWith("'")
|
||||
? displayName.slice(1, -1)
|
||||
|
@ -11,7 +11,6 @@ import {
|
||||
DbSchemaNameCreateContext,
|
||||
FromInsertStmtContext,
|
||||
FromSelectStmtContext,
|
||||
FromStatementContext,
|
||||
FunctionNameCreateContext,
|
||||
InsertStmtContext,
|
||||
SelectStatementContext,
|
||||
|
@ -1,4 +1,4 @@
|
||||
import { Token } from 'antlr4ng';
|
||||
import { CharStream, CommonTokenStream, Token } from 'antlr4ng';
|
||||
import { CandidatesCollection } from 'antlr4-c3';
|
||||
import { HiveSqlLexer } from '../../lib/hive/HiveSqlLexer';
|
||||
import { HiveSqlParser, ProgramContext } from '../../lib/hive/HiveSqlParser';
|
||||
@ -12,11 +12,11 @@ import { HiveEntityCollector } from './hiveEntityCollector';
|
||||
export { HiveEntityCollector, HiveSqlSplitListener };
|
||||
|
||||
export class HiveSQL extends BasicSQL<HiveSqlLexer, ProgramContext, HiveSqlParser> {
|
||||
protected createLexerFromCharStream(charStreams) {
|
||||
protected createLexerFromCharStream(charStreams: CharStream) {
|
||||
return new HiveSqlLexer(charStreams);
|
||||
}
|
||||
|
||||
protected createParserFromTokenStream(tokenStream) {
|
||||
protected createParserFromTokenStream(tokenStream: CommonTokenStream) {
|
||||
return new HiveSqlParser(tokenStream);
|
||||
}
|
||||
|
||||
@ -58,7 +58,7 @@ export class HiveSQL extends BasicSQL<HiveSqlLexer, ProgramContext, HiveSqlParse
|
||||
caretTokenIndex + tokenIndexOffset + 1
|
||||
);
|
||||
|
||||
let syntaxContextType: EntityContextType | StmtContextType;
|
||||
let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
|
||||
switch (ruleType) {
|
||||
case HiveSqlParser.RULE_dbSchemaName: {
|
||||
syntaxContextType = EntityContextType.DATABASE;
|
||||
@ -116,7 +116,7 @@ export class HiveSQL extends BasicSQL<HiveSqlLexer, ProgramContext, HiveSqlParse
|
||||
for (let candidate of candidates.tokens) {
|
||||
const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
|
||||
const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
|
||||
if (symbolicName && symbolicName.startsWith('KW_')) {
|
||||
if (displayName && symbolicName && symbolicName.startsWith('KW_')) {
|
||||
const keyword =
|
||||
displayName.startsWith("'") && displayName.endsWith("'")
|
||||
? displayName.slice(1, -1)
|
||||
|
@ -11,7 +11,6 @@ import {
|
||||
DatabaseNameCreateContext,
|
||||
DatabaseNamePathContext,
|
||||
FunctionNameCreateContext,
|
||||
FunctionNamePathContext,
|
||||
InsertStatementContext,
|
||||
QueryStatementContext,
|
||||
SingleStatementContext,
|
||||
|
@ -1,4 +1,4 @@
|
||||
import { Token } from 'antlr4ng';
|
||||
import { CharStream, CommonTokenStream, Token } from 'antlr4ng';
|
||||
import { CandidatesCollection } from 'antlr4-c3';
|
||||
import { ImpalaSqlLexer } from '../../lib/impala/ImpalaSqlLexer';
|
||||
import { ImpalaSqlParser, ProgramContext } from '../../lib/impala/ImpalaSqlParser';
|
||||
@ -11,11 +11,11 @@ import { ImpalaEntityCollector } from './impalaEntityCollector';
|
||||
export { ImpalaEntityCollector, ImpalaSqlSplitListener };
|
||||
|
||||
export class ImpalaSQL extends BasicSQL<ImpalaSqlLexer, ProgramContext, ImpalaSqlParser> {
|
||||
protected createLexerFromCharStream(charStreams) {
|
||||
protected createLexerFromCharStream(charStreams: CharStream) {
|
||||
return new ImpalaSqlLexer(charStreams);
|
||||
}
|
||||
|
||||
protected createParserFromTokenStream(tokenStream) {
|
||||
protected createParserFromTokenStream(tokenStream: CommonTokenStream) {
|
||||
return new ImpalaSqlParser(tokenStream);
|
||||
}
|
||||
|
||||
@ -56,7 +56,7 @@ export class ImpalaSQL extends BasicSQL<ImpalaSqlLexer, ProgramContext, ImpalaSq
|
||||
caretTokenIndex + tokenIndexOffset + 1
|
||||
);
|
||||
|
||||
let syntaxContextType: EntityContextType | StmtContextType;
|
||||
let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
|
||||
switch (ruleType) {
|
||||
case ImpalaSqlParser.RULE_functionNameCreate: {
|
||||
syntaxContextType = EntityContextType.FUNCTION_CREATE;
|
||||
@ -112,7 +112,7 @@ export class ImpalaSQL extends BasicSQL<ImpalaSqlLexer, ProgramContext, ImpalaSq
|
||||
for (let candidate of candidates.tokens) {
|
||||
const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
|
||||
const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
|
||||
if (symbolicName && symbolicName.startsWith('KW_')) {
|
||||
if (displayName && symbolicName && symbolicName.startsWith('KW_')) {
|
||||
const keyword =
|
||||
displayName.startsWith("'") && displayName.endsWith("'")
|
||||
? displayName.slice(1, -1)
|
||||
|
@ -1,4 +1,4 @@
|
||||
import { Token } from 'antlr4ng';
|
||||
import { CharStream, CommonTokenStream, Token } from 'antlr4ng';
|
||||
import { CandidatesCollection } from 'antlr4-c3';
|
||||
import { MySqlLexer } from '../../lib/mysql/MySqlLexer';
|
||||
import { MySqlParser, ProgramContext } from '../../lib/mysql/MySqlParser';
|
||||
@ -11,11 +11,11 @@ import { MySqlEntityCollector } from './mysqlEntityCollector';
|
||||
export { MySqlEntityCollector, MysqlSplitListener };
|
||||
|
||||
export class MySQL extends BasicSQL<MySqlLexer, ProgramContext, MySqlParser> {
|
||||
protected createLexerFromCharStream(charStreams): MySqlLexer {
|
||||
protected createLexerFromCharStream(charStreams: CharStream): MySqlLexer {
|
||||
return new MySqlLexer(charStreams);
|
||||
}
|
||||
|
||||
protected createParserFromTokenStream(tokenStream): MySqlParser {
|
||||
protected createParserFromTokenStream(tokenStream: CommonTokenStream): MySqlParser {
|
||||
return new MySqlParser(tokenStream);
|
||||
}
|
||||
|
||||
@ -57,7 +57,7 @@ export class MySQL extends BasicSQL<MySqlLexer, ProgramContext, MySqlParser> {
|
||||
caretTokenIndex + tokenIndexOffset + 1
|
||||
);
|
||||
|
||||
let syntaxContextType: EntityContextType | StmtContextType;
|
||||
let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
|
||||
switch (ruleType) {
|
||||
case MySqlParser.RULE_databaseName: {
|
||||
syntaxContextType = EntityContextType.DATABASE;
|
||||
@ -114,7 +114,7 @@ export class MySQL extends BasicSQL<MySqlLexer, ProgramContext, MySqlParser> {
|
||||
for (const candidate of candidates.tokens) {
|
||||
const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
|
||||
const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
|
||||
if (symbolicName && symbolicName.startsWith('KW_')) {
|
||||
if (displayName && symbolicName && symbolicName.startsWith('KW_')) {
|
||||
const keyword =
|
||||
displayName.startsWith("'") && displayName.endsWith("'")
|
||||
? displayName.slice(1, -1)
|
||||
|
@ -10,7 +10,6 @@ import type {
|
||||
FunctionNameCreateContext,
|
||||
InsertStatementContext,
|
||||
QueryCreateTableContext,
|
||||
SelectExpressionContext,
|
||||
SelectStatementContext,
|
||||
SingleStatementContext,
|
||||
TableNameContext,
|
||||
|
@ -1,4 +1,4 @@
|
||||
import { Token } from 'antlr4ng';
|
||||
import { CharStream, CommonTokenStream, Token } from 'antlr4ng';
|
||||
import { CandidatesCollection } from 'antlr4-c3';
|
||||
import { PlSqlLexer } from '../lib/plsql/PlSqlLexer';
|
||||
import { PlSqlParser, ProgramContext } from '../lib/plsql/PlSqlParser';
|
||||
@ -6,11 +6,11 @@ import { BasicSQL } from './common/basicSQL';
|
||||
import { Suggestions } from './common/types';
|
||||
|
||||
export class PLSQL extends BasicSQL<PlSqlLexer, ProgramContext, PlSqlParser> {
|
||||
protected createLexerFromCharStream(charStreams) {
|
||||
protected createLexerFromCharStream(charStreams: CharStream) {
|
||||
return new PlSqlLexer(charStreams);
|
||||
}
|
||||
|
||||
protected createParserFromTokenStream(tokenStream) {
|
||||
protected createParserFromTokenStream(tokenStream: CommonTokenStream) {
|
||||
return new PlSqlParser(tokenStream);
|
||||
}
|
||||
|
||||
@ -21,7 +21,7 @@ export class PLSQL extends BasicSQL<PlSqlLexer, ProgramContext, PlSqlParser> {
|
||||
}
|
||||
|
||||
protected createEntityCollector(input: string, caretTokenIndex?: number) {
|
||||
return null;
|
||||
return null as any;
|
||||
}
|
||||
|
||||
protected processCandidates(
|
||||
|
@ -1,5 +1,5 @@
|
||||
import { CandidatesCollection } from 'antlr4-c3';
|
||||
import { Token } from 'antlr4ng';
|
||||
import { CharStream, CommonTokenStream, Token } from 'antlr4ng';
|
||||
|
||||
import { PostgreSqlLexer } from '../../lib/postgresql/PostgreSqlLexer';
|
||||
import { PostgreSqlParser, ProgramContext } from '../../lib/postgresql/PostgreSqlParser';
|
||||
@ -12,11 +12,11 @@ import { PostgreSqlSplitListener } from './postgreSplitListener';
|
||||
export { PostgreSqlEntityCollector, PostgreSqlSplitListener };
|
||||
|
||||
export class PostgreSQL extends BasicSQL<PostgreSqlLexer, ProgramContext, PostgreSqlParser> {
|
||||
protected createLexerFromCharStream(charStreams) {
|
||||
protected createLexerFromCharStream(charStreams: CharStream) {
|
||||
return new PostgreSqlLexer(charStreams);
|
||||
}
|
||||
|
||||
protected createParserFromTokenStream(tokenStream) {
|
||||
protected createParserFromTokenStream(tokenStream: CommonTokenStream) {
|
||||
return new PostgreSqlParser(tokenStream);
|
||||
}
|
||||
|
||||
@ -61,7 +61,7 @@ export class PostgreSQL extends BasicSQL<PostgreSqlLexer, ProgramContext, Postgr
|
||||
caretTokenIndex + tokenIndexOffset + 1
|
||||
);
|
||||
|
||||
let syntaxContextType: EntityContextType | StmtContextType;
|
||||
let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
|
||||
switch (ruleType) {
|
||||
case PostgreSqlParser.RULE_table_name_create: {
|
||||
syntaxContextType = EntityContextType.TABLE_CREATE;
|
||||
@ -134,7 +134,7 @@ export class PostgreSQL extends BasicSQL<PostgreSqlLexer, ProgramContext, Postgr
|
||||
for (let candidate of candidates.tokens) {
|
||||
const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
|
||||
const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
|
||||
if (symbolicName && symbolicName.startsWith('KW_')) {
|
||||
if (displayName && symbolicName && symbolicName.startsWith('KW_')) {
|
||||
const keyword =
|
||||
displayName.startsWith("'") && displayName.endsWith("'")
|
||||
? displayName.slice(1, -1)
|
||||
|
@ -1,4 +1,4 @@
|
||||
import { Token } from 'antlr4ng';
|
||||
import { CharStream, CommonTokenStream, Token } from 'antlr4ng';
|
||||
import { CandidatesCollection } from 'antlr4-c3';
|
||||
import { SparkSqlLexer } from '../../lib/spark/SparkSqlLexer';
|
||||
import { SparkSqlParser, ProgramContext } from '../../lib/spark/SparkSqlParser';
|
||||
@ -11,11 +11,11 @@ import { SparkEntityCollector } from './sparkEntityCollector';
|
||||
export { SparkSqlSplitListener, SparkEntityCollector };
|
||||
|
||||
export class SparkSQL extends BasicSQL<SparkSqlLexer, ProgramContext, SparkSqlParser> {
|
||||
protected createLexerFromCharStream(charStreams) {
|
||||
protected createLexerFromCharStream(charStreams: CharStream) {
|
||||
return new SparkSqlLexer(charStreams);
|
||||
}
|
||||
|
||||
protected createParserFromTokenStream(tokenStream) {
|
||||
protected createParserFromTokenStream(tokenStream: CommonTokenStream) {
|
||||
return new SparkSqlParser(tokenStream);
|
||||
}
|
||||
|
||||
@ -57,7 +57,7 @@ export class SparkSQL extends BasicSQL<SparkSqlLexer, ProgramContext, SparkSqlPa
|
||||
caretTokenIndex + tokenIndexOffset + 1
|
||||
);
|
||||
|
||||
let syntaxContextType: EntityContextType | StmtContextType;
|
||||
let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
|
||||
switch (ruleType) {
|
||||
case SparkSqlParser.RULE_namespaceName: {
|
||||
syntaxContextType = EntityContextType.DATABASE;
|
||||
@ -114,7 +114,7 @@ export class SparkSQL extends BasicSQL<SparkSqlLexer, ProgramContext, SparkSqlPa
|
||||
for (const candidate of candidates.tokens) {
|
||||
const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
|
||||
const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
|
||||
if (symbolicName && symbolicName.startsWith('KW_')) {
|
||||
if (displayName && symbolicName && symbolicName.startsWith('KW_')) {
|
||||
const keyword =
|
||||
displayName.startsWith("'") && displayName.endsWith("'")
|
||||
? displayName.slice(1, -1)
|
||||
|
@ -1,4 +1,4 @@
|
||||
import { Token } from 'antlr4ng';
|
||||
import { CharStream, CommonTokenStream, Token } from 'antlr4ng';
|
||||
import { CandidatesCollection } from 'antlr4-c3';
|
||||
import { TrinoSqlLexer } from '../../lib/trino/TrinoSqlLexer';
|
||||
import { TrinoSqlParser, ProgramContext } from '../../lib/trino/TrinoSqlParser';
|
||||
@ -11,11 +11,11 @@ import { TrinoEntityCollector } from './trinoEntityCollector';
|
||||
export { TrinoSqlSplitListener, TrinoEntityCollector };
|
||||
|
||||
export class TrinoSQL extends BasicSQL<TrinoSqlLexer, ProgramContext, TrinoSqlParser> {
|
||||
protected createLexerFromCharStream(charStreams) {
|
||||
protected createLexerFromCharStream(charStreams: CharStream) {
|
||||
return new TrinoSqlLexer(charStreams);
|
||||
}
|
||||
|
||||
protected createParserFromTokenStream(tokenStream) {
|
||||
protected createParserFromTokenStream(tokenStream: CommonTokenStream) {
|
||||
return new TrinoSqlParser(tokenStream);
|
||||
}
|
||||
|
||||
@ -58,7 +58,7 @@ export class TrinoSQL extends BasicSQL<TrinoSqlLexer, ProgramContext, TrinoSqlPa
|
||||
caretTokenIndex + tokenIndexOffset + 1
|
||||
);
|
||||
|
||||
let syntaxContextType: EntityContextType | StmtContextType;
|
||||
let syntaxContextType: EntityContextType | StmtContextType | undefined = void 0;
|
||||
switch (ruleType) {
|
||||
case TrinoSqlParser.RULE_catalogName: {
|
||||
syntaxContextType = EntityContextType.CATALOG;
|
||||
@ -115,7 +115,7 @@ export class TrinoSQL extends BasicSQL<TrinoSqlLexer, ProgramContext, TrinoSqlPa
|
||||
for (let candidate of candidates.tokens) {
|
||||
const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
|
||||
const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
|
||||
if (symbolicName && symbolicName.startsWith('KW_')) {
|
||||
if (displayName && symbolicName && symbolicName.startsWith('KW_')) {
|
||||
const keyword =
|
||||
displayName.startsWith("'") && displayName.endsWith("'")
|
||||
? displayName.slice(1, -1)
|
||||
|
@ -41,7 +41,7 @@ export enum Legacy_TokenType {
|
||||
export interface Legacy_Token {
|
||||
type: Legacy_TokenType;
|
||||
value: string;
|
||||
start: number;
|
||||
start?: number;
|
||||
end: number;
|
||||
lineNumber: number;
|
||||
message?: string;
|
||||
|
@ -15,8 +15,8 @@ describe('BasicSQL unit tests', () => {
|
||||
|
||||
test('Create lexer with errorListener', () => {
|
||||
const sql = '袋鼠云数栈UED团队';
|
||||
const errors: any[] = [];
|
||||
const errorListener: ErrorListener<any> = (err) => {
|
||||
const errors = [];
|
||||
const errorListener: ErrorListener = (err) => {
|
||||
errors.push(err);
|
||||
};
|
||||
const lexer = flink.createLexer(sql, errorListener);
|
||||
@ -35,8 +35,8 @@ describe('BasicSQL unit tests', () => {
|
||||
|
||||
test('Create parser with errorListener (lexer error)', () => {
|
||||
const sql = '袋鼠云数栈UED团队';
|
||||
const errors: any[] = [];
|
||||
const errorListener: ErrorListener<any> = (err) => {
|
||||
const errors = [];
|
||||
const errorListener: ErrorListener = (err) => {
|
||||
errors.push(err);
|
||||
};
|
||||
const parser = flink.createParser(sql, errorListener);
|
||||
@ -46,8 +46,8 @@ describe('BasicSQL unit tests', () => {
|
||||
|
||||
test('Create parser with errorListener (parse error)', () => {
|
||||
const sql = 'SHOW TA';
|
||||
const errors: any[] = [];
|
||||
const errorListener: ErrorListener<any> = (err) => {
|
||||
const errors = [];
|
||||
const errorListener: ErrorListener = (err) => {
|
||||
errors.push(err);
|
||||
};
|
||||
const parser = flink.createParser(sql, errorListener);
|
||||
@ -57,8 +57,8 @@ describe('BasicSQL unit tests', () => {
|
||||
|
||||
test('Parse right input', () => {
|
||||
const sql = 'SELECT * FROM tb1';
|
||||
const errors: any[] = [];
|
||||
const errorListener: ErrorListener<any> = (err) => {
|
||||
const errors = [];
|
||||
const errorListener: ErrorListener = (err) => {
|
||||
errors.push(err);
|
||||
};
|
||||
const parseTree = flink.parse(sql, errorListener);
|
||||
@ -70,8 +70,8 @@ describe('BasicSQL unit tests', () => {
|
||||
|
||||
test('Parse wrong input', () => {
|
||||
const sql = '袋鼠云数栈UED团队';
|
||||
const errors: any[] = [];
|
||||
const errorListener: ErrorListener<any> = (err) => {
|
||||
const errors = [];
|
||||
const errorListener: ErrorListener = (err) => {
|
||||
errors.push(err);
|
||||
};
|
||||
const parseTree = flink.parse(sql, errorListener);
|
||||
|
@ -19,7 +19,7 @@ describe('FlinkSQL benchmark tests', () => {
|
||||
const reportData: string[] = [];
|
||||
|
||||
test('createTable Over 100 Rows', async () => {
|
||||
const [totalTimes, averageTimes, msg] = benchmark('CreateTable Over 100 Rows', () => {
|
||||
const [totalTimes, averageTimes] = benchmark('CreateTable Over 100 Rows', () => {
|
||||
const testSQL = features.createTable[0];
|
||||
const res = flink.validate(testSQL);
|
||||
expect(res).toEqual([]);
|
||||
@ -28,7 +28,7 @@ describe('FlinkSQL benchmark tests', () => {
|
||||
});
|
||||
|
||||
test('createTable Over 1000 Rows', async () => {
|
||||
const [totalTimes, averageTimes, msg] = benchmark('CreateTable Over 1000 Rows', () => {
|
||||
const [totalTimes, averageTimes] = benchmark('CreateTable Over 1000 Rows', () => {
|
||||
const testSQL = features.createTable[1];
|
||||
const res = flink.validate(testSQL);
|
||||
expect(res).toEqual([]);
|
||||
@ -37,7 +37,7 @@ describe('FlinkSQL benchmark tests', () => {
|
||||
});
|
||||
|
||||
test('createTable Over 5000 Rows', async () => {
|
||||
const [totalTimes, averageTimes, msg] = benchmark('CreateTable Over 5000 Rows', () => {
|
||||
const [totalTimes, averageTimes] = benchmark('CreateTable Over 5000 Rows', () => {
|
||||
const testSQL = features.createTable[2];
|
||||
const res = flink.validate(testSQL);
|
||||
expect(res).toEqual([]);
|
||||
@ -46,7 +46,7 @@ describe('FlinkSQL benchmark tests', () => {
|
||||
});
|
||||
|
||||
test('selectTable Over 100 Rows', async () => {
|
||||
const [totalTimes, averageTimes, msg] = benchmark('SelectTable Over 100 Rows', () => {
|
||||
const [totalTimes, averageTimes] = benchmark('SelectTable Over 100 Rows', () => {
|
||||
const testSQL = features.selectTable[0];
|
||||
const res = flink.validate(testSQL);
|
||||
expect(res).toEqual([]);
|
||||
@ -55,7 +55,7 @@ describe('FlinkSQL benchmark tests', () => {
|
||||
});
|
||||
|
||||
test('selectTable Over 1000 Rows', async () => {
|
||||
const [totalTimes, averageTimes, msg] = benchmark('SelectTable Over 1000 Rows', () => {
|
||||
const [totalTimes, averageTimes] = benchmark('SelectTable Over 1000 Rows', () => {
|
||||
const testSQL = features.selectTable[1];
|
||||
const res = flink.validate(testSQL);
|
||||
expect(res).toEqual([]);
|
||||
@ -64,7 +64,7 @@ describe('FlinkSQL benchmark tests', () => {
|
||||
});
|
||||
|
||||
test('selectTable Over 5000 Rows', async () => {
|
||||
const [totalTimes, averageTimes, msg] = benchmark('SelectTable Over 5000 Rows', () => {
|
||||
const [totalTimes, averageTimes] = benchmark('SelectTable Over 5000 Rows', () => {
|
||||
const testSQL = features.selectTable[2];
|
||||
const res = flink.validate(testSQL);
|
||||
expect(res).toEqual([]);
|
||||
@ -73,7 +73,7 @@ describe('FlinkSQL benchmark tests', () => {
|
||||
});
|
||||
|
||||
test('insertTable Over 100 Rows', async () => {
|
||||
const [totalTimes, averageTimes, msg] = benchmark('InsertTable Over 100 Rows', () => {
|
||||
const [totalTimes, averageTimes] = benchmark('InsertTable Over 100 Rows', () => {
|
||||
const testSQL = features.insertTable[0];
|
||||
const res = flink.validate(testSQL);
|
||||
expect(res).toEqual([]);
|
||||
@ -82,7 +82,7 @@ describe('FlinkSQL benchmark tests', () => {
|
||||
});
|
||||
|
||||
test('insertTable Over 1000 Rows', async () => {
|
||||
const [totalTimes, averageTimes, msg] = benchmark('InsertTable Over 1000 Rows', () => {
|
||||
const [totalTimes, averageTimes] = benchmark('InsertTable Over 1000 Rows', () => {
|
||||
const testSQL = features.insertTable[1];
|
||||
const res = flink.validate(testSQL);
|
||||
expect(res).toEqual([]);
|
||||
@ -91,7 +91,7 @@ describe('FlinkSQL benchmark tests', () => {
|
||||
});
|
||||
|
||||
test('insertTable Over 5000 Rows', async () => {
|
||||
const [totalTimes, averageTimes, msg] = benchmark('InsertTable Over 5000 Rows', () => {
|
||||
const [totalTimes, averageTimes] = benchmark('InsertTable Over 5000 Rows', () => {
|
||||
const testSQL = features.insertTable[2];
|
||||
const res = flink.validate(testSQL);
|
||||
expect(res).toEqual([]);
|
||||
|
@ -1,4 +1,3 @@
|
||||
import { ErrorNode, ParserRuleContext, TerminalNode } from 'antlr4ng';
|
||||
import { FlinkSQL } from 'src/parser/flink';
|
||||
import { FlinkSqlParserListener } from 'src/lib/flink/FlinkSqlParserListener';
|
||||
import { TableExpressionContext } from 'src/lib/flink/FlinkSqlParser';
|
||||
@ -11,20 +10,17 @@ describe('Flink SQL Listener Tests', () => {
|
||||
const parseTree = flink.parse(sql);
|
||||
|
||||
test('Listener enterTableName', async () => {
|
||||
let result = '';
|
||||
class MyListener implements FlinkSqlParserListener {
|
||||
enterTableExpression = (ctx: TableExpressionContext): void => {
|
||||
result = ctx.getText().toLowerCase();
|
||||
};
|
||||
visitTerminal(node: TerminalNode): void {}
|
||||
visitErrorNode(node: ErrorNode): void {}
|
||||
enterEveryRule(node: ParserRuleContext): void {}
|
||||
exitEveryRule(node: ParserRuleContext): void {}
|
||||
}
|
||||
const listenTableName = new MyListener();
|
||||
class MyListener extends FlinkSqlParserListener {
|
||||
result = '';
|
||||
|
||||
await flink.listen(listenTableName, parseTree);
|
||||
expect(result).toBe(expectTableName);
|
||||
enterTableExpression = (ctx: TableExpressionContext): void => {
|
||||
this.result = ctx.getText().toLowerCase();
|
||||
};
|
||||
}
|
||||
const listener = new MyListener();
|
||||
|
||||
flink.listen(listener, parseTree);
|
||||
expect(listener.result).toBe(expectTableName);
|
||||
});
|
||||
|
||||
test('Split sql listener', async () => {
|
||||
|
@ -23,8 +23,6 @@ describe('Flink SQL Syntax Suggestion with collect entity', () => {
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const parseTree = flink.parse(sql);
|
||||
|
||||
const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
|
@ -1,7 +1,6 @@
|
||||
import { FlinkSQL } from 'src/parser/flink';
|
||||
import { FlinkSqlParserVisitor } from 'src/lib/flink/FlinkSqlParserVisitor';
|
||||
import { AbstractParseTreeVisitor } from 'antlr4ng';
|
||||
import { TableExpressionContext } from 'src/lib/flink/FlinkSqlParser';
|
||||
import { ProgramContext, TableExpressionContext } from 'src/lib/flink/FlinkSqlParser';
|
||||
|
||||
describe('Flink SQL Visitor Tests', () => {
|
||||
const expectTableName = 'user1';
|
||||
@ -13,20 +12,22 @@ describe('Flink SQL Visitor Tests', () => {
|
||||
});
|
||||
|
||||
test('Visitor visitTableName', () => {
|
||||
let result = '';
|
||||
class MyVisitor
|
||||
extends AbstractParseTreeVisitor<any>
|
||||
implements FlinkSqlParserVisitor<any>
|
||||
{
|
||||
protected defaultResult() {
|
||||
return result;
|
||||
class MyVisitor extends FlinkSqlParserVisitor<string> {
|
||||
defaultResult(): string {
|
||||
return '';
|
||||
}
|
||||
visitTableExpression(ctx: TableExpressionContext) {
|
||||
result = ctx.getText().toLowerCase();
|
||||
aggregateResult(aggregate: string, nextResult: string): string {
|
||||
return aggregate + nextResult;
|
||||
}
|
||||
visitProgram = (ctx: ProgramContext) => {
|
||||
return this.visitChildren(ctx);
|
||||
};
|
||||
visitTableExpression = (ctx: TableExpressionContext) => {
|
||||
return ctx.getText().toLowerCase();
|
||||
};
|
||||
}
|
||||
const visitor: any = new MyVisitor();
|
||||
visitor.visit(parseTree);
|
||||
const visitor = new MyVisitor();
|
||||
const result = visitor.visit(parseTree);
|
||||
|
||||
expect(result).toBe(expectTableName);
|
||||
});
|
||||
|
@ -1,6 +1,5 @@
import { ParseTreeListener } from 'antlr4ng';
import { HiveSQL } from 'src/parser/hive';
import { ProgramContext, SelectItemContext } from 'src/lib/hive/HiveSqlParser';
import { SelectItemContext } from 'src/lib/hive/HiveSqlParser';
import { HiveSqlParserListener } from 'src/lib/hive/HiveSqlParserListener';

describe('HiveSQL Listener Tests', () => {
@ -10,39 +9,33 @@ describe('HiveSQL Listener Tests', () => {
const sql = `select ${expectTableName} from tablename where inc_day='20190601' limit 1000;`;
const parseTree = hive.parse(sql);

let result = '';
class MyListener implements HiveSqlParserListener {
enterSelectItem(ctx: SelectItemContext) {
result = ctx.getText();
}
visitTerminal() {}
visitErrorNode() {}
enterEveryRule() {}
exitEveryRule() {}
}
const listenTableName = new MyListener();
class MyListener extends HiveSqlParserListener {
result = '';

await hive.listen(listenTableName as ParseTreeListener, parseTree as ProgramContext);
expect(result).toBe(expectTableName);
enterSelectItem = (ctx: SelectItemContext) => {
this.result = ctx.getText();
};
}
const listener = new MyListener();

hive.listen(listener, parseTree);
expect(listener.result).toBe(expectTableName);
});

test('Listener enterCreateTable', async () => {
const sql = `drop table table_name;`;
const parseTree = hive.parse(sql);
let result = '';
class MyListener implements HiveSqlParserListener {
enterDropTableStatement(ctx) {
result = ctx.getText();
}
class MyListener extends HiveSqlParserListener {
result = '';

visitTerminal() {}
visitErrorNode() {}
enterEveryRule() {}
exitEveryRule() {}
enterDropTableStatement = (ctx) => {
this.result = ctx.getText();
};
}
const listenTableName = new MyListener();
const listener = new MyListener();

await hive.listen(listenTableName as ParseTreeListener, parseTree as ProgramContext);
expect(result).toBe('droptabletable_name');
hive.listen(listener, parseTree);
expect(listener.result).toBe('droptabletable_name');
});

test('Split sql listener', async () => {
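The listener tests follow the same shape: extend the generated listener, keep state on the instance, and drop the manual `visitTerminal`/`enterEveryRule` stubs and the `as ParseTreeListener` cast. A minimal sketch of that pattern; the imports come from the diff above, while the `new HiveSQL()` construction and the sample SQL are assumptions:

```ts
import { HiveSQL } from 'src/parser/hive';
import { HiveSqlParserListener } from 'src/lib/hive/HiveSqlParserListener';
import { SelectItemContext } from 'src/lib/hive/HiveSqlParser';

const hive = new HiveSQL();
const parseTree = hive.parse('select id from tablename;');

class SelectItemListener extends HiveSqlParserListener {
    // Collected state lives on the listener instance instead of an outer variable.
    result = '';

    // Arrow-function property keeps `this` bound when the walker invokes the callback.
    enterSelectItem = (ctx: SelectItemContext) => {
        this.result = ctx.getText();
    };
}

const listener = new SelectItemListener();
hive.listen(listener, parseTree); // the updated tests call listen() without an await or a cast
console.log(listener.result);     // expected to print 'id'
```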
@ -1,5 +1,3 @@
import { AbstractParseTreeVisitor } from 'antlr4ng';

import { HiveSQL } from 'src/parser/hive';
import { HiveSqlParserVisitor } from 'src/lib/hive/HiveSqlParserVisitor';
import { ProgramContext, TableNameContext } from 'src/lib/hive/HiveSqlParser';
@ -14,19 +12,23 @@ describe('HiveSQL Visitor Tests', () => {
});

test('Visitor visitTableName', () => {
let result = '';
class MyVisitor extends AbstractParseTreeVisitor<any> implements HiveSqlParserVisitor<any> {
defaultResult() {
return result;
class MyVisitor extends HiveSqlParserVisitor<string> {
defaultResult(): string {
return '';
}

visitTableName(ctx: TableNameContext) {
result = ctx.getText().toLowerCase();
aggregateResult(aggregate: string, nextResult: string): string {
return aggregate + nextResult;
}
visitProgram = (ctx: ProgramContext) => {
return this.visitChildren(ctx);
};
visitTableName = (ctx: TableNameContext) => {
return ctx.getText().toLowerCase();
};
}

const visitor = new MyVisitor();
visitor.visit(parseTree as ProgramContext);
const result = visitor.visit(parseTree);

expect(result).toBe(expectTableName);
});
@ -1,6 +1,5 @@
import { ImpalaSQL } from 'src/parser/impala';
import { ImpalaSqlParserListener } from 'src/lib/impala/ImpalaSqlParserListener';
import { ParseTreeListener } from 'antlr4ng';

describe('impala SQL Listener Tests', () => {
const expectTableName = 'user1';
@ -11,19 +10,14 @@ describe('impala SQL Listener Tests', () => {

test('Listener enterTableNamePath', async () => {
let result = '';
class MyListener implements ImpalaSqlParserListener {
class MyListener extends ImpalaSqlParserListener {
enterTableNamePath = (ctx): void => {
result = ctx.getText().toLowerCase();
};

visitTerminal() {}
visitErrorNode() {}
enterEveryRule() {}
exitEveryRule() {}
}
const listenTableName = new MyListener();
const listener = new MyListener();

await impala.listen(listenTableName as ParseTreeListener, parseTree);
impala.listen(listener, parseTree);
expect(result).toBe(expectTableName);
});
@ -1,5 +1,4 @@
import { ImpalaSQL } from 'src/parser/impala';
import { AbstractParseTreeVisitor } from 'antlr4ng';
import { ImpalaSqlParserVisitor } from 'src/lib/impala/ImpalaSqlParserVisitor';

describe('impala SQL Visitor Tests', () => {
@ -12,20 +11,22 @@ describe('impala SQL Visitor Tests', () => {
});

test('Visitor visitTableNamePath', () => {
let result = '';
class MyVisitor
extends AbstractParseTreeVisitor<any>
implements ImpalaSqlParserVisitor<any>
{
protected defaultResult() {
return result;
class MyVisitor extends ImpalaSqlParserVisitor<string> {
defaultResult(): string {
return '';
}
visitTableNamePath = (ctx): void => {
result = ctx.getText().toLowerCase();
aggregateResult(aggregate: string, nextResult: string): string {
return aggregate + nextResult;
}
visitProgram = (ctx) => {
return this.visitChildren(ctx);
};
visitTableNamePath = (ctx) => {
return ctx.getText().toLowerCase();
};
}
const visitor: any = new MyVisitor();
visitor.visit(parseTree);
const visitor = new MyVisitor();
const result = visitor.visit(parseTree);

expect(result).toBe(expectTableName);
});
@ -1,6 +1,5 @@
import { MySQL } from 'src/parser/mysql';
import { MySqlParserListener } from 'src/lib/mysql/MySqlParserListener';
import { ParseTreeListener } from 'antlr4ng';

describe('MySQL Listener Tests', () => {
const expectTableName = 'user1';
@ -10,20 +9,17 @@ describe('MySQL Listener Tests', () => {
const parseTree = mysql.parse(sql);

test('Listener enterTableName', async () => {
let result = '';
class MyListener implements MySqlParserListener {
enterTableName = (ctx): void => {
result = ctx.getText().toLowerCase();
};
visitTerminal() {}
visitErrorNode() {}
enterEveryRule() {}
exitEveryRule() {}
}
const listenTableName: any = new MyListener();
class MyListener extends MySqlParserListener {
result = '';

await mysql.listen(listenTableName as ParseTreeListener, parseTree);
expect(result).toBe(expectTableName);
enterTableName = (ctx): void => {
this.result = ctx.getText().toLowerCase();
};
}
const listener = new MyListener();

mysql.listen(listener, parseTree);
expect(listener.result).toBe(expectTableName);
});

test('Split sql listener', async () => {
@ -1,6 +1,5 @@
import { MySQL } from 'src/parser/mysql';
import { MySqlParserVisitor } from 'src/lib/mysql/MySqlParserVisitor';
import { AbstractParseTreeVisitor } from 'antlr4ng';

describe('MySQL Visitor Tests', () => {
const expectTableName = 'user1';
@ -12,18 +11,22 @@ describe('MySQL Visitor Tests', () => {
});

test('Visitor visitTableName', () => {
let result = '';
class MyVisitor extends AbstractParseTreeVisitor<any> implements MySqlParserVisitor<any> {
protected defaultResult() {
return result;
class MyVisitor extends MySqlParserVisitor<string> {
defaultResult(): string {
return '';
}

visitTableName = (ctx): void => {
result = ctx.getText().toLowerCase();
aggregateResult(aggregate: string, nextResult: string): string {
return aggregate + nextResult;
}
visitProgram = (ctx) => {
return this.visitChildren(ctx);
};
visitTableName = (ctx) => {
return ctx.getText().toLowerCase();
};
}
const visitor = new MyVisitor();
visitor.visit(parseTree);
const result = visitor.visit(parseTree);

expect(result).toBe(expectTableName);
});
@ -1,5 +1,4 @@
import { PLSQL } from 'src/parser/plsql';
import { ParseTreeListener } from 'antlr4ng';
import { PlSqlParserListener } from 'src/lib/plsql/PlSqlParserListener';

describe('PLSQL Listener Tests', () => {
@ -10,19 +9,16 @@ describe('PLSQL Listener Tests', () => {
const parseTree = plsql.parse(sql);

test('Listener enterTableName', async () => {
let result = '';
class MyListener implements PlSqlParserListener {
enterTable_ref_list = (ctx): void => {
result = ctx.getText().toLowerCase();
};
visitTerminal() {}
visitErrorNode() {}
enterEveryRule() {}
exitEveryRule() {}
}
const listenTableName = new MyListener();
class MyListener extends PlSqlParserListener {
result = '';

await plsql.listen(listenTableName as ParseTreeListener, parseTree);
expect(result).toBe(expectTableName);
enterTable_ref_list = (ctx) => {
this.result = ctx.getText().toLowerCase();
};
}
const listener = new MyListener();

plsql.listen(listener, parseTree);
expect(listener.result).toBe(expectTableName);
});
});
@ -1,5 +1,4 @@
import { PLSQL } from 'src/parser/plsql';
import { AbstractParseTreeVisitor } from 'antlr4ng';
import { PlSqlParserVisitor } from 'src/lib/plsql/PlSqlParserVisitor';

describe('PLSQL Visitor Tests', () => {
@ -10,17 +9,22 @@ describe('PLSQL Visitor Tests', () => {
const parseTree = plsql.parse(sql);

test('Visitor visitTable_ref_list', () => {
let result = '';
class MyVisitor extends AbstractParseTreeVisitor<any> implements PlSqlParserVisitor<any> {
protected defaultResult() {
return result;
class MyVisitor extends PlSqlParserVisitor<string> {
defaultResult(): string {
return '';
}
visitTable_ref_list = (ctx): void => {
result = ctx.getText().toLowerCase();
aggregateResult(aggregate: string, nextResult: string): string {
return aggregate + nextResult;
}
visitProgram = (ctx) => {
return this.visitChildren(ctx);
};
visitTable_ref_list = (ctx) => {
return ctx.getText().toLowerCase();
};
}
const visitor: any = new MyVisitor();
visitor.visit(parseTree);
const visitor = new MyVisitor();
const result = visitor.visit(parseTree);

expect(result).toBe(expectTableName);
});
@ -1,6 +1,5 @@
import { PostgreSQL } from 'src/parser/postgresql';
import { PostgreSqlParserListener } from 'src/lib/postgresql/PostgreSqlParserListener';
import { ParseTreeListener } from 'antlr4ng';

describe('PostgreSQL Listener Tests', () => {
const expectTableName = 'user1';
@ -10,20 +9,16 @@ describe('PostgreSQL Listener Tests', () => {
const parseTree = postgresql.parse(sql);

test('Listener enterTableName', async () => {
let result = '';
class MyListener implements PostgreSqlParserListener {
enterTable_ref(ctx) {
result = ctx.getText().toLowerCase();
}
visitTerminal() {}
visitErrorNode() {}
enterEveryRule() {}
exitEveryRule() {}
class MyListener extends PostgreSqlParserListener {
result = '';
enterTable_ref = (ctx) => {
this.result = ctx.getText().toLowerCase();
};
}
const listenTableName = new MyListener();
const listener = new MyListener();

await postgresql.listen(listenTableName as ParseTreeListener, parseTree);
expect(result).toBe(expectTableName);
postgresql.listen(listener, parseTree);
expect(listener.result).toBe(expectTableName);
});

test('Split sql listener', async () => {
@ -1,5 +1,4 @@
import { PostgreSQL } from 'src/parser/postgresql';
import { AbstractParseTreeVisitor } from 'antlr4ng';
import { PostgreSqlParserVisitor } from 'src/lib/postgresql/PostgreSqlParserVisitor';

describe('MySQL Visitor Tests', () => {
@ -12,21 +11,22 @@ describe('MySQL Visitor Tests', () => {
});

test('Visitor visitTableName', () => {
let result = '';
class MyVisitor
extends AbstractParseTreeVisitor<any>
implements PostgreSqlParserVisitor<any>
{
protected defaultResult() {
return result;
class MyVisitor extends PostgreSqlParserVisitor<string> {
defaultResult(): string {
return '';
}

visitTable_ref(ctx) {
result = ctx.getText().toLowerCase();
aggregateResult(aggregate: string, nextResult: string): string {
return aggregate + nextResult;
}
visitProgram = (ctx) => {
return this.visitChildren(ctx);
};
visitTable_ref = (ctx) => {
return ctx.getText().toLowerCase();
};
}
const visitor: any = new MyVisitor();
visitor.visit(parseTree);
const visitor = new MyVisitor();
const result = visitor.visit(parseTree);

expect(result).toBe(expectTableName);
});
@ -1,6 +1,5 @@
import { SparkSQL } from 'src/parser/spark';
import { SparkSqlParserListener } from 'src/lib/spark/SparkSqlParserListener';
import { ParseTreeListener } from 'antlr4ng';

describe('Spark SQL Listener Tests', () => {
const expectTableName = 'user1';
@ -10,20 +9,16 @@ describe('Spark SQL Listener Tests', () => {
const parseTree = spark.parse(sql);

test('Listener exitTableName', () => {
let result = '';
class MyListener implements SparkSqlParserListener {
class MyListener extends SparkSqlParserListener {
result = '';
exitTableName = (ctx): void => {
result = ctx.getText().toLowerCase();
this.result = ctx.getText().toLowerCase();
};
visitTerminal() {}
visitErrorNode() {}
enterEveryRule() {}
exitEveryRule() {}
}
const listenTableName = new MyListener();
const listener = new MyListener();

spark.listen(listenTableName as ParseTreeListener, parseTree);
expect(result).toBe(expectTableName);
spark.listen(listener, parseTree);
expect(listener.result).toBe(expectTableName);
});

test('Split sql listener', async () => {
@ -1,6 +1,5 @@
import { SparkSQL } from 'src/parser/spark';
import { SparkSqlParserVisitor } from 'src/lib/spark/SparkSqlParserVisitor';
import { AbstractParseTreeVisitor } from 'antlr4ng';

describe('Spark SQL Visitor Tests', () => {
const expectTableName = 'user1';
@ -12,21 +11,23 @@ describe('Spark SQL Visitor Tests', () => {
});

test('Visitor visitTableName', () => {
class MyVisitor
extends AbstractParseTreeVisitor<any>
implements SparkSqlParserVisitor<any>
{
result: string = '';
protected defaultResult() {
return this.result;
class MyVisitor extends SparkSqlParserVisitor<string> {
defaultResult(): string {
return '';
}
visitTableName = (ctx): void => {
this.result = ctx.getText().toLowerCase();
aggregateResult(aggregate: string, nextResult: string): string {
return aggregate + nextResult;
}
visitProgram = (ctx) => {
return this.visitChildren(ctx);
};
visitTableName = (ctx) => {
return ctx.getText().toLowerCase();
};
}
const visitor = new MyVisitor();
visitor.visit(parseTree);
const result = visitor.visit(parseTree);

expect(visitor.result).toBe(expectTableName);
expect(result).toBe(expectTableName);
});
});
@ -1,6 +1,5 @@
import { TrinoSQL } from 'src/parser/trino';
import { TrinoSqlListener } from 'src/lib/trino/TrinoSqlListener';
import { ParseTreeListener } from 'antlr4ng';

describe('trino SQL Listener Tests', () => {
const expectTableName = 'user1';
@ -10,20 +9,16 @@ describe('trino SQL Listener Tests', () => {
const parseTree = trino.parse(sql);

test('Listener enterTableName', async () => {
let result = '';
class MyListener implements TrinoSqlListener {
class MyListener extends TrinoSqlListener {
result = '';
enterTableName = (ctx): void => {
result = ctx.getText().toLowerCase();
this.result = ctx.getText().toLowerCase();
};
visitTerminal() {}
visitErrorNode() {}
enterEveryRule() {}
exitEveryRule() {}
}
const listenTableName = new MyListener();
const listener = new MyListener();

await trino.listen(listenTableName as ParseTreeListener, parseTree);
expect(result).toBe(expectTableName);
trino.listen(listener, parseTree);
expect(listener.result).toBe(expectTableName);
});

test('Split sql listener', async () => {
@ -1,6 +1,5 @@
import { TrinoSQL } from 'src/parser/trino';
import { TrinoSqlVisitor } from 'src/lib/trino/TrinoSqlVisitor';
import { AbstractParseTreeVisitor } from 'antlr4ng';

describe('trino SQL Visitor Tests', () => {
const expectTableName = 'user1';
@ -12,17 +11,22 @@ describe('trino SQL Visitor Tests', () => {
});

test('Visitor visitTableName', () => {
let result = '';
class MyVisitor extends AbstractParseTreeVisitor<any> implements TrinoSqlVisitor<any> {
protected defaultResult() {
return result;
class MyVisitor extends TrinoSqlVisitor<string> {
defaultResult(): string {
return '';
}
visitTableName = (ctx): void => {
result = ctx.getText().toLowerCase();
aggregateResult(aggregate: string, nextResult: string): string {
return aggregate + nextResult;
}
visitProgram = (ctx) => {
return this.visitChildren(ctx);
};
visitTableName = (ctx) => {
return ctx.getText().toLowerCase();
};
}
const visitor: any = new MyVisitor();
visitor.visit(parseTree);
const visitor = new MyVisitor();
const result = visitor.visit(parseTree);

expect(result).toBe(expectTableName);
});
@ -3,6 +3,13 @@
"compilerOptions": {
"baseUrl": "../",
"noEmit": true,
"allowSyntheticDefaultImports": true,
"strictNullChecks": false,
"noUnusedLocals": true,
"noImplicitAny": false,
"noImplicitOverride": false,
"noImplicitReturns": true,
"noImplicitThis": true,
"paths": {
"src/*": ["src/*"],
"test/*": ["test/*"]
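The test tsconfig deliberately keeps `noImplicitAny` and `strictNullChecks` off while enabling `noUnusedLocals`, `noImplicitReturns`, and `noImplicitThis`, so listener and visitor callbacks written with untyped `ctx` parameters, as in several of the tests above, keep compiling. A hedged illustration of the difference; the `TableNameContext` import path is an assumption modeled on the other generated-parser imports:

```ts
import { MySqlParserListener } from 'src/lib/mysql/MySqlParserListener';
import { TableNameContext } from 'src/lib/mysql/MySqlParser';

// Accepted under test/tsconfig.json, where "noImplicitAny" stays false:
class UntypedListener extends MySqlParserListener {
    result = '';
    enterTableName = (ctx) => {
        // `ctx` is implicitly `any` here
        this.result = ctx.getText().toLowerCase();
    };
}

// Under the stricter src config ("noImplicitAny": true) the parameter would
// typically need an explicit type before the same code compiles:
class TypedListener extends MySqlParserListener {
    result = '';
    enterTableName = (ctx: TableNameContext) => {
        this.result = ctx.getText().toLowerCase();
    };
}
```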
@ -7,10 +7,17 @@
"module": "ESNext",
"moduleResolution": "node",
"declaration": true,
"noUnusedLocals": false,
"noUnusedParameters": false,
"allowSyntheticDefaultImports": true,
"esModuleInterop": true,
"strictNullChecks": true,
"noUnusedLocals": true,
"noImplicitAny": true,
"noImplicitOverride": true,
"noImplicitReturns": true,
"noImplicitThis": true,
"forceConsistentCasingInFileNames": true,
"lib": [
"ESNext",
"DOM"
],
"skipLibCheck": true,
"types": [
"node",
@ -19,7 +26,6 @@
"typeRoots": [
"node",
"node_modules/@types",
"./src/typings"
]
},
"isolatedModules": true,
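For the src tree, each newly enabled option targets a specific class of error. A small, generic illustration of two of them, `strictNullChecks` and `noImplicitOverride`; the types below are stand-ins for illustration, not code from this repo:

```ts
// strictNullChecks: Map.get() is typed as possibly undefined, so the result
// must be narrowed or given a fallback before it can be used as a number.
const keywordIds = new Map<string, number>([['select', 1]]);
const id: number = keywordIds.get('select') ?? -1;

// noImplicitOverride: overriding a base-class member without the `override`
// keyword is now a compile error.
class Walker {
    walk(): void {}
}

class LoggingWalker extends Walker {
    override walk(): void {
        console.log('walking');
    }
}
```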