chore: devops (#180)

* ci: add dependencies about lint tool

* ci: replace eslint with prettier

* ci: add husky, cz and commitlint

* style: lint fix via prettier

* ci: add prettier and check-types to github workflow

Hayden 2023-10-13 11:16:36 +08:00 committed by GitHub
parent 4d1dfa676f
commit 7de192d486
105 changed files with 2615 additions and 1823 deletions

.czrc Normal file

@@ -0,0 +1,3 @@
{
"path": "./node_modules/@commitlint/cz-commitlint"
}

.eslintrc.js Deleted file

@@ -1,37 +0,0 @@
module.exports = {
'env': {
'browser': true,
'es6': true,
},
'extends': [
'google',
],
'globals': {
'expect': 'readable',
'test': 'readable',
'describe': 'readable',
'beforeEach': 'readable',
'afterEach': 'readable',
'jest': 'readable',
'Atomics': 'readonly',
'SharedArrayBuffer': 'readonly',
},
'parser': '@typescript-eslint/parser',
'parserOptions': {
'ecmaFeatures': {
},
'ecmaVersion': 11,
'sourceType': 'module',
},
'plugins': [
'@typescript-eslint',
],
'rules': {
'indent': ['error', 4],
'object-curly-spacing': ['error', 'always'],
'max-len': 0,
'require-jsdoc': 0,
'valid-jsdoc': 0,
'no-unused-vars': 0,
},
};


@@ -43,7 +43,50 @@ jobs:
       - name: install
         if: steps.node_modules_cache_id.outputs.cache-hit != 'true'
         run: yarn
+  prettier:
+    runs-on: ubuntu-latest
+    needs: [setup]
+    steps:
+      - uses: actions/checkout@v2
+      - name: Restore cache from yarn.lock
+        uses: actions/cache@v2
+        with:
+          path: package-temp-dir
+          key: lock-${{ github.sha }}
+      - name: Restore cache from node_modules
+        uses: actions/cache@v2
+        with:
+          path: node_modules
+          key: node_modules-${{ hashFiles('**/package-temp-dir/yarn.lock') }}
+      - name: Run prettier lint
+        run: |
+          export NODE_OPTIONS="--max_old_space_size=4096"
+          yarn lint
+  check-types:
+    runs-on: ubuntu-latest
+    needs: [setup]
+    steps:
+      - uses: actions/checkout@v2
+      - name: Restore cache from yarn.lock
+        uses: actions/cache@v2
+        with:
+          path: package-temp-dir
+          key: lock-${{ github.sha }}
+      - name: Restore cache from node_modules
+        uses: actions/cache@v2
+        with:
+          path: node_modules
+          key: node_modules-${{ hashFiles('**/package-temp-dir/yarn.lock') }}
+      - name: Run tsc check
+        run: |
+          export NODE_OPTIONS="--max_old_space_size=4096"
+          yarn check-types
   test:
     runs-on: ubuntu-latest
     needs: [setup]

.husky/commit-msg Executable file

@@ -0,0 +1,6 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
echo 'commitlint'
npx --no -- commitlint --edit
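
For context, `commitlint --edit` lints the message Git just wrote to .git/COMMIT_EDITMSG against commitlint.config.js (added later in this commit). A rough sketch of what the hook automates (sketch only; the real CLI also resolves the config file and its extends chain):

    // sketch, assuming the type-enum rule from commitlint.config.js below
    const { readFileSync } = require('node:fs');
    const lint = require('@commitlint/lint').default;

    const message = readFileSync('.git/COMMIT_EDITMSG', 'utf-8');
    lint(message, {
        'type-enum': [2, 'always', ['feat', 'fix', 'docs', 'style', 'refactor', 'test', 'build', 'ci', 'chore']],
    }).then((report) => process.exit(report.valid ? 0 : 1));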

.husky/pre-commit Executable file

@@ -0,0 +1,4 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
npx lint-staged

.lintstagedrc.js Normal file

@@ -0,0 +1,3 @@
module.exports = {
'*.js|ts': [`prettier --write`],
};
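
A note on the glob: lint-staged keys are matched with micromatch, and the `|` in '*.js|ts' is not the brace-style alternation most configs intend, so .ts files may not be matched the way '*.{js,ts}' would match them. A corrected sketch (an assumption about the intended behavior, not part of this commit):

    module.exports = {
        '*.{js,ts}': ['prettier --write'],
    };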

.prettierignore Normal file

@@ -0,0 +1,7 @@
**/*.md
**/*.ejs
**/package.json
**/lib
**/dist
node_modules
coverage

.prettierrc Normal file

@@ -0,0 +1,10 @@
{
"semi": true,
"tabWidth": 4,
"printWidth": 100,
"singleQuote": true,
"useTabs": false,
"bracketSpacing": true,
"arrowParens": "always",
"trailingComma": "es5"
}
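
To see these options in effect, a small sketch against prettier's Node API (prettier 3, as added below, returns a Promise from format; the input string is illustrative):

    const prettier = require('prettier');

    const src = 'export const greet=(name)=>{return "hi "+name}';
    prettier
        .format(src, { parser: 'typescript', semi: true, tabWidth: 4, printWidth: 100, singleQuote: true })
        .then((formatted) => console.log(formatted));
    // export const greet = (name) => {
    //     return 'hi ' + name;
    // };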

commitlint.config.js Normal file

@@ -0,0 +1,12 @@
module.exports = {
extends: ['@commitlint/config-conventional'],
rules: {
'type-enum': [
2,
'always',
['feat', 'fix', 'docs', 'style', 'refactor', 'test', 'build', 'ci', 'chore'],
],
'scope-case': [0, 'always'],
'scope-empty': [0, 'always'],
},
};
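
Under @commitlint/config-conventional plus these overrides, a commit header must read `type(scope?): subject` with the type drawn from the enum above; both scope rules are switched off (level 0). A hedged self-check sketch using @commitlint/lint, one of the packages this commit adds (only the local rules are passed, so config-conventional's remaining rules are not exercised here):

    const lint = require('@commitlint/lint').default;
    const { rules } = require('./commitlint.config');

    lint('ci: add prettier and check-types to github workflow', rules)
        .then((report) => console.log(report.valid)); // true: 'ci' is an allowed type
    lint('foo: bar', rules)
        .then((report) => console.log(report.valid)); // false: 'foo' is not in type-enum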

package.json

@@ -18,28 +18,33 @@
"dist" "dist"
], ],
"scripts": { "scripts": {
"prepare": "husky install",
"antlr4": "node ./scripts/antlr4.js", "antlr4": "node ./scripts/antlr4.js",
"build": "rm -rf dist && tsc", "build": "rm -rf dist && tsc",
"eslint": "eslint ./src/**/*.ts", "check-types": "tsc -p ./tsconfig.check.json",
"check-types": "tsc --skipLibCheck",
"test": "NODE_OPTIONS=--max_old_space_size=4096 && jest", "test": "NODE_OPTIONS=--max_old_space_size=4096 && jest",
"release": "npm run build && node ./scripts/release.js" "release": "npm run build && node ./scripts/release.js",
"lint": "prettier --check '**/*.ts' --config ./.prettierrc",
"lint-fix": "prettier --write '**/*.ts' --config ./.prettierrc"
}, },
"author": "dt-insight-front", "author": "dt-insight-front",
"license": "MIT", "license": "MIT",
"devDependencies": { "devDependencies": {
"@commitlint/cli": "^17.7.2",
"@commitlint/config-conventional": "^17.7.0",
"@commitlint/cz-commitlint": "^17.7.2",
"@swc/core": "^1.3.60", "@swc/core": "^1.3.60",
"@swc/jest": "^0.2.26", "@swc/jest": "^0.2.26",
"@types/jest": "^29.5.1", "@types/jest": "^29.5.1",
"@types/node": "^18.15.11", "@types/node": "^18.15.11",
"@typescript-eslint/eslint-plugin": "^3.10.1",
"@typescript-eslint/parser": "^3.10.1",
"antlr4ts-cli": "^0.5.0-alpha.4", "antlr4ts-cli": "^0.5.0-alpha.4",
"chalk": "4.1.2", "chalk": "4.1.2",
"eslint": "^7.32.0", "commitizen": "^4.3.0",
"eslint-config-google": "^0.14.0", "husky": "^8.0.3",
"inquirer": "^8.2.2", "inquirer": "^8.2.2",
"jest": "^29.5.0", "jest": "^29.5.0",
"lint-staged": "12.5.0",
"prettier": "^3.0.3",
"standard-version": "^9.5.0", "standard-version": "^9.5.0",
"typescript": "^5.0.4", "typescript": "^5.0.4",
"yargs-parser": "^21.1.1" "yargs-parser": "^21.1.1"

File diff suppressed because it is too large

src/index.ts

@@ -16,6 +18,5 @@ export * from './lib/trinosql/TrinoSqlListener';
 export * from './lib/trinosql/TrinoSqlVisitor';
 export { SyntaxContextType } from './parser/common/basic-parser-types';
 export type * from './parser/common/basic-parser-types';
 export type { SyntaxError, ParserError } from './parser/common/parserErrorListener';

@@ -30,12 +30,10 @@ export default abstract class PostgreSQLLexerBase extends Lexer {
         return this._input;
     }
     checkLA( c) {
-        // eslint-disable-next-line new-cap
         return this.getInputStream().LA(1) !== c;
     }
     charIsLetter() {
-        // eslint-disable-next-line new-cap
         return isLetter(this.getInputStream().LA(-1));
     }
@@ -53,11 +51,9 @@ export default abstract class PostgreSQLLexerBase extends Lexer {
     }
     UnterminatedBlockCommentDebugAssert() {
-        // Debug.Assert(InputStream.LA(1) == -1 /*EOF*/);
     }
     CheckIfUtf32Letter() {
-        // eslint-disable-next-line new-cap
         let codePoint = this.getInputStream().LA(-2) << 8 + this.getInputStream().LA(-1);
         let c;
         if (codePoint < 0x10000) {

@@ -1,9 +1,7 @@
-/* eslint-disable new-cap,camelcase */
 import { CharStreams, CommonTokenStream, Parser } from 'antlr4ts';
 import { PostgreSQLLexer } from './PostgreSQLLexer';
 import { PostgreSQLParser } from './PostgreSQLParser';
-// @ts-ignore
 export default abstract class PostgreSQLParserBase extends Parser {
     constructor( input) {
         super(input);
@@ -32,16 +30,13 @@ export default abstract class PostgreSQLParserBase extends Parser {
             }
         }
         if (!lang) return;
-        // eslint-disable-next-line camelcase
         let func_as = null;
         for (const a of _localctx.createfunc_opt_item()) {
             if (!a.func_as()) {
-                // eslint-disable-next-line camelcase
                 func_as = a;
                 break;
             }
         }
-        // eslint-disable-next-line camelcase
         if (!func_as) {
             const txt = this.GetRoutineBodyString(func_as.func_as().sconst(0));
             const line = func_as.func_as().sconst(0).start.getLine();
@@ -76,7 +71,6 @@ export default abstract class PostgreSQLParserBase extends Parser {
     GetRoutineBodyString( rule) {
         const anysconst = rule.anysconst();
-        // eslint-disable-next-line new-cap
         const StringConstant = anysconst.StringConstant();
         if (null !== StringConstant) return this.unquote(this.TrimQuotes(StringConstant.getText()));
         const UnicodeEscapeStringConstant = anysconst.UnicodeEscapeStringConstant();

src/parser/common/basic-parser-types.ts

@@ -78,4 +78,4 @@ export interface TextSlice {
     startColumn: number;
     endColumn: number;
     text: string;
 }

src/parser/common/basicParser.ts

@@ -1,21 +1,21 @@
 import {
     Parser,
     Lexer,
     Token,
     CharStreams,
     CommonTokenStream,
     CodePointCharStream,
-    ParserRuleContext
+    ParserRuleContext,
 } from 'antlr4ts';
 import { ParseTreeWalker, ParseTreeListener } from 'antlr4ts/tree';
 import { CandidatesCollection, CodeCompletionCore } from 'antlr4-c3';
 import { findCaretTokenIndex } from '../../utils/findCaretTokenIndex';
 import {
     CaretPosition,
     Suggestions,
     SyntaxSuggestion,
     WordRange,
-    TextSlice
+    TextSlice,
 } from './basic-parser-types';
 import ParserErrorListener, {
     ParserError,
@@ -36,10 +36,10 @@ interface SplitListener extends ParseTreeListener {
  * Custom Parser class, subclass needs extends it.
  */
 export default abstract class BasicParser<
     L extends Lexer = Lexer,
     PRC extends ParserRuleContext = ParserRuleContext,
-    P extends IParser<PRC> = IParser<PRC>
+    P extends IParser<PRC> = IParser<PRC>,
 > {
     protected _charStreams: CodePointCharStream;
     protected _lexer: L;
     protected _tokenStream: CommonTokenStream;
@@ -52,7 +52,7 @@ export default abstract class BasicParser<
      * PreferredRules for antlr4-c3
      */
     protected abstract preferredRules: Set<number>;
     /**
      * Create a antrl4 Lexer instance
      * @param input source string
@@ -64,26 +64,26 @@ export default abstract class BasicParser<
      * @param tokenStream CommonTokenStream
      */
     protected abstract createParserFromTokenStream(tokenStream: CommonTokenStream): P;
     /**
      * Convert candidates to suggestions
      * @param candidates candidate list
      * @param allTokens all tokens from input
      * @param caretTokenIndex tokenIndex of caretPosition
      * @param tokenIndexOffset offset of the tokenIndex in the candidates
      * compared to the tokenIndex in allTokens
      */
     protected abstract processCandidates(
         candidates: CandidatesCollection,
         allTokens: Token[],
         caretTokenIndex: number,
-        tokenIndexOffset: number,
+        tokenIndexOffset: number
     ): Suggestions<Token>;
     /**
      * Get splitListener instance.
      */
-    protected abstract get splitListener (): SplitListener;
+    protected abstract get splitListener(): SplitListener;
     /**
      * Create an anltr4 lexer from input.
@@ -92,8 +92,8 @@ export default abstract class BasicParser<
     public createLexer(input: string) {
         const charStreams = CharStreams.fromString(input.toUpperCase());
         const lexer = this.createLexerFormCharStream(charStreams);
         return lexer;
     }
     /**
@@ -104,7 +104,7 @@ export default abstract class BasicParser<
         const lexer = this.createLexer(input);
         const tokenStream = new CommonTokenStream(lexer);
         const parser = this.createParserFromTokenStream(tokenStream);
         return parser;
     }
     /**
@@ -119,11 +119,11 @@ export default abstract class BasicParser<
         this._tokenStream = new CommonTokenStream(this._lexer);
         this._tokenStream.fill();
         this._parser = this.createParserFromTokenStream(this._tokenStream);
         this._parser.buildParseTree = true;
-        return this._parser
+        return this._parser;
     }
     /**
@@ -134,13 +134,10 @@ export default abstract class BasicParser<
      * @param errorListener listen errors
      * @returns parserTree
      */
-    public parse(
-        input: string,
-        errorListener?: ErrorHandler<any>
-    ) {
+    public parse(input: string, errorListener?: ErrorHandler<any>) {
         // Avoid parsing the same input repeatedly.
-        if(this._parsedInput === input && !errorListener) {
-            return;
+        if (this._parsedInput === input && !errorListener) {
+            return this._parserTree;
         }
         const parser = this.createParserWithCache(input);
@@ -150,12 +147,12 @@ export default abstract class BasicParser<
         this._errorCollector.clear();
         parser.addErrorListener(this._errorCollector);
-        if(errorListener) {
+        if (errorListener) {
             parser.addErrorListener(new ParserErrorListener(errorListener));
         }
         this._parserTree = parser.program();
         return this._parserTree;
     }
@@ -178,11 +175,11 @@ export default abstract class BasicParser<
     public getAllTokens(input: string): Token[] {
         this.parse(input);
         let allTokens = this._tokenStream.getTokens();
-        if(allTokens[allTokens.length - 1].text === '<EOF>') {
-            allTokens = allTokens.slice(0, -1)
+        if (allTokens[allTokens.length - 1].text === '<EOF>') {
+            allTokens = allTokens.slice(0, -1);
         }
-        return allTokens
-    };
+        return allTokens;
+    }
     /**
      * It convert tree to string, it's convenient to use in unit test.
      * @param string input
@@ -204,7 +201,10 @@ export default abstract class BasicParser<
      * @param listener Listener instance extends ParserListener
      * @param parserTree parser Tree
      */
-    public listen<PTL extends ParseTreeListener = ParseTreeListener>(listener: PTL, parserTree: PRC) {
+    public listen<PTL extends ParseTreeListener = ParseTreeListener>(
+        listener: PTL,
+        parserTree: PRC
+    ) {
         ParseTreeWalker.DEFAULT.walk(listener, parserTree);
     }
@@ -217,8 +217,8 @@ export default abstract class BasicParser<
         this.parse(input);
         const splitListener = this.splitListener;
         this.listen(splitListener, this._parserTree);
-        const res = splitListener.statementsContext.map(context => {
+        const res = splitListener.statementsContext.map((context) => {
             const { start, stop } = context;
             return {
                 startIndex: start.startIndex,
@@ -228,8 +228,8 @@ export default abstract class BasicParser<
                 startColumn: start.charPositionInLine + 1,
                 endColumn: stop.charPositionInLine + stop.text.length,
                 text: this._parsedInput.slice(start.startIndex, stop.stopIndex + 1),
-            }
-        })
+            };
+        });
         return res;
     }
@@ -240,10 +240,13 @@ export default abstract class BasicParser<
      * @param caretPosition caret position, such as cursor position
      * @returns suggestion
      */
-    public getSuggestionAtCaretPosition(input: string, caretPosition: CaretPosition): Suggestions | null {
+    public getSuggestionAtCaretPosition(
+        input: string,
+        caretPosition: CaretPosition
+    ): Suggestions | null {
         const splitListener = this.splitListener;
         // TODO: add splitListener to all sqlParser implements add remove following if
-        if(!splitListener) return null;
+        if (!splitListener) return null;
         this.parse(input);
         let sqlParserIns = this._parser;
@@ -252,8 +255,8 @@ export default abstract class BasicParser<
         let c3Context: ParserRuleContext = this._parserTree;
         let tokenIndexOffset: number = 0;
-        if(!caretTokenIndex && caretTokenIndex !== 0) return null;
+        if (!caretTokenIndex && caretTokenIndex !== 0) return null;
         /**
          * Split sql by statement.
         * Try to collect candidates from the caret statement only.
@@ -263,16 +266,19 @@ export default abstract class BasicParser<
         // If there are multiple statements.
         if (splitListener.statementsContext.length > 1) {
             // find statement rule context where caretPosition is located.
-            const caretStatementContext = splitListener?.statementsContext.find(ctx => {
-                return caretTokenIndex <= ctx.stop?.tokenIndex && caretTokenIndex >= ctx.start.tokenIndex;
+            const caretStatementContext = splitListener?.statementsContext.find((ctx) => {
+                return (
+                    caretTokenIndex <= ctx.stop?.tokenIndex &&
+                    caretTokenIndex >= ctx.start.tokenIndex
+                );
             });
-            if(caretStatementContext) {
-                c3Context = caretStatementContext
+            if (caretStatementContext) {
+                c3Context = caretStatementContext;
             } else {
-                const lastStatementToken= splitListener
-                    .statementsContext[splitListener?.statementsContext.length - 1]
-                    .start;
+                const lastStatementToken =
+                    splitListener.statementsContext[splitListener?.statementsContext.length - 1]
+                        .start;
                 /**
                  * If caretStatementContext is not found and it follows all statements.
                  * Reparses part of the input following the penultimate statement.
@@ -281,8 +287,8 @@ export default abstract class BasicParser<
                 if (caretTokenIndex > lastStatementToken?.tokenIndex) {
                     /**
                      * Save offset of the tokenIndex in the partInput
                      * compared to the tokenIndex in the whole input
                      */
                     tokenIndexOffset = lastStatementToken?.tokenIndex;
                     // Correct caretTokenIndex
                     caretTokenIndex = caretTokenIndex - tokenIndexOffset;
@@ -304,28 +310,34 @@ export default abstract class BasicParser<
         core.preferredRules = this.preferredRules;
         const candidates = core.collectCandidates(caretTokenIndex, c3Context);
-        const originalSuggestions = this.processCandidates(candidates, allTokens, caretTokenIndex, tokenIndexOffset);
+        const originalSuggestions = this.processCandidates(
+            candidates,
+            allTokens,
+            caretTokenIndex,
+            tokenIndexOffset
+        );
-        const syntaxSuggestions: SyntaxSuggestion<WordRange>[] = originalSuggestions.syntax
-            .map(syntaxCtx => {
-                const wordRanges: WordRange[] = syntaxCtx.wordRanges.map(token => {
+        const syntaxSuggestions: SyntaxSuggestion<WordRange>[] = originalSuggestions.syntax.map(
+            (syntaxCtx) => {
+                const wordRanges: WordRange[] = syntaxCtx.wordRanges.map((token) => {
                     return {
                         text: this._parsedInput.slice(token.startIndex, token.stopIndex + 1),
                         startIndex: token.startIndex,
                         stopIndex: token.stopIndex,
                         line: token.line,
                         startColumn: token.charPositionInLine + 1,
-                        stopColumn: token.charPositionInLine + token.text.length
-                    }
-                })
+                        stopColumn: token.charPositionInLine + token.text.length,
+                    };
+                });
                 return {
                     syntaxContextType: syntaxCtx.syntaxContextType,
                     wordRanges,
-                }
-            })
+                };
+            }
+        );
         return {
             syntax: syntaxSuggestions,
-            keywords: originalSuggestions.keywords
-        }
+            keywords: originalSuggestions.keywords,
+        };
     }
 }
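
To orient readers after this reformat, a minimal usage sketch of the public BasicParser surface through a concrete subclass (method names are from the diff above; the import path and SQL are illustrative):

    import FlinkSQL from 'src/parser/flinksql';

    const parser = new FlinkSQL();
    // parse() now returns the cached parser tree when the same input is parsed again
    const tree = parser.parse('SELECT id FROM tb1;');
    const tokens = parser.getAllTokens('SELECT id FROM tb1;'); // trailing <EOF> token is dropped
    const suggestions = parser.getSuggestionAtCaretPosition('SELECT id FROM ', {
        lineNumber: 1,
        column: 16,
    });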

src/parser/common/parserErrorListener.ts

@@ -1,5 +1,5 @@
 import { Token, Recognizer, ParserErrorListener, RecognitionException } from 'antlr4ts';
-import { ATNSimulator } from 'antlr4ts/atn/ATNSimulator'
+import { ATNSimulator } from 'antlr4ts/atn/ATNSimulator';
 export interface ParserError {
     startLine: number;
@@ -26,15 +26,15 @@ export class ParserErrorCollector implements ParserErrorListener {
     syntaxError(
         recognizer: Recognizer<Token, ATNSimulator>,
         offendingSymbol: Token,
         line: number,
         charPositionInLine: number,
         msg: string,
-        e: RecognitionException,
+        e: RecognitionException
     ) {
         let endCol = charPositionInLine + 1;
         if (offendingSymbol && offendingSymbol.text !== null) {
-            endCol = charPositionInLine + offendingSymbol.text.length;
+            endCol = charPositionInLine + (offendingSymbol.text?.length ?? 0);
         }
         this._parseErrors.push({
             startLine: line,
@@ -51,7 +51,7 @@ export class ParserErrorCollector implements ParserErrorListener {
             recognizer,
             offendingSymbol,
             charPositionInLine,
-        })
+        });
     }
     clear() {
@@ -59,12 +59,12 @@ export class ParserErrorCollector implements ParserErrorListener {
         this._syntaxErrors = [];
     }
-    get parserErrors () {
-        return this._parseErrors
+    get parserErrors() {
+        return this._parseErrors;
     }
 }
 export default class CustomParserErrorListener implements ParserErrorListener {
     private _errorHandler;
     constructor(errorListener: ErrorHandler<Token>) {
@@ -72,28 +72,35 @@ export default class CustomParserErrorListener implements ParserErrorListener {
     }
     syntaxError(
-        recognizer: Recognizer<Token, ATNSimulator>, offendingSymbol: Token, line: number,
-        charPositionInLine: number, msg: string, e: RecognitionException,
+        recognizer: Recognizer<Token, ATNSimulator>,
+        offendingSymbol: Token,
+        line: number,
+        charPositionInLine: number,
+        msg: string,
+        e: RecognitionException
     ) {
         let endCol = charPositionInLine + 1;
         if (offendingSymbol && offendingSymbol.text !== null) {
             endCol = charPositionInLine + offendingSymbol.text.length;
         }
         if (this._errorHandler) {
-            this._errorHandler({
-                startLine: line,
-                endLine: line,
-                startCol: charPositionInLine,
-                endCol: endCol,
-                message: msg,
-            }, {
-                e,
-                line,
-                msg,
-                recognizer,
-                offendingSymbol,
-                charPositionInLine,
-            });
+            this._errorHandler(
+                {
+                    startLine: line,
+                    endLine: line,
+                    startCol: charPositionInLine,
+                    endCol: endCol,
+                    message: msg,
+                },
+                {
+                    e,
+                    line,
+                    msg,
+                    recognizer,
+                    offendingSymbol,
+                    charPositionInLine,
+                }
+            );
         }
     }
 }

src/parser/flinksql.ts

@@ -5,7 +5,7 @@ import {
     FlinkSqlParser,
     ProgramContext,
     SqlStatementContext,
-    SqlStatementsContext
+    SqlStatementsContext,
 } from '../lib/flinksql/FlinkSqlParser';
 import { FlinkSqlParserListener } from '../lib/flinksql/FlinkSqlParserListener';
 import { SyntaxContextType, Suggestions, SyntaxSuggestion } from './common/basic-parser-types';
@@ -34,7 +34,7 @@ export default class FlinkSQL extends BasicParser<FlinkSqlLexer, ProgramContext,
         FlinkSqlParser.RULE_functionNameCreate, // functionName that will be created
     ]);
-    protected get splitListener () {
+    protected get splitListener() {
         return new FlinkSqlSplitListener();
     }
@@ -50,7 +50,10 @@ export default class FlinkSQL extends BasicParser<FlinkSqlLexer, ProgramContext,
         for (let candidate of candidates.rules) {
             const [ruleType, candidateRule] = candidate;
             const startTokenIndex = candidateRule.startTokenIndex + tokenIndexOffset;
-            const tokenRanges = allTokens.slice(startTokenIndex, caretTokenIndex + tokenIndexOffset + 1);
+            const tokenRanges = allTokens.slice(
+                startTokenIndex,
+                caretTokenIndex + tokenIndexOffset + 1
+            );
             let syntaxContextType: SyntaxContextType;
             switch (ruleType) {
@@ -78,15 +81,15 @@ export default class FlinkSQL extends BasicParser<FlinkSqlLexer, ProgramContext,
                     syntaxContextType = SyntaxContextType.VIEW;
                     break;
                 }
-                case FlinkSqlParser.RULE_viewPathCreate : {
+                case FlinkSqlParser.RULE_viewPathCreate: {
                     syntaxContextType = SyntaxContextType.VIEW_CREATE;
                     break;
                 }
-                case FlinkSqlParser.RULE_functionName : {
+                case FlinkSqlParser.RULE_functionName: {
                     syntaxContextType = SyntaxContextType.FUNCTION;
                     break;
                 }
-                case FlinkSqlParser.RULE_functionNameCreate : {
+                case FlinkSqlParser.RULE_functionNameCreate: {
                     syntaxContextType = SyntaxContextType.FUNCTION_CREATE;
                     break;
                 }
@@ -97,25 +100,26 @@ export default class FlinkSQL extends BasicParser<FlinkSqlLexer, ProgramContext,
             if (syntaxContextType) {
                 originalSyntaxSuggestions.push({
                     syntaxContextType,
-                    wordRanges: tokenRanges
-                })
+                    wordRanges: tokenRanges,
+                });
             }
         }
         for (let candidate of candidates.tokens) {
             const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
             const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
-            if(symbolicName && symbolicName.startsWith('KW_')) {
-                const keyword = displayName.startsWith("'") && displayName.endsWith("'")
-                    ? displayName.slice(1, -1)
-                    : displayName
+            if (symbolicName && symbolicName.startsWith('KW_')) {
+                const keyword =
+                    displayName.startsWith("'") && displayName.endsWith("'")
+                        ? displayName.slice(1, -1)
+                        : displayName;
                 keywords.push(keyword);
             }
         }
         return {
             syntax: originalSyntaxSuggestions,
             keywords,
-        }
+        };
     }
 }
@@ -124,12 +128,11 @@ export class FlinkSqlSplitListener implements FlinkSqlParserListener {
     exitSqlStatement = (ctx: SqlStatementContext) => {
         this._statementsContext.push(ctx);
-    }
-    enterSqlStatements = (ctx: SqlStatementsContext) => {
     };
-    get statementsContext () {
+    enterSqlStatements = (ctx: SqlStatementsContext) => {};
+    get statementsContext() {
         return this._statementsContext;
     }
 }

@@ -17,19 +17,18 @@ export default class GenericSQL extends BasicParser<SqlLexer, ProgramContext, Sq
     protected preferredRules: Set<number> = new Set();
-    protected get splitListener () {
+    protected get splitListener() {
         return null as any;
     }
     protected processCandidates(
         candidates: CandidatesCollection,
         allTokens: Token[],
         caretTokenIndex: number
     ): Suggestions<Token> {
         return {
             syntax: [],
-            keywords: []
-        }
+            keywords: [],
+        };
     }
 }

@@ -1,12 +1,17 @@
 import { Token } from 'antlr4ts';
 import { CandidatesCollection } from 'antlr4-c3';
 import { HiveSqlLexer } from '../lib/hive/HiveSqlLexer';
-import { HiveSqlParser, ProgramContext, StatementContext, ExplainStatementContext, ExecStatementContext } from '../lib/hive/HiveSqlParser';
+import {
+    HiveSqlParser,
+    ProgramContext,
+    StatementContext,
+    ExplainStatementContext,
+    ExecStatementContext,
+} from '../lib/hive/HiveSqlParser';
 import BasicParser from './common/basicParser';
 import { HiveSqlParserListener } from '../lib/hive/HiveSqlParserListener';
 import { SyntaxContextType, Suggestions, SyntaxSuggestion } from './common/basic-parser-types';
 export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, HiveSqlParser> {
     protected createLexerFormCharStream(charStreams) {
         const lexer = new HiveSqlLexer(charStreams);
@@ -27,10 +32,9 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
         HiveSqlParser.RULE_functionNameForDDL, // function name
         HiveSqlParser.RULE_functionNameForInvoke, // function name
         HiveSqlParser.RULE_functionNameCreate, // function name that will be created
     ]);
-    protected get splitListener () {
+    protected get splitListener() {
         return new HiveSqlSplitListener();
     }
@@ -38,14 +42,17 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
         candidates: CandidatesCollection,
         allTokens: Token[],
         caretTokenIndex: number,
-        tokenIndexOffset: number,
+        tokenIndexOffset: number
     ): Suggestions<Token> {
         const originalSyntaxSuggestions: SyntaxSuggestion<Token>[] = [];
         const keywords: string[] = [];
         for (let candidate of candidates.rules) {
             const [ruleType, candidateRule] = candidate;
             const startTokenIndex = candidateRule.startTokenIndex + tokenIndexOffset;
-            const tokenRanges = allTokens.slice(startTokenIndex, caretTokenIndex + tokenIndexOffset + 1);
+            const tokenRanges = allTokens.slice(
+                startTokenIndex,
+                caretTokenIndex + tokenIndexOffset + 1
+            );
             let syntaxContextType: SyntaxContextType;
             switch (ruleType) {
@@ -62,7 +69,7 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
                     break;
                 }
                 case HiveSqlParser.RULE_tableNameCreate: {
-                    syntaxContextType = SyntaxContextType.TABLE_CREATE
+                    syntaxContextType = SyntaxContextType.TABLE_CREATE;
                     break;
                 }
                 case HiveSqlParser.RULE_viewName: {
@@ -73,7 +80,7 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
                     syntaxContextType = SyntaxContextType.VIEW_CREATE;
                     break;
                 }
                 case HiveSqlParser.RULE_functionNameForDDL:
                 case HiveSqlParser.RULE_functionNameForInvoke: {
                     syntaxContextType = SyntaxContextType.FUNCTION;
                     break;
@@ -98,7 +105,10 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
             const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
             const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
             if (symbolicName && symbolicName.startsWith('KW_')) {
-                const keyword = displayName.startsWith("'") && displayName.endsWith("'") ? displayName.slice(1, -1) : displayName;
+                const keyword =
+                    displayName.startsWith("'") && displayName.endsWith("'")
+                        ? displayName.slice(1, -1)
+                        : displayName;
                 keywords.push(keyword);
             }
         }
@@ -111,16 +121,14 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
 export class HiveSqlSplitListener implements HiveSqlParserListener {
     private _statementContext: StatementContext[] = [];
     exitStatement = (ctx: StatementContext) => {
         this._statementContext.push(ctx);
-    }
-    enterStatement = (ctx: StatementContext) => {
     };
+    enterStatement = (ctx: StatementContext) => {};
     get statementsContext() {
         return this._statementContext;
     }
 }

@@ -5,7 +5,11 @@ import { PostgreSQLParser, ProgramContext } from '../lib/pgsql/PostgreSQLParser'
 import BasicParser from './common/basicParser';
 import { Suggestions } from './common/basic-parser-types';
-export default class PostgresSQL extends BasicParser<PostgreSQLLexer, ProgramContext, PostgreSQLParser> {
+export default class PostgresSQL extends BasicParser<
+    PostgreSQLLexer,
+    ProgramContext,
+    PostgreSQLParser
+> {
     protected createLexerFormCharStream(charStreams) {
         const lexer = new PostgreSQLLexer(charStreams);
         return lexer;
@@ -17,18 +21,18 @@ export default class PostgresSQL extends BasicParser<PostgreSQLLexer, ProgramCon
     protected preferredRules: Set<number> = new Set();
-    protected get splitListener () {
+    protected get splitListener() {
         return null as any;
     }
     protected processCandidates(
         candidates: CandidatesCollection,
         allTokens: Token[],
         caretTokenIndex: number
     ): Suggestions<Token> {
         return {
             syntax: [],
-            keywords: []
-        }
+            keywords: [],
+        };
     }
 }

@@ -17,18 +17,18 @@ export default class PLSQL extends BasicParser<PlSqlLexer, ProgramContext, PlSql
     protected preferredRules: Set<number> = new Set();
-    protected get splitListener () {
+    protected get splitListener() {
         return null as any;
     }
     protected processCandidates(
         candidates: CandidatesCollection,
         allTokens: Token[],
         caretTokenIndex: number
     ): Suggestions<Token> {
         return {
             syntax: [],
-            keywords: []
-        }
+            keywords: [],
+        };
     }
 }

@@ -1,7 +1,11 @@
 import { Token } from 'antlr4ts';
 import { CandidatesCollection } from 'antlr4-c3';
 import { SparkSqlLexer } from '../lib/spark/SparkSqlLexer';
-import { SparkSqlParser, ProgramContext, SingleStatementContext } from '../lib/spark/SparkSqlParser';
+import {
+    SparkSqlParser,
+    ProgramContext,
+    SingleStatementContext,
+} from '../lib/spark/SparkSqlParser';
 import BasicParser from './common/basicParser';
 import { Suggestions, SyntaxContextType, SyntaxSuggestion } from './common/basic-parser-types';
 import { SparkSqlParserListener } from 'src/lib/spark/SparkSqlParserListener';
@@ -36,7 +40,7 @@ export default class SparkSQL extends BasicParser<SparkSqlLexer, ProgramContext,
         candidates: CandidatesCollection,
         allTokens: Token[],
         caretTokenIndex: number,
-        tokenIndexOffset: number,
+        tokenIndexOffset: number
     ): Suggestions<Token> {
         const originalSyntaxSuggestions: SyntaxSuggestion<Token>[] = [];
         const keywords: string[] = [];
@@ -44,7 +48,10 @@ export default class SparkSQL extends BasicParser<SparkSqlLexer, ProgramContext,
         for (const candidate of candidates.rules) {
             const [ruleType, candidateRule] = candidate;
             const startTokenIndex = candidateRule.startTokenIndex + tokenIndexOffset;
-            const tokenRanges = allTokens.slice(startTokenIndex, caretTokenIndex + tokenIndexOffset + 1);
+            const tokenRanges = allTokens.slice(
+                startTokenIndex,
+                caretTokenIndex + tokenIndexOffset + 1
+            );
             let syntaxContextType: SyntaxContextType;
             switch (ruleType) {
@@ -96,7 +103,10 @@ export default class SparkSQL extends BasicParser<SparkSqlLexer, ProgramContext,
             const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
             const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
             if (symbolicName && symbolicName.startsWith('KW_')) {
-                const keyword = displayName.startsWith("'") && displayName.endsWith("'") ? displayName.slice(1, -1) : displayName;
+                const keyword =
+                    displayName.startsWith("'") && displayName.endsWith("'")
+                        ? displayName.slice(1, -1)
+                        : displayName;
                 keywords.push(keyword);
             }
         }
@@ -113,11 +123,10 @@ export class SparkSqlSplitListener implements SparkSqlParserListener {
     exitSingleStatement = (ctx: SingleStatementContext) => {
         this._statementsContext.push(ctx);
-    }
-    enterSingleStatement = (ctx: SingleStatementContext) => {
     };
+    enterSingleStatement = (ctx: SingleStatementContext) => {};
     get statementsContext() {
         return this._statementsContext;
     }

@@ -16,21 +16,20 @@ export default class TrinoSQL extends BasicParser<TrinoSqlLexer, ProgramContext,
         return parser;
     }
-    protected get splitListener () {
+    protected get splitListener() {
         return null as any;
     }
     protected preferredRules: Set<number> = new Set();
     protected processCandidates(
         candidates: CandidatesCollection,
         allTokens: Token[],
         caretTokenIndex: number
     ): Suggestions<Token> {
         return {
             syntax: [],
-            keywords: []
-        }
+            keywords: [],
+        };
     }
 }

@@ -1 +0,0 @@
-declare type sql = string | string[]

src/utils/findCaretTokenIndex.ts

@@ -1,9 +1,9 @@
-import { Token } from "antlr4ts";
-import { CaretPosition } from "../../src/parser/common/basic-parser-types";
+import { Token } from 'antlr4ts';
+import { CaretPosition } from '../../src/parser/common/basic-parser-types';
 /**
  * find token index via caret position (cursor position)
  * @param caretPosition
  * @param allTokens all the tokens
  * @returns caretTokenIndex
  */
@@ -12,22 +12,23 @@ export function findCaretTokenIndex(caretPosition: CaretPosition, allTokens: Tok
     let left = 0;
     let right = allTokens.length - 1;
-    while(left <= right) {
+    while (left <= right) {
         const mid = left + ((right - left) >> 1);
         const token = allTokens[mid];
-        if (token.line > caretLine || (
-            token.line === caretLine
-            && token.charPositionInLine + 1 >= caretCol
-        )) {
+        if (
+            token.line > caretLine ||
+            (token.line === caretLine && token.charPositionInLine + 1 >= caretCol)
+        ) {
             right = mid - 1;
-        } else if (token.line < caretLine || (
-            token.line === caretLine
-            && token.charPositionInLine + token.text.length + 1 < caretCol
-        )) {
+        } else if (
+            token.line < caretLine ||
+            (token.line === caretLine &&
+                token.charPositionInLine + token.text.length + 1 < caretCol)
+        ) {
             left = mid + 1;
         } else {
-            return allTokens[mid].tokenIndex
+            return allTokens[mid].tokenIndex;
         }
     }
     return null;
 }
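
A quick usage sketch of the binary search above (paths and SQL are illustrative; CaretPosition uses 1-based lineNumber and column, as in the tests later in this commit):

    import FlinkSQL from 'src/parser/flinksql';
    import { findCaretTokenIndex } from 'src/utils/findCaretTokenIndex';

    const parser = new FlinkSQL();
    const tokens = parser.getAllTokens('SELECT id FROM tb1;');
    // a caret just after `id` (line 1, column 10) resolves to that token's index
    const caretTokenIndex = findCaretTokenIndex({ lineNumber: 1, column: 10 }, tokens);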

@@ -1,4 +1,3 @@
 import { TokenType, Token, TokenReg } from './token';
 /**
@@ -15,7 +14,6 @@ function lexer(input: string): Token[] {
     /**
      * TokenType
      */
-    // eslint-disable-next-line
     const extract = (currentChar: string, validator: RegExp, TokenType: TokenType): Token => {
         let value = '';
         const start = current;
@@ -95,19 +93,16 @@ function lexer(input: string): Token[] {
         }
         if (TokenReg.BackQuotation.test(char)) {
-            // eslint-disable-next-line
             matchQuotation(char, TokenReg.BackQuotation, TokenType.BackQuotation);
             continue;
         }
         if (TokenReg.SingleQuotation.test(char)) {
-            // eslint-disable-next-line
             matchQuotation(char, TokenReg.SingleQuotation, TokenType.SingleQuotation);
             continue;
         }
         if (TokenReg.DoubleQuotation.test(char)) {
-            // eslint-disable-next-line
             matchQuotation(char, TokenReg.DoubleQuotation, TokenType.DoubleQuotation);
             continue;
         }
@@ -162,7 +157,7 @@ function lexer(input: string): Token[] {
             const newToken = extract(
                 char,
                 TokenReg.StatementTerminator,
-                TokenType.StatementTerminator,
+                TokenType.StatementTerminator
             );
             tokens.push(newToken);
             continue;
@@ -211,8 +206,4 @@ function cleanSql(sql: string) {
     resultSql += sql.slice(startIndex);
     return resultSql;
 }
-export {
-    cleanSql,
-    splitSql,
-    lexer,
-};
+export { cleanSql, splitSql, lexer };

@@ -28,14 +28,14 @@ export enum TokenType {
      */
     RightSmallBracket = 'RightSmallBracket',
     Comma = 'Comma',
-    FunctionArguments = 'FunctionArguments'
+    FunctionArguments = 'FunctionArguments',
 }
 /**
  * Token object
  */
 export interface Token {
-    type: TokenType,
+    type: TokenType;
     value: string;
     start: number;
     end: number;
@@ -44,8 +44,8 @@ export interface Token {
 }
 /**
  * Token recognition rules
  */
 export const TokenReg = {
     [TokenType.StatementTerminator]: /[;]/,
     [TokenType.SingleQuotation]: /['|\']/,
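
An aside on the last rule shown: inside a character class `|` is a literal, so /['|\']/ matches a pipe as well as a single quote, and the escaped \' repeats the quote already listed. If only the quote is intended, a tighter spelling (an assumption about intent, not part of this commit) would be:

    // hypothetical tightening, not in the diff:
    [TokenType.SingleQuotation]: /'/,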

@@ -10,7 +10,8 @@ export const readSQL = (dirname: string, fileName: string) => {
         const char = content[index];
         tmp += char;
-        const isMulti = tmp.includes('EXECUTE STATEMENT SET') || tmp.includes('BEGIN STATEMENT SET;');
+        const isMulti =
+            tmp.includes('EXECUTE STATEMENT SET') || tmp.includes('BEGIN STATEMENT SET;');
         if (!isMulti) {
             // non-batch statements are first split naively on semicolons
@@ -31,14 +32,14 @@ export const readSQL = (dirname: string, fileName: string) => {
 /**
  * Benchmark for a function.
  * @param name
  * @param fn
  * @param times , default 1
  * @returns [ totalTime, averageTime, msg ]
  */
 export function benchmark(name: string, fn: Function, times: number = 1): [number, number, string] {
     const start = performance.now();
     for (let i = 0; i < times; i++) {
         fn();
     }
@@ -46,13 +47,14 @@ export function benchmark(name: string, fn: Function, times: number = 1): [numbe
     const totalTime = end - start;
     const averageTime = totalTime / times;
-    const msg = `Benchmark: ${name} executed ${times} times. Total time: ${totalTime.toFixed(2)}ms. Average time: ${averageTime.toFixed(2)}ms`;
-    console.log(msg)
+    const msg = `Benchmark: ${name} executed ${times} times. Total time: ${totalTime.toFixed(
+        2
+    )}ms. Average time: ${averageTime.toFixed(2)}ms`;
+    console.log(msg);
     return [totalTime, averageTime, msg];
 }
 export function getReportTableRow(name, rows, times, totalTime, averageTime) {
     return `| ${name} | ${rows} | ${times} | ${totalTime.toFixed(2)} | ${averageTime.toFixed(2)} |`;
 }
@@ -65,4 +67,4 @@ export function getReportTableHeader(title: string) {
 export function exportReportTable(markdown: string, output: string) {
     fs.writeFileSync(path.join(output, 'benchmark.md'), markdown);
 }

@@ -2,7 +2,13 @@ import path from 'path';
 import { writeFileSync } from 'node:fs';
 import FlinkSQL from '../../../../src/parser/flinksql';
-import { readSQL, benchmark, getReportTableHeader, getReportTableRow, exportReportTable } from '../../../helper';
+import {
+    readSQL,
+    benchmark,
+    getReportTableHeader,
+    getReportTableRow,
+    exportReportTable,
+} from '../../../helper';
 const features = {
     selectTable: readSQL(__dirname, 'selectTable.sql'),
@@ -15,13 +21,11 @@ describe('FlinkSQL benchmark tests', () => {
     let reportsHeader = getReportTableHeader('FlinkSQL Benchmark');
     const reportData: string[] = [];
     test('createTable Over 100 Rows', async () => {
-        const [totalTimes, averageTimes , msg] = benchmark('CreateTable Over 100 Rows', () => {
+        const [totalTimes, averageTimes, msg] = benchmark('CreateTable Over 100 Rows', () => {
            const testSQL = features.createTable[0];
            const res = parser.validate(testSQL);
-            expect(res).toEqual([])
+            expect(res).toEqual([]);
         });
         reportData.push(getReportTableRow('CreateTable', 100, 1, totalTimes, averageTimes));
     });
@@ -30,91 +34,75 @@ describe('FlinkSQL benchmark tests', () => {
         const [totalTimes, averageTimes, msg] = benchmark('CreateTable Over 1000 Rows', () => {
             const testSQL = features.createTable[1];
             const res = parser.validate(testSQL);
-            expect(res).toEqual([])
+            expect(res).toEqual([]);
         });
         reportData.push(getReportTableRow('CreateTable', 1000, 1, totalTimes, averageTimes));
     });
     test('createTable Over 5000 Rows', async () => {
         const [totalTimes, averageTimes, msg] = benchmark('CreateTable Over 5000 Rows', () => {
             const testSQL = features.createTable[2];
             const res = parser.validate(testSQL);
-            expect(res).toEqual([])
+            expect(res).toEqual([]);
         });
         reportData.push(getReportTableRow('CreateTable', 5000, 1, totalTimes, averageTimes));
     });
     test('selectTable Over 100 Rows', async () => {
-        const [totalTimes, averageTimes , msg] = benchmark('SelectTable Over 100 Rows', () => {
+        const [totalTimes, averageTimes, msg] = benchmark('SelectTable Over 100 Rows', () => {
             const testSQL = features.selectTable[0];
             const res = parser.validate(testSQL);
-            expect(res).toEqual([])
+            expect(res).toEqual([]);
         });
         reportData.push(getReportTableRow('SelectTable', 100, 1, totalTimes, averageTimes));
     });
     test('selectTable Over 1000 Rows', async () => {
         const [totalTimes, averageTimes, msg] = benchmark('SelectTable Over 1000 Rows', () => {
             const testSQL = features.selectTable[1];
             const res = parser.validate(testSQL);
-            expect(res).toEqual([])
+            expect(res).toEqual([]);
         });
         reportData.push(getReportTableRow('SelectTable', 1000, 1, totalTimes, averageTimes));
     });
     test('selectTable Over 5000 Rows', async () => {
         const [totalTimes, averageTimes, msg] = benchmark('SelectTable Over 5000 Rows', () => {
             const testSQL = features.selectTable[2];
             const res = parser.validate(testSQL);
-            expect(res).toEqual([])
+            expect(res).toEqual([]);
         });
         reportData.push(getReportTableRow('SelectTable', 5000, 1, totalTimes, averageTimes));
     });
     test('insertTable Over 100 Rows', async () => {
         const [totalTimes, averageTimes, msg] = benchmark('InsertTable Over 100 Rows', () => {
             const testSQL = features.insertTable[0];
             const res = parser.validate(testSQL);
-            expect(res).toEqual([])
+            expect(res).toEqual([]);
         });
         reportData.push(getReportTableRow('InsertTable', 100, 1, totalTimes, averageTimes));
     });
     test('insertTable Over 1000 Rows', async () => {
         const [totalTimes, averageTimes, msg] = benchmark('InsertTable Over 1000 Rows', () => {
             const testSQL = features.insertTable[1];
             const res = parser.validate(testSQL);
-            expect(res).toEqual([])
+            expect(res).toEqual([]);
         });
         reportData.push(getReportTableRow('InsertTable', 1000, 1, totalTimes, averageTimes));
     });
     test('insertTable Over 5000 Rows', async () => {
         const [totalTimes, averageTimes, msg] = benchmark('InsertTable Over 5000 Rows', () => {
             const testSQL = features.insertTable[2];
             const res = parser.validate(testSQL);
-            expect(res).toEqual([])
+            expect(res).toEqual([]);
         });
         reportData.push(getReportTableRow('InsertTable', 5000, 1, totalTimes, averageTimes));
     });
     afterAll(() => {
-        exportReportTable(reportsHeader + reportData.join('\n'), __dirname)
-    })
+        exportReportTable(reportsHeader + reportData.join('\n'), __dirname);
+    });
 });

@@ -1,6 +1,7 @@
 import FlinkSQL from '../../../src/parser/flinksql';
 import { FlinkSqlParserListener } from '../../../src/lib/flinksql/FlinkSqlParserListener';
 import { TableExpressionContext } from '../../../src/lib/flinksql/FlinkSqlParser';
+import { ParseTreeListener } from 'antlr4ts/tree';
 describe('Flink SQL Listener Tests', () => {
     const expectTableName = 'user1';
@@ -12,14 +13,13 @@ describe('Flink SQL Listener Tests', () => {
     test('Listener enterTableName', async () => {
         let result = '';
         class MyListener implements FlinkSqlParserListener {
             enterTableExpression = (ctx: TableExpressionContext): void => {
                 result = ctx.text.toLowerCase();
-            }
+            };
         }
         const listenTableName = new MyListener();
-        await parser.listen(listenTableName, parserTree);
+        await parser.listen(listenTableName as ParseTreeListener, parserTree);
         expect(result).toBe(expectTableName);
     });
 });

View File

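Note on the recurring `as ParseTreeListener` change in the listener tests above and below: the hand-written test listeners implement a grammar-specific generated interface, while `parser.listen` is evidently typed against the base `ParseTreeListener` from antlr4ts, so each call site now widens the listener explicitly instead of typing the variable as `any`. A minimal sketch of the pattern, with a hypothetical generated interface name:

    import { ParseTreeListener } from 'antlr4ts/tree';

    // Hypothetical generated interface: every callback is optional.
    interface MyParserListener extends ParseTreeListener {
        enterTableName?: (ctx: { text: string }) => void;
    }

    class TableNameCollector implements MyParserListener {
        names: string[] = [];
        enterTableName = (ctx: { text: string }): void => {
            this.names.push(ctx.text.toLowerCase());
        };
    }

    // Widen to the base type that listen() is declared to accept:
    // await parser.listen(new TableNameCollector() as ParseTreeListener, parserTree);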

@@ -1,9 +1,12 @@
import fs from 'fs';
import path from 'path';
import { CaretPosition, SyntaxContextType } from '../../../../src/parser/common/basic-parser-types';
import FlinkSQL from '../../../../src/parser/flinksql';

const syntaxSql = fs.readFileSync(
    path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),
    'utf-8'
);
const multipleSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'multipleSql.sql'), 'utf-8');

describe('Flink SQL Syntax Suggestion', () => {
@@ -13,187 +16,201 @@ describe('Flink SQL Syntax Suggestion', () => {
        expect(parser.validate(syntaxSql).length).not.toBe(0);
        expect(parser.validate(syntaxSql).length).not.toBe(0);
        expect(parser.validate(syntaxSql).length).not.toBe(0);
    });

    test('Multiple SQL use database', () => {
        const pos: CaretPosition = {
            lineNumber: 19,
            column: 10,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(multipleSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cat1', '.']);
    });

    test('Drop catalog', () => {
        const pos: CaretPosition = {
            lineNumber: 1,
            column: 17,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.CATALOG
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cat']);
    });

    test('Select table', () => {
        const pos: CaretPosition = {
            lineNumber: 3,
            column: 19,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
        );
        console.log(syntaxes);
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cat', '.']);
    });

    test('Create table', () => {
        const pos: CaretPosition = {
            lineNumber: 5,
            column: 20,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cat', '.', 'db']);
    });

    test('Show tables from', () => {
        const pos: CaretPosition = {
            lineNumber: 7,
            column: 21,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cat']);
    });

    test('Alter database', () => {
        const pos: CaretPosition = {
            lineNumber: 9,
            column: 20,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cat', '.']);
    });

    test('Drop view', () => {
        const pos: CaretPosition = {
            lineNumber: 11,
            column: 12,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['v']);
    });

    test('Select view', () => {
        const pos: CaretPosition = {
            lineNumber: 13,
            column: 15,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
    });

    test('Create view', () => {
        const pos: CaretPosition = {
            lineNumber: 15,
            column: 15,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cv']);
    });

    test('Function call', () => {
        const pos: CaretPosition = {
            lineNumber: 17,
            column: 27,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['calculate_age']);
    });

    test('Create Function', () => {
        const pos: CaretPosition = {
            lineNumber: 19,
            column: 20,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['fnc']);
    });

    test('Show columns from view', () => {
        const pos: CaretPosition = {
            lineNumber: 21,
            column: 22,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['vie']);
    });

    test('Show create table', () => {
        const pos: CaretPosition = {
            lineNumber: 23,
            column: 22,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['tb1']);
    });

    test('Show create view', () => {
        const pos: CaretPosition = {
            lineNumber: 25,
            column: 20,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['v1']);
    });
});

@@ -1,7 +1,7 @@
import fs from 'fs';
import path from 'path';
import { CaretPosition } from '../../../../src/parser/common/basic-parser-types';
import FlinkSQL from '../../../../src/parser/flinksql';

const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');
@@ -11,47 +11,50 @@ describe('Flink SQL Token Suggestion', () => {
    test('Use Statement ', () => {
        const pos: CaretPosition = {
            lineNumber: 3,
            column: 5,
        };
        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
        expect(suggestion).toEqual(['MODULES', 'CATALOG']);
    });

    test('Create Statement ', () => {
        const pos: CaretPosition = {
            lineNumber: 5,
            column: 8,
        };
        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
        expect(suggestion).toEqual([
            'CATALOG',
            'FUNCTION',
            'TEMPORARY',
            'VIEW',
            'DATABASE',
            'TABLE',
        ]);
    });

    test('Show Statement ', () => {
        const pos: CaretPosition = {
            lineNumber: 7,
            column: 6,
        };
        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
        expect(suggestion).toEqual([
            'MODULES',
            'FULL',
            'FUNCTIONS',
            'USER',
            'CREATE',
            'COLUMNS',
            'TABLES',
            'CURRENT',
            'CATALOGS',
            'DATABASES',
            'JARS',
            'VIEWS',
        ]);
    });
});
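The token-suggestion assertions above double as a usage recipe for the completion API: `getSuggestionAtCaretPosition` takes the SQL text plus a `{ lineNumber, column }` caret and returns keyword suggestions alongside entity-level syntax suggestions. A small consumer sketch built only from the calls shown in these tests (the import path is illustrative, not the published entry point):

    import FlinkSQL from 'src/parser/flinksql'; // illustrative path

    const parser = new FlinkSQL();
    const sql = 'CREATE \n';
    const suggestions = parser.getSuggestionAtCaretPosition(sql, { lineNumber: 1, column: 8 });

    // Keyword completions, e.g. ['CATALOG', 'FUNCTION', 'TEMPORARY', ...]
    const keywords = suggestions?.keywords ?? [];
    // Entity completions, grouped by what the caret position syntactically expects.
    const entities = (suggestions?.syntax ?? []).map((syn) => ({
        kind: syn.syntaxContextType,
        typedSoFar: syn.wordRanges.map((token) => token.text).join(''),
    }));
    console.log(keywords, entities);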

@@ -1,5 +1,5 @@
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';

const features = {
    table: readSQL(__dirname, 'alterTable.sql'),
@@ -26,11 +26,10 @@ describe('FlinkSQL Alter Statements Syntax Tests', () => {
            expect(parser.validate(sql).length).toBe(0);
        });
    });
    features.function.forEach((sql) => {
        it(sql, () => {
            expect(parser.validate(sql).length).toBe(0);
        });
    });
});

@@ -1,9 +1,9 @@
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';

// SQL for the comprehensive tests is not split
const features = {
    chores: readSQL(__dirname, 'chore.sql'),
};

describe('FlinkSQL Chore Syntax Tests', () => {
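The translated comments in these fixtures ("... is not split") contrast two loading paths: files that must be validated as one block are read whole with `fs.readFileSync`, while everything else goes through the `readSQL` helper, which the tests treat as returning one string per statement. The helper itself is not part of this diff; a hypothetical sketch of the contract the tests rely on:

    import fs from 'fs';
    import path from 'path';

    // Hypothetical stand-in for the repo's readSQL helper: split a fixture
    // into statements so each one becomes its own it(...) case.
    function readSQL(dirname: string, fileName: string): string[] {
        const content = fs.readFileSync(path.join(dirname, 'fixtures', fileName), 'utf-8');
        return content
            .split(';')
            .map((statement) => statement.trim())
            .filter(Boolean)
            .map((statement) => statement + ';');
    }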

@@ -1,10 +1,10 @@
import fs from 'fs';
import path from 'path';
import FlinkSQL from '../../../../src/parser/flinksql';

// SQL with comments is not split
const features = {
    comments: fs.readFileSync(path.join(__dirname, 'fixtures', 'comment.sql'), 'utf-8'),
};

describe('FlinkSQL Comment Syntax Tests', () => {

@@ -1,10 +1,10 @@
import fs from 'fs';
import path from 'path';
import FlinkSQL from '../../../../src/parser/flinksql';

// SQL for the comprehensive tests is not split
const features = {
    templates: fs.readFileSync(path.join(__dirname, 'fixtures', 'templates.sql'), 'utf-8'),
};

describe('FlinkSQL Comprehensive Tests', () => {
@@ -13,4 +13,3 @@ describe('FlinkSQL Comprehensive Tests', () => {
        expect(parser.validate(features.templates).length).toBe(0);
    });
});

@@ -1,8 +1,8 @@
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';

const features = {
    describes: readSQL(__dirname, 'describe.sql'),
};

describe('FlinkSQL Describe Syntax Tests', () => {

@@ -1,5 +1,5 @@
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';

const features = {
    table: readSQL(__dirname, 'dropTable.sql'),
@@ -30,7 +30,7 @@ describe('FlinkSQL Drop Statements Tests', () => {
        it(sql, () => {
            expect(parser.validate(sql).length).toBe(0);
        });
    });
    features.database.forEach((sql) => {
        it(sql, () => {
            expect(parser.validate(sql).length).toBe(0);

@@ -1,8 +1,8 @@
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';

const features = {
    dtAddFiles: readSQL(__dirname, 'dtAddFile.sql'),
};

describe('FlinkSQL DT Add File Syntax Tests', () => {

@@ -1,8 +1,8 @@
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';

const features = {
    explains: readSQL(__dirname, 'explain.sql'),
};

describe('FlinkSQL Explain Syntax Tests', () => {

@@ -1,13 +1,12 @@
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';

const parser = new FlinkSQL();
const features = {
    InsertFromSelectQueries: readSQL(__dirname, 'insertFromSelectQueries.sql'),
    InsertValuesIntoTable: readSQL(__dirname, 'insertValuesIntoTable.sql'),
    InsertMultipleTable: readSQL(__dirname, 'insertMultipleTable.sql'),
};

describe('FlinkSQL Insert Syntax Tests', () => {

@@ -1,22 +1,22 @@
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';

const parser = new FlinkSQL();
const features = {
    base: readSQL(__dirname, 'select.sql'),
    withClause: readSQL(__dirname, 'selectWithClause.sql'),
    distinct: readSQL(__dirname, 'selectDistinct.sql'),
    windowTVF: readSQL(__dirname, 'selectWindowTVF.sql'),
    aggregation: readSQL(__dirname, 'selectAggregation.sql'),
    join: readSQL(__dirname, 'selectJoin.sql'),
    setOperation: readSQL(__dirname, 'selectSetOperations.sql'),
    pattern: readSQL(__dirname, 'selectPatternRecognition.sql'),
    where: readSQL(__dirname, 'selectWhere.sql'),
};

describe('FlinkSQL Query Statement Tests', () => {
    describe('Base Select', () => {
        features.base.forEach((sql) => {
            it(sql, () => {
                expect(parser.validate(sql).length).toBe(0);
@@ -24,7 +24,7 @@ describe("FlinkSQL Query Statement Tests", () => {
        });
    });

    describe('With Clause Select', () => {
        features.withClause.forEach((sql) => {
            it(sql, () => {
                expect(parser.validate(sql).length).toBe(0);
@@ -32,60 +32,59 @@ describe("FlinkSQL Query Statement Tests", () => {
        });
    });

    describe('Select DISTINCT', () => {
        features.distinct.forEach((sql) => {
            it(sql, () => {
                expect(parser.validate(sql).length).toBe(0);
            });
        });
    });

    describe('Select Window TVF', () => {
        features.windowTVF.forEach((sql) => {
            it(sql, () => {
                expect(parser.validate(sql).length).toBe(0);
            });
        });
    });

    describe('Select Aggregation', () => {
        features.aggregation.forEach((sql) => {
            it(sql, () => {
                expect(parser.validate(sql).length).toBe(0);
            });
        });
    });

    describe('Select Join', () => {
        features.join.forEach((sql) => {
            it(sql, () => {
                expect(parser.validate(sql).length).toBe(0);
            });
        });
    });

    describe('Select Set Operations', () => {
        features.setOperation.forEach((sql) => {
            it(sql, () => {
                expect(parser.validate(sql).length).toBe(0);
            });
        });
    });

    describe('Select Pattern Recognition', () => {
        features.pattern.forEach((sql) => {
            it(sql, () => {
                expect(parser.validate(sql).length).toBe(0);
            });
        });
    });

    describe('Select Where', () => {
        features.where.forEach((sql) => {
            it(sql, () => {
                expect(parser.validate(sql).length).toBe(0);
            });
        });
    });
});

@@ -1,8 +1,8 @@
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';

const features = {
    shows: readSQL(__dirname, 'show.sql'),
};

describe('FlinkSQL Show Syntax Tests', () => {

@@ -1,13 +1,13 @@
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';

const features = {
    uses: readSQL(__dirname, 'use.sql'),
};

describe('FlinkSQL Use Syntax Tests', () => {
    const parser = new FlinkSQL();
    features.uses.forEach((sql) => {
        it(sql, () => {
            expect(parser.validate(sql).length).toBe(0);

@@ -13,13 +13,16 @@ describe('Flink SQL Visitor Tests', () => {
    test('Visitor visitTableName', () => {
        let result = '';
        class MyVisitor
            extends AbstractParseTreeVisitor<any>
            implements FlinkSqlParserVisitor<any>
        {
            protected defaultResult() {
                return result;
            }
            visitTableExpression = (ctx): void => {
                result = ctx.text.toLowerCase();
            };
        }
        const visitor: any = new MyVisitor();
        visitor.visit(parserTree);

@@ -1,5 +1,6 @@
import GenericSQL from '../../../src/parser/generic';
import { SqlParserListener } from '../../../src/lib/generic/SqlParserListener';
import { ParseTreeListener } from 'antlr4ts/tree';

describe('Generic SQL Listener Tests', () => {
    const expectTableName = 'user1';
@@ -13,11 +14,11 @@ describe('Generic SQL Listener Tests', () => {
        class MyListener implements SqlParserListener {
            enterTableName = (ctx): void => {
                result = ctx.text.toLowerCase();
            };
        }
        const listenTableName: any = new MyListener();
        await parser.listen(listenTableName as ParseTreeListener, parserTree);
        expect(result).toBe(expectTableName);
    });
});

@@ -17,10 +17,10 @@ describe('Generic SQL Visitor Tests', () => {
            protected defaultResult() {
                return result;
            }
            visitTableName = (ctx): void => {
                result = ctx.text.toLowerCase();
            };
        }
        const visitor = new MyVisitor();
        visitor.visit(parserTree);

@@ -1,8 +1,8 @@
import { ParseTreeListener } from 'antlr4ts/tree';
import { ProgramContext } from '../../../src/lib/hive/HiveSqlParser';
import { HiveSqlParserListener } from '../../../src/lib/hive/HiveSqlParserListener';
import HiveSQL from '../../../src/parser/hive';

describe('HiveSQL Listener Tests', () => {
    const parser = new HiveSQL();
    test('Listener enterSelectList', async () => {
@@ -16,9 +16,9 @@ describe('HiveSQL Listener Tests', () => {
                result = ctx.text;
            }
        }
        const listenTableName = new MyListener();
        await parser.listen(listenTableName as ParseTreeListener, parserTree as ProgramContext);
        expect(result).toBe(expectTableName.toUpperCase());
    });
    test('Listener enterCreateTable', async () => {
@@ -30,9 +30,9 @@ describe('HiveSQL Listener Tests', () => {
                result = ctx.text;
            }
        }
        const listenTableName = new MyListener();
        await parser.listen(listenTableName as ParseTreeListener, parserTree as ProgramContext);
        expect(result).toBe('DROPTABLETABLE_NAME');
    });
});

@@ -1,9 +1,12 @@
import fs from 'fs';
import path from 'path';
import { CaretPosition, SyntaxContextType } from '../../../../src/parser/common/basic-parser-types';
import HiveSQL from '../../../../src/parser/hive';

const syntaxSql = fs.readFileSync(
    path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),
    'utf-8'
);

describe('Hive SQL Syntax Suggestion', () => {
    const parser = new HiveSQL();
@@ -17,131 +20,140 @@ describe('Hive SQL Syntax Suggestion', () => {
    test('Insert table ', () => {
        const pos: CaretPosition = {
            lineNumber: 1,
            column: 18,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'tb']);
    });

    test('Select table ', () => {
        const pos: CaretPosition = {
            lineNumber: 3,
            column: 18,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
    });

    test('Create table ', () => {
        const pos: CaretPosition = {
            lineNumber: 5,
            column: 17,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
    });

    test('DROP table ', () => {
        const pos: CaretPosition = {
            lineNumber: 7,
            column: 26,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'a']);
    });

    test('Create view ', () => {
        const pos: CaretPosition = {
            lineNumber: 9,
            column: 28,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'v']);
    });

    test('Drop view ', () => {
        const pos: CaretPosition = {
            lineNumber: 11,
            column: 15,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'v']);
    });

    test('Create function ', () => {
        const pos: CaretPosition = {
            lineNumber: 13,
            column: 20,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['fn1']);
    });

    test('Use function', () => {
        const pos: CaretPosition = {
            lineNumber: 15,
            column: 27,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['calculate_age']);
    });

    test('Create database', () => {
        const pos: CaretPosition = {
            lineNumber: 17,
            column: 19,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db']);
    });

    test('Drop database', () => {
        const pos: CaretPosition = {
            lineNumber: 19,
            column: 26,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['sch']);
    });
});

@@ -1,232 +1,191 @@
import fs from 'fs';
import path from 'path';
import { CaretPosition } from '../../../../src/parser/common/basic-parser-types';
import HiveSQL from '../../../../src/parser/hive';

const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');

describe('Hive SQL Syntax Suggestion', () => {
    const parser = new HiveSQL();

    test('After ALTER', () => {
        const pos: CaretPosition = {
            lineNumber: 1,
            column: 7,
        };
        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
        expect(suggestion).toEqual([
            'APPLICATION',
            'GROUP',
            'USER',
            'POOL',
            'TRIGGER',
            'RESOURCE',
            'SCHEDULED',
            'INDEX',
            'CONNECTOR',
            'DATABASE',
            'SCHEMA',
            'MATERIALIZED',
            'VIEW',
            'TABLE',
        ]);
    });

    test('After CREATE', () => {
        const pos: CaretPosition = {
            lineNumber: 3,
            column: 8,
        };
        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
        expect(suggestion).toEqual([
            'CONNECTOR',
            'APPLICATION',
            'GROUP',
            'USER',
            'POOL',
            'TRIGGER',
            'RESOURCE',
            'ROLE',
            'INDEX',
            'TEMPORARY',
            'FUNCTION',
            'SCHEDULED',
            'MATERIALIZED',
            'VIEW',
            'OR',
            'MANAGED',
            'TABLE',
            'EXTERNAL',
            'TRANSACTIONAL',
            'REMOTE',
            'DATABASE',
            'SCHEMA',
        ]);
    });

    test('After DELETE', () => {
        const pos: CaretPosition = {
            lineNumber: 5,
            column: 8,
        };
        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
        expect(suggestion).toEqual(['FROM']);
    });

    test('After DESCRIBE', () => {
        const pos: CaretPosition = {
            lineNumber: 7,
            column: 10,
        };
        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
        expect(suggestion).toEqual([
            'EXTENDED',
            'FORMATTED',
            'FUNCTION',
            'CONNECTOR',
            'DATABASE',
            'SCHEMA',
        ]);
    });

    test('After DROP', () => {
        const pos: CaretPosition = {
            lineNumber: 9,
            column: 6,
        };
        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
        expect(suggestion).toEqual([
            'CONNECTOR',
            'APPLICATION',
            'GROUP',
            'USER',
            'POOL',
            'TRIGGER',
            'RESOURCE',
            'ROLE',
            'INDEX',
            'TEMPORARY',
            'FUNCTION',
            'MATERIALIZED',
            'VIEW',
            'SCHEDULED',
            'TABLE',
            'DATABASE',
            'SCHEMA',
        ]);
    });

    test('After EXPORT', () => {
        const pos: CaretPosition = {
            lineNumber: 11,
            column: 8,
        };
        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
        expect(suggestion).toEqual(['TABLE']);
    });

    test('After IMPORT', () => {
        const pos: CaretPosition = {
            lineNumber: 13,
            column: 8,
        };
        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
        expect(suggestion).toEqual(['FROM', 'TABLE', 'EXTERNAL']);
    });

    test('After INSERT', () => {
        const pos: CaretPosition = {
            lineNumber: 15,
            column: 8,
        };
        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
        expect(suggestion).toEqual(['INTO', 'OVERWRITE']);
    });

    test('After LOAD', () => {
        const pos: CaretPosition = {
            lineNumber: 17,
            column: 6,
        };
        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
        expect(suggestion).toEqual(['DATA']);
    });

    test('After SHOW', () => {
        const pos: CaretPosition = {
            lineNumber: 19,
            column: 6,
        };
        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
        expect(suggestion).toEqual([
            'CURRENT',
            'ROLES',
            'PRINCIPALS',
            'ROLE',
            'GRANT',
            'INDEX',
            'INDEXES',
            'FORMATTED',
            'CONNECTORS',
            'RESOURCE',
            'CONF',
            'TRANSACTIONS',
            'COMPACTIONS',
            'LOCKS',
            'TBLPROPERTIES',
            'TABLE',
            'CREATE',
            'PARTITIONS',
            'FUNCTIONS',
            'COLUMNS',
            'SORTED',
            'MATERIALIZED',
            'VIEWS',
            'TABLES',
            'EXTENDED',
            'DATABASES',
            'SCHEMAS',
        ]);
    });
});

@@ -12,7 +12,7 @@ const features = {
    indexes: readSQL(__dirname, 'createIndex.sql'),
    macros: readSQL(__dirname, 'createMacro.sql'),
    connectors: readSQL(__dirname, 'createConnector.sql'),
    scheduledQueries: readSQL(__dirname, 'createScheduledQuery.sql'),
};

describe('HiveSQL Create Syntax Tests', () => {

@@ -5,7 +5,7 @@ const parser = new HiveSQL();
const features = {
    drops: readSQL(__dirname, 'drop.sql'),
    reloads: readSQL(__dirname, 'reload.sql'),
};

describe('HiveSQL Drop Syntax Tests', () => {

@@ -2,7 +2,7 @@ import HiveSQL from '../../../../src/parser/hive';
import { readSQL } from '../../../helper';

const features = {
    exports: readSQL(__dirname, 'export.sql'),
};

describe('HiveSQL Export Syntax Tests', () => {
@@ -14,4 +14,3 @@ describe('HiveSQL Export Syntax Tests', () => {
        });
    });
});

@@ -1,8 +1,8 @@
import HiveSQL from '../../../../src/parser/hive';
import { readSQL } from '../../../helper';

const features = {
    imports: readSQL(__dirname, 'import.sql'),
};

describe('HiveSQL Import Syntax Tests', () => {

@@ -5,7 +5,7 @@ const parser = new HiveSQL();
const features = {
    insertFromQueries: readSQL(__dirname, 'insertFromQuery.sql'),
    insertFromValues: readSQL(__dirname, 'insertFormValues.sql'),
};

describe('HiveSQL Insert Syntax Tests', () => {

@@ -15,7 +15,6 @@ describe('HiveSQL Visitor Tests', () => {
    test('Visitor visitTableName', () => {
        let result = '';
        class MyVisitor extends AbstractParseTreeVisitor<any> implements HiveSqlParserVisitor<any> {
            defaultResult() {
                return result;
            }

@@ -1,3 +1,4 @@
import { ParseTreeListener } from 'antlr4ts/tree';
import { Target_listContext } from '../../../src/lib/pgsql/PostgreSQLParser';
import { PostgreSQLParserListener } from '../../../src/lib/pgsql/PostgreSQLParserListener';
import PostgresSQL from '../../../src/parser/pgsql';
@@ -16,9 +17,9 @@ describe('PostgresSQL Listener Tests', () => {
                result = ctx.text.toLowerCase();
            }
        }
        const listenTableName = new MyListener();
        await parser.listen(listenTableName as ParseTreeListener, parserTree);
        expect(result).toBe(expectTableName);
    });
});

@@ -1,14 +1,14 @@
import PostgresSQL from '../../../src/parser/pgsql';
import { readSQL } from '../../helper';

const parser = new PostgresSQL();
const features = {
    base: readSQL(__dirname, 'select.sql'),
};

describe('Postgre SQL Query Statement Tests', () => {
    describe('Base Select', () => {
        features.base.forEach((sql) => {
            it(sql, () => {
                expect(parser.validate(sql).length).toBe(0);

@@ -1,4 +1,4 @@
import PostgresSQL from '../../../src/parser/pgsql';

describe('PostgresSQL SQL Syntax Tests', () => {
    const parser = new PostgresSQL();
@@ -21,5 +21,4 @@ describe('PostgresSQL SQL Syntax Tests', () => {
        const result = parser.validate(sql);
        expect(result.length).toBe(0);
    });
});

@@ -1,6 +1,6 @@
import { AbstractParseTreeVisitor } from 'antlr4ts/tree/AbstractParseTreeVisitor';
import { PostgreSQLParserVisitor } from '../../../src/lib/pgsql/PostgreSQLParserVisitor';
import PostgresSQL from '../../../src/parser/pgsql';

describe('Generic SQL Visitor Tests', () => {
    const expectTableName = 'user1';
@@ -13,11 +13,14 @@ describe('Generic SQL Visitor Tests', () => {
    test('Visitor visitTableName', () => {
        let result = '';
        class MyVisitor
            extends AbstractParseTreeVisitor<any>
            implements PostgreSQLParserVisitor<any>
        {
            protected defaultResult() {
                return result;
            }
            visitTable_ref(ctx) {
                result = ctx.text.toLowerCase();
            }

@@ -1,3 +1,4 @@
import { ParseTreeListener } from 'antlr4ts/tree';
import { PlSqlParserListener } from '../../../src/lib/plsql/PlSqlParserListener';
import PLSQL from '../../../src/parser/plsql';
@@ -11,14 +12,13 @@ describe('PLSQL Listener Tests', () => {
    test('Listener enterTableName', async () => {
        let result = '';
        class MyListener implements PlSqlParserListener {
            enterTable_ref_list = (ctx): void => {
                result = ctx.text.toLowerCase();
            };
        }
        const listenTableName = new MyListener();
        await parser.listen(listenTableName as ParseTreeListener, parserTree);
        expect(result).toBe(expectTableName);
    });
});

@@ -15,10 +15,9 @@ describe('PLSQL Visitor Tests', () => {
            protected defaultResult() {
                return result;
            }
            visitTable_ref_list = (ctx): void => {
                result = ctx.text.toLowerCase();
            };
        }
        const visitor: any = new MyVisitor();
        visitor.visit(parserTree);

@@ -1,3 +1,4 @@
import { ParseTreeListener } from 'antlr4ts/tree';
import { SparkSqlParserListener } from '../../../src/lib/spark/SparkSqlParserListener';
import SparkSQL from '../../../src/parser/spark';
@@ -13,11 +14,11 @@ describe('Spark SQL Listener Tests', () => {
        class MyListener implements SparkSqlParserListener {
            exitRelationPrimary = (ctx): void => {
                result = ctx.text.toLowerCase();
            };
        }
        const listenTableName = new MyListener();
        parser.listen(listenTableName as ParseTreeListener, parserTree);
        expect(result).toBe(expectTableName);
    });
});
@@ -3,7 +3,10 @@ import path from 'path';
 import { CaretPosition, SyntaxContextType } from '../../../../src/parser/common/basic-parser-types';
 import SparkSQL from '../../../../src/parser/spark';

-const syntaxSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'), 'utf-8');
+const syntaxSql = fs.readFileSync(
+    path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),
+    'utf-8'
+);

 describe('Spark SQL Syntax Suggestion', () => {
     const parser = new SparkSQL();
@@ -20,11 +23,12 @@ describe('Spark SQL Syntax Suggestion', () => {
             column: 18,
         };
         const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
-        const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.TABLE);
+        const suggestion = syntaxes?.find(
+            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+        );
         expect(suggestion).not.toBeUndefined();
-        expect(suggestion?.wordRanges.map((token) => token.text))
-            .toEqual(['db', '.', 'tb']);
+        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'tb']);
     });

     test('Select table ', () => {
@@ -33,11 +37,12 @@ describe('Spark SQL Syntax Suggestion', () => {
             column: 18,
         };
         const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
-        const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.TABLE);
+        const suggestion = syntaxes?.find(
+            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+        );
         expect(suggestion).not.toBeUndefined();
-        expect(suggestion?.wordRanges.map((token) => token.text))
-            .toEqual(['db', '.']);
+        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
     });

     test('Create table ', () => {
@@ -46,11 +51,12 @@ describe('Spark SQL Syntax Suggestion', () => {
             column: 17,
         };
         const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
-        const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE);
+        const suggestion = syntaxes?.find(
+            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
+        );
         expect(suggestion).not.toBeUndefined();
-        expect(suggestion?.wordRanges.map((token) => token.text))
-            .toEqual(['db', '.']);
+        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
     });

     test('DROP table ', () => {
@@ -59,11 +65,12 @@ describe('Spark SQL Syntax Suggestion', () => {
             column: 26,
         };
         const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
-        const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.TABLE);
+        const suggestion = syntaxes?.find(
+            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+        );
         expect(suggestion).not.toBeUndefined();
-        expect(suggestion?.wordRanges.map((token) => token.text))
-            .toEqual(['db', '.', 'a']);
+        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'a']);
     });

     test('Create view ', () => {
@@ -72,11 +79,12 @@ describe('Spark SQL Syntax Suggestion', () => {
             column: 28,
         };
         const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
-        const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE);
+        const suggestion = syntaxes?.find(
+            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
+        );
         expect(suggestion).not.toBeUndefined();
-        expect(suggestion?.wordRanges.map((token) => token.text))
-            .toEqual(['db', '.', 'v']);
+        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'v']);
     });

     test('Drop view ', () => {
@@ -85,11 +93,12 @@ describe('Spark SQL Syntax Suggestion', () => {
             column: 15,
         };
         const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
-        const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.VIEW);
+        const suggestion = syntaxes?.find(
+            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
+        );
         expect(suggestion).not.toBeUndefined();
-        expect(suggestion?.wordRanges.map((token) => token.text))
-            .toEqual(['db', '.', 'v']);
+        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'v']);
     });

     test('Create function ', () => {
@@ -98,11 +107,12 @@ describe('Spark SQL Syntax Suggestion', () => {
             column: 20,
         };
         const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
-        const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE);
+        const suggestion = syntaxes?.find(
+            (syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
+        );
         expect(suggestion).not.toBeUndefined();
-        expect(suggestion?.wordRanges.map((token) => token.text))
-            .toEqual(['fn1']);
+        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['fn1']);
     });

     test('Use function', () => {
@@ -111,11 +121,12 @@ describe('Spark SQL Syntax Suggestion', () => {
             column: 27,
         };
         const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
-        const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION);
+        const suggestion = syntaxes?.find(
+            (syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
+        );
         expect(suggestion).not.toBeUndefined();
-        expect(suggestion?.wordRanges.map((token) => token.text))
-            .toEqual(['calculate_age']);
+        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['calculate_age']);
     });

     test('Create database', () => {
@@ -124,11 +135,12 @@ describe('Spark SQL Syntax Suggestion', () => {
             column: 19,
         };
         const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
-        const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE);
+        const suggestion = syntaxes?.find(
+            (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE
+        );
         expect(suggestion).not.toBeUndefined();
-        expect(suggestion?.wordRanges.map((token) => token.text))
-            .toEqual(['db']);
+        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db']);
     });

     test('Drop database', () => {
@@ -137,10 +149,11 @@ describe('Spark SQL Syntax Suggestion', () => {
             column: 26,
         };
         const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
-        const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.DATABASE);
+        const suggestion = syntaxes?.find(
+            (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
+        );
         expect(suggestion).not.toBeUndefined();
-        expect(suggestion?.wordRanges.map((token) => token.text))
-            .toEqual(['sch']);
+        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['sch']);
     });
 });


@@ -13,19 +13,9 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 1,
             column: 7,
         };
-        const suggestion = parser.getSuggestionAtCaretPosition(
-            tokenSql,
-            pos,
-        )?.keywords;
+        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
-        expect(suggestion).toEqual([
-            'TABLE',
-            'INDEX',
-            'VIEW',
-            'DATABASE',
-            'NAMESPACE',
-            'SCHEMA',
-        ]);
+        expect(suggestion).toEqual(['TABLE', 'INDEX', 'VIEW', 'DATABASE', 'NAMESPACE', 'SCHEMA']);
     });

     test('After CREATE', () => {
@@ -33,10 +23,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 3,
             column: 8,
         };
-        const suggestion = parser.getSuggestionAtCaretPosition(
-            tokenSql,
-            pos,
-        )?.keywords;
+        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
         expect(suggestion).toEqual([
             'TEMPORARY',
@@ -59,10 +46,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 5,
             column: 8,
         };
-        const suggestion = parser.getSuggestionAtCaretPosition(
-            tokenSql,
-            pos,
-        )?.keywords;
+        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
         expect(suggestion).toEqual(['FROM']);
     });
@@ -72,10 +56,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 7,
             column: 10,
         };
-        const suggestion = parser.getSuggestionAtCaretPosition(
-            tokenSql,
-            pos,
-        )?.keywords;
+        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
         expect(suggestion).toEqual([
             'WITH',
@@ -98,10 +79,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 9,
             column: 6,
         };
-        const suggestion = parser.getSuggestionAtCaretPosition(
-            tokenSql,
-            pos,
-        )?.keywords;
+        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
         expect(suggestion).toEqual([
             'TEMPORARY',
@@ -121,15 +99,9 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 11,
             column: 8,
         };
-        const suggestion = parser.getSuggestionAtCaretPosition(
-            tokenSql,
-            pos,
-        )?.keywords;
+        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
-        expect(suggestion).toEqual([
-            'OVERWRITE',
-            'INTO',
-        ]);
+        expect(suggestion).toEqual(['OVERWRITE', 'INTO']);
     });

     test('After LOAD', () => {
@@ -137,14 +109,9 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 13,
             column: 6,
         };
-        const suggestion = parser.getSuggestionAtCaretPosition(
-            tokenSql,
-            pos,
-        )?.keywords;
+        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
-        expect(suggestion).toEqual([
-            'DATA',
-        ]);
+        expect(suggestion).toEqual(['DATA']);
     });

     test('After SHOW', () => {
@@ -152,10 +119,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 15,
             column: 6,
         };
-        const suggestion = parser.getSuggestionAtCaretPosition(
-            tokenSql,
-            pos,
-        )?.keywords;
+        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
         expect(suggestion).toEqual([
             'LOCKS',
@@ -190,10 +154,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 17,
             column: 8,
         };
-        const suggestion = parser.getSuggestionAtCaretPosition(
-            tokenSql,
-            pos,
-        )?.keywords;
+        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
         expect(suggestion).toEqual(['TABLE']);
     });


@@ -1,7 +1,6 @@
 import SparkSQL from '../../../../src/parser/spark';
 import { readSQL } from '../../../helper';

 const parser = new SparkSQL();
 const features = {


@@ -1,7 +1,6 @@
 import SparkSQL from '../../../../src/parser/spark';
 import { readSQL } from '../../../helper';

 const parser = new SparkSQL();
 const features = {


@@ -12,14 +12,17 @@ describe('Spark SQL Visitor Tests', () => {
     });

     test('Visitor visitRelationPrimary', () => {
-        class MyVisitor extends AbstractParseTreeVisitor<any> implements SparkSqlParserVisitor<any> {
+        class MyVisitor
+            extends AbstractParseTreeVisitor<any>
+            implements SparkSqlParserVisitor<any>
+        {
             result: string = '';
             protected defaultResult() {
                 return this.result;
             }
             visitRelationPrimary = (ctx): void => {
                 this.result = ctx.text.toLowerCase();
-            }
+            };
         }
         const visitor = new MyVisitor();
         visitor.visit(parserTree);


@@ -1,5 +1,6 @@
 import trinoSQL from '../../../src/parser/trinosql';
 import { TrinoSqlListener } from '../../../src/lib/trinosql/TrinoSqlListener';
+import { ParseTreeListener } from 'antlr4ts/tree';

 describe('trino SQL Listener Tests', () => {
     const expectTableName = 'user1';
@@ -11,14 +12,13 @@ describe('trino SQL Listener Tests', () => {
     test('Listener enterTableName', async () => {
         let result = '';
         class MyListener implements TrinoSqlListener {
             enterTableName = (ctx): void => {
                 result = ctx.text.toLowerCase();
             };
         }
         const listenTableName = new MyListener();
-        await parser.listen(listenTableName, parserTree);
+        await parser.listen(listenTableName as ParseTreeListener, parserTree);
         expect(result).toBe(expectTableName);
     });
 });


@@ -1,11 +1,11 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     table: readSQL(__dirname, 'alter_table.sql'),
     view: readSQL(__dirname, 'alter_view.sql'),
     schema: readSQL(__dirname, 'alter_schema.sql'),
-    materializedView: readSQL(__dirname, 'alter_materialized_view.sql')
+    materializedView: readSQL(__dirname, 'alter_materialized_view.sql'),
 };

 describe('TrinoSQL Alter Statements Syntax Tests', () => {
@@ -19,16 +19,15 @@ describe('TrinoSQL Alter Statements Syntax Tests', () => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
     features.schema.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
     features.materializedView.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
 });


@@ -1,9 +1,8 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     analyze: readSQL(__dirname, 'analyze.sql'),
 };

 describe('TrinoSQL Analyze Statements Syntax Tests', () => {
@@ -15,4 +14,3 @@ describe('TrinoSQL Analyze Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,9 +1,8 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     call: readSQL(__dirname, 'call.sql'),
 };

 describe('TrinoSQL Call Statements Syntax Tests', () => {
@@ -15,4 +14,3 @@ describe('TrinoSQL Call Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,9 +1,8 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     comment: readSQL(__dirname, 'comment.sql'),
 };

 describe('TrinoSQL Comment Statements Syntax Tests', () => {
@@ -15,4 +14,3 @@ describe('TrinoSQL Comment Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     commit: readSQL(__dirname, 'commit.sql'),
@@ -14,4 +14,3 @@ describe('TrinoSQL Commit Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     table: readSQL(__dirname, 'create_table.sql'),
@@ -12,7 +12,7 @@ const features = {
 describe('TrinoSQL Create Statements Syntax Tests', () => {
     const parser = new TrinoSQL();
     features.table.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
@@ -21,28 +21,26 @@ describe('TrinoSQL Create Statements Syntax Tests', () => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
     features.schema.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
     features.tableAsSelect.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
     features.role.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
     features.materializedView.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     deallocatePrepare: readSQL(__dirname, 'deallocate_prepare.sql'),
@@ -14,4 +14,3 @@ describe('TrinoSQL deallocatePrepare Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     delete: readSQL(__dirname, 'delete.sql'),
@@ -14,4 +14,3 @@ describe('TrinoSQL Delete Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     deny: readSQL(__dirname, 'deny.sql'),
@@ -14,4 +14,3 @@ describe('TrinoSQL Deny Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     describe: readSQL(__dirname, 'describe.sql'),
@@ -14,4 +14,3 @@ describe('TrinoSQL Describe Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     table: readSQL(__dirname, 'drop_table.sql'),
@@ -12,7 +12,7 @@ const features = {
 describe('TrinoSQL Drop Statements Syntax Tests', () => {
     const parser = new TrinoSQL();
     features.table.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
@@ -21,28 +21,26 @@ describe('TrinoSQL Drop Statements Syntax Tests', () => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
     features.schema.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
     features.column.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
     features.role.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
     features.materializedView.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     execute: readSQL(__dirname, 'execute.sql'),
@@ -14,4 +14,3 @@ describe('TrinoSQL Execute Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     explain: readSQL(__dirname, 'explain.sql'),
@@ -14,4 +14,3 @@ describe('TrinoSQL Explain Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     grant: readSQL(__dirname, 'grant.sql'),
@@ -14,4 +14,3 @@ describe('TrinoSQL Grant Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     insertIntoTable: readSQL(__dirname, 'insert_into.sql'),
@@ -13,4 +13,3 @@ describe('TrinoSQL Insert Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     matchRecognize: readSQL(__dirname, 'match_recognize.sql'),
@@ -14,4 +14,3 @@ describe('TrinoSQL Match Recognize Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     merge: readSQL(__dirname, 'merge.sql'),
@@ -14,4 +14,3 @@ describe('TrinoSQL Merge Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     prepare: readSQL(__dirname, 'prepare.sql'),
@@ -14,4 +14,3 @@ describe('TrinoSQL Prepare Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     refreshMaterializedView: readSQL(__dirname, 'refresh_materialized_view.sql'),
@@ -14,4 +14,3 @@ describe('TrinoSQL Refresh Materialized View Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     resetSession: readSQL(__dirname, 'reset_session.sql'),
@@ -14,4 +14,3 @@ describe('TrinoSQL Reset Session Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     revoke: readSQL(__dirname, 'revoke.sql'),
@@ -20,4 +20,3 @@ describe('TrinoSQL Revoke Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     rollbackTransaction: readSQL(__dirname, 'rollback_transaction.sql'),
@@ -14,4 +14,3 @@ describe('TrinoSQL Rollback Transaction Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     select: readSQL(__dirname, 'select.sql'),
@@ -13,12 +13,12 @@ const features = {
     selectWithFetch: readSQL(__dirname, 'select_with_fetch.sql'),
     selectWithUNNEST: readSQL(__dirname, 'select_with_ unnest.sql'),
     selectWithExists: readSQL(__dirname, 'select_with_exists.sql'),
-    selectWithUnion: readSQL(__dirname, 'select_with_union.sql')
+    selectWithUnion: readSQL(__dirname, 'select_with_union.sql'),
 };

 describe('TrinoSQL Select Statements Syntax Tests', () => {
     const parser = new TrinoSQL();
     features.select.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
@@ -27,57 +27,56 @@ describe('TrinoSQL Select Statements Syntax Tests', () => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
     features.selectWithSetOperations.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
     features.selectWithSubQueries.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
     features.selectWithTableSample.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
     features.selectWithRowType.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
     features.selectWithOffset.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
     features.selectWithJoin.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
     features.selectWithFetch.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
     features.selectWithUNNEST.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
     features.selectWithExists.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
     features.selectWithUnion.forEach((sql) => {
         it(sql, () => {
             expect(parser.validate(sql).length).toBe(0);
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     role: readSQL(__dirname, 'set_role.sql'),
@@ -32,4 +32,3 @@ describe('TrinoSQL Set Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     tables: readSQL(__dirname, 'show_tables.sql'),
@@ -75,4 +75,3 @@ describe('TrinoSQL Show Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     startTransaction: readSQL(__dirname, 'start_transaction.sql'),
@@ -14,4 +14,3 @@ describe('TrinoSQL Start Transaction Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     truncateTable: readSQL(__dirname, 'truncate_table.sql'),
@@ -14,4 +14,3 @@ describe('TrinoSQL Truncate Table Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     update: readSQL(__dirname, 'update.sql'),
@@ -14,4 +14,3 @@ describe('TrinoSQL Update Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     use: readSQL(__dirname, 'use.sql'),
@@ -14,4 +14,3 @@ describe('TrinoSQL Use Statements Syntax Tests', () => {
         });
     });
 });


@@ -1,5 +1,5 @@
-import TrinoSQL from "../../../../src/parser/trinosql";
-import { readSQL } from "../../../helper";
+import TrinoSQL from '../../../../src/parser/trinosql';
+import { readSQL } from '../../../helper';

 const features = {
     values: readSQL(__dirname, 'values.sql'),
@@ -14,4 +14,3 @@ describe('TrinoSQL Values Statements Syntax Tests', () => {
         });
     });
 });

Some files were not shown because too many files have changed in this diff.