chore: devops (#180)

* ci: add dependencies about lint tool

* ci: replace eslint with prettier

* ci: add husky, cz and commitlint

* style: lint fix via prettier

* ci: add prettier and check-types to github workflow

Hayden 2023-10-13 11:16:36 +08:00 committed by GitHub
parent 4d1dfa676f
commit 7de192d486
105 changed files with 2615 additions and 1823 deletions
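
Taken together, the changes below wire formatting and commit-message checks into both the local git flow and CI: the new "prepare" script installs the husky hooks on install, the pre-commit hook runs lint-staged so prettier rewrites staged files, the commit-msg hook runs commitlint against the conventional-commit rules in commitlint.config.js, and the workflow gains prettier and check-types jobs that run "yarn lint" and "yarn check-types". A minimal sketch of the resulting local flow, assuming the hooks are already installed and the commands are run from the repository root:

# hooks are installed once, automatically via the "prepare" script
yarn                                   # triggers "husky install"

git add src/parser/flinksql.ts
git commit -m "update parser"
#   .husky/pre-commit  -> npx lint-staged               (prettier --write on staged files)
#   .husky/commit-msg  -> npx --no -- commitlint --edit
#   "update" is not in the configured type-enum, so this commit is rejected

git commit -m "fix: handle empty token stream"           # conventional type, accepted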

3
.czrc Normal file
View File

@ -0,0 +1,3 @@
{
"path": "./node_modules/@commitlint/cz-commitlint"
}
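
The .czrc above points commitizen at the cz-commitlint adapter, so interactive prompts are driven by the same rules that commitlint enforces. A hedged usage sketch (this change adds no dedicated commit script, so commitizen would be invoked directly, for example through npx):

npx cz
# prompts for type (feat, fix, docs, ...), optional scope, subject and body,
# then hands the assembled message to git commit, where the commit-msg hook
# re-checks it with commitlint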

View File

@ -1,37 +0,0 @@
module.exports = {
'env': {
'browser': true,
'es6': true,
},
'extends': [
'google',
],
'globals': {
'expect': 'readable',
'test': 'readable',
'describe': 'readable',
'beforeEach': 'readable',
'afterEach': 'readable',
'jest': 'readable',
'Atomics': 'readonly',
'SharedArrayBuffer': 'readonly',
},
'parser': '@typescript-eslint/parser',
'parserOptions': {
'ecmaFeatures': {
},
'ecmaVersion': 11,
'sourceType': 'module',
},
'plugins': [
'@typescript-eslint',
],
'rules': {
'indent': ['error', 4],
'object-curly-spacing': ['error', 'always'],
'max-len': 0,
'require-jsdoc': 0,
'valid-jsdoc': 0,
'no-unused-vars': 0,
},
};

View File

@ -43,7 +43,50 @@ jobs:
- name: install
if: steps.node_modules_cache_id.outputs.cache-hit != 'true'
run: yarn
prettier:
runs-on: ubuntu-latest
needs: [setup]
steps:
- uses: actions/checkout@v2
- name: Restore cache from yarn.lock
uses: actions/cache@v2
with:
path: package-temp-dir
key: lock-${{ github.sha }}
- name: Restore cache from node_modules
uses: actions/cache@v2
with:
path: node_modules
key: node_modules-${{ hashFiles('**/package-temp-dir/yarn.lock') }}
- name: Run prettier lint
run: |
export NODE_OPTIONS="--max_old_space_size=4096"
yarn lint
check-types:
runs-on: ubuntu-latest
needs: [setup]
steps:
- uses: actions/checkout@v2
- name: Restore cache from yarn.lock
uses: actions/cache@v2
with:
path: package-temp-dir
key: lock-${{ github.sha }}
- name: Restore cache from node_modules
uses: actions/cache@v2
with:
path: node_modules
key: node_modules-${{ hashFiles('**/package-temp-dir/yarn.lock') }}
- name: Run tsc check
run: |
export NODE_OPTIONS="--max_old_space_size=4096"
yarn check-types
test:
runs-on: ubuntu-latest
needs: [setup]

6
.husky/commit-msg Executable file
View File

@ -0,0 +1,6 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
echo 'commitlint'
npx --no -- commitlint --edit

4
.husky/pre-commit Executable file
View File

@ -0,0 +1,4 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
npx lint-staged

3
.lintstagedrc.js Normal file
View File

@ -0,0 +1,3 @@
module.exports = {
'*.js|ts': [`prettier --write`],
};

7
.prettierignore Normal file
View File

@ -0,0 +1,7 @@
**/*.md
**/*.ejs
**/package.json
**/lib
**/dist
node_modules
coverage

10
.prettierrc Normal file
View File

@ -0,0 +1,10 @@
{
"semi": true,
"tabWidth": 4,
"printWidth": 100,
"singleQuote": true,
"useTabs": false,
"bracketSpacing": true,
"arrowParens": "always",
"trailingComma": "es5"
}

12
commitlint.config.js Normal file
View File

@ -0,0 +1,12 @@
module.exports = {
extends: ['@commitlint/config-conventional'],
rules: {
'type-enum': [
2,
'always',
['feat', 'fix', 'docs', 'style', 'refactor', 'test', 'build', 'ci', 'chore'],
],
'scope-case': [0, 'always'],
'scope-empty': [0, 'always'],
},
};

View File

@ -18,28 +18,33 @@
"dist"
],
"scripts": {
"prepare": "husky install",
"antlr4": "node ./scripts/antlr4.js",
"build": "rm -rf dist && tsc",
"eslint": "eslint ./src/**/*.ts",
"check-types": "tsc --skipLibCheck",
"check-types": "tsc -p ./tsconfig.check.json",
"test": "NODE_OPTIONS=--max_old_space_size=4096 && jest",
"release": "npm run build && node ./scripts/release.js"
"release": "npm run build && node ./scripts/release.js",
"lint": "prettier --check '**/*.ts' --config ./.prettierrc",
"lint-fix": "prettier --write '**/*.ts' --config ./.prettierrc"
},
"author": "dt-insight-front",
"license": "MIT",
"devDependencies": {
"@commitlint/cli": "^17.7.2",
"@commitlint/config-conventional": "^17.7.0",
"@commitlint/cz-commitlint": "^17.7.2",
"@swc/core": "^1.3.60",
"@swc/jest": "^0.2.26",
"@types/jest": "^29.5.1",
"@types/node": "^18.15.11",
"@typescript-eslint/eslint-plugin": "^3.10.1",
"@typescript-eslint/parser": "^3.10.1",
"antlr4ts-cli": "^0.5.0-alpha.4",
"chalk": "4.1.2",
"eslint": "^7.32.0",
"eslint-config-google": "^0.14.0",
"commitizen": "^4.3.0",
"husky": "^8.0.3",
"inquirer": "^8.2.2",
"jest": "^29.5.0",
"lint-staged": "12.5.0",
"prettier": "^3.0.3",
"standard-version": "^9.5.0",
"typescript": "^5.0.4",
"yargs-parser": "^21.1.1"

File diff suppressed because it is too large

View File

@ -16,6 +16,5 @@ export * from './lib/trinosql/TrinoSqlListener';
export * from './lib/trinosql/TrinoSqlVisitor';
export { SyntaxContextType } from './parser/common/basic-parser-types';
export type * from './parser/common/basic-parser-types';
export type { SyntaxError, ParserError } from './parser/common/parserErrorListener';

View File

@ -30,12 +30,10 @@ export default abstract class PostgreSQLLexerBase extends Lexer {
return this._input;
}
checkLA( c) {
// eslint-disable-next-line new-cap
return this.getInputStream().LA(1) !== c;
}
charIsLetter() {
// eslint-disable-next-line new-cap
return isLetter(this.getInputStream().LA(-1));
}
@ -53,11 +51,9 @@ export default abstract class PostgreSQLLexerBase extends Lexer {
}
UnterminatedBlockCommentDebugAssert() {
// Debug.Assert(InputStream.LA(1) == -1 /*EOF*/);
}
CheckIfUtf32Letter() {
// eslint-disable-next-line new-cap
let codePoint = this.getInputStream().LA(-2) << 8 + this.getInputStream().LA(-1);
let c;
if (codePoint < 0x10000) {

View File

@ -1,9 +1,7 @@
/* eslint-disable new-cap,camelcase */
import { CharStreams, CommonTokenStream, Parser } from 'antlr4ts';
import { PostgreSQLLexer } from './PostgreSQLLexer';
import { PostgreSQLParser } from './PostgreSQLParser';
// @ts-ignore
export default abstract class PostgreSQLParserBase extends Parser {
constructor( input) {
super(input);
@ -32,16 +30,13 @@ export default abstract class PostgreSQLParserBase extends Parser {
}
}
if (!lang) return;
// eslint-disable-next-line camelcase
let func_as = null;
for (const a of _localctx.createfunc_opt_item()) {
if (!a.func_as()) {
// eslint-disable-next-line camelcase
func_as = a;
break;
}
}
// eslint-disable-next-line camelcase
if (!func_as) {
const txt = this.GetRoutineBodyString(func_as.func_as().sconst(0));
const line = func_as.func_as().sconst(0).start.getLine();
@ -76,7 +71,6 @@ export default abstract class PostgreSQLParserBase extends Parser {
GetRoutineBodyString( rule) {
const anysconst = rule.anysconst();
// eslint-disable-next-line new-cap
const StringConstant = anysconst.StringConstant();
if (null !== StringConstant) return this.unquote(this.TrimQuotes(StringConstant.getText()));
const UnicodeEscapeStringConstant = anysconst.UnicodeEscapeStringConstant();

View File

@ -5,7 +5,7 @@ import {
CharStreams,
CommonTokenStream,
CodePointCharStream,
ParserRuleContext
ParserRuleContext,
} from 'antlr4ts';
import { ParseTreeWalker, ParseTreeListener } from 'antlr4ts/tree';
import { CandidatesCollection, CodeCompletionCore } from 'antlr4-c3';
@ -15,7 +15,7 @@ import {
Suggestions,
SyntaxSuggestion,
WordRange,
TextSlice
TextSlice,
} from './basic-parser-types';
import ParserErrorListener, {
ParserError,
@ -38,7 +38,7 @@ interface SplitListener extends ParseTreeListener {
export default abstract class BasicParser<
L extends Lexer = Lexer,
PRC extends ParserRuleContext = ParserRuleContext,
P extends IParser<PRC> = IParser<PRC>
P extends IParser<PRC> = IParser<PRC>,
> {
protected _charStreams: CodePointCharStream;
protected _lexer: L;
@ -77,13 +77,13 @@ export default abstract class BasicParser<
candidates: CandidatesCollection,
allTokens: Token[],
caretTokenIndex: number,
tokenIndexOffset: number,
tokenIndexOffset: number
): Suggestions<Token>;
/**
* Get splitListener instance.
*/
protected abstract get splitListener (): SplitListener;
protected abstract get splitListener(): SplitListener;
/**
* Create an anltr4 lexer from input.
@ -92,8 +92,8 @@ export default abstract class BasicParser<
public createLexer(input: string) {
const charStreams = CharStreams.fromString(input.toUpperCase());
const lexer = this.createLexerFormCharStream(charStreams);
return lexer;
return lexer;
}
/**
@ -123,7 +123,7 @@ export default abstract class BasicParser<
this._parser = this.createParserFromTokenStream(this._tokenStream);
this._parser.buildParseTree = true;
return this._parser
return this._parser;
}
/**
@ -134,13 +134,10 @@ export default abstract class BasicParser<
* @param errorListener listen errors
* @returns parserTree
*/
public parse(
input: string,
errorListener?: ErrorHandler<any>
) {
public parse(input: string, errorListener?: ErrorHandler<any>) {
// Avoid parsing the same input repeatedly.
if(this._parsedInput === input && !errorListener) {
return;
if (this._parsedInput === input && !errorListener) {
return this._parserTree;
}
const parser = this.createParserWithCache(input);
@ -150,7 +147,7 @@ export default abstract class BasicParser<
this._errorCollector.clear();
parser.addErrorListener(this._errorCollector);
if(errorListener) {
if (errorListener) {
parser.addErrorListener(new ParserErrorListener(errorListener));
}
@ -178,11 +175,11 @@ export default abstract class BasicParser<
public getAllTokens(input: string): Token[] {
this.parse(input);
let allTokens = this._tokenStream.getTokens();
if(allTokens[allTokens.length - 1].text === '<EOF>') {
allTokens = allTokens.slice(0, -1)
if (allTokens[allTokens.length - 1].text === '<EOF>') {
allTokens = allTokens.slice(0, -1);
}
return allTokens;
}
return allTokens
};
/**
* It convert tree to string, it's convenient to use in unit test.
* @param string input
@ -204,7 +201,10 @@ export default abstract class BasicParser<
* @param listener Listener instance extends ParserListener
* @param parserTree parser Tree
*/
public listen<PTL extends ParseTreeListener = ParseTreeListener>(listener: PTL, parserTree: PRC) {
public listen<PTL extends ParseTreeListener = ParseTreeListener>(
listener: PTL,
parserTree: PRC
) {
ParseTreeWalker.DEFAULT.walk(listener, parserTree);
}
@ -218,7 +218,7 @@ export default abstract class BasicParser<
const splitListener = this.splitListener;
this.listen(splitListener, this._parserTree);
const res = splitListener.statementsContext.map(context => {
const res = splitListener.statementsContext.map((context) => {
const { start, stop } = context;
return {
startIndex: start.startIndex,
@ -228,8 +228,8 @@ export default abstract class BasicParser<
startColumn: start.charPositionInLine + 1,
endColumn: stop.charPositionInLine + stop.text.length,
text: this._parsedInput.slice(start.startIndex, stop.stopIndex + 1),
}
})
};
});
return res;
}
@ -240,10 +240,13 @@ export default abstract class BasicParser<
* @param caretPosition caret position, such as cursor position
* @returns suggestion
*/
public getSuggestionAtCaretPosition(input: string, caretPosition: CaretPosition): Suggestions | null {
public getSuggestionAtCaretPosition(
input: string,
caretPosition: CaretPosition
): Suggestions | null {
const splitListener = this.splitListener;
// TODO: add splitListener to all sqlParser implements add remove following if
if(!splitListener) return null;
if (!splitListener) return null;
this.parse(input);
let sqlParserIns = this._parser;
@ -252,7 +255,7 @@ export default abstract class BasicParser<
let c3Context: ParserRuleContext = this._parserTree;
let tokenIndexOffset: number = 0;
if(!caretTokenIndex && caretTokenIndex !== 0) return null;
if (!caretTokenIndex && caretTokenIndex !== 0) return null;
/**
* Split sql by statement.
@ -263,15 +266,18 @@ export default abstract class BasicParser<
// If there are multiple statements.
if (splitListener.statementsContext.length > 1) {
// find statement rule context where caretPosition is located.
const caretStatementContext = splitListener?.statementsContext.find(ctx => {
return caretTokenIndex <= ctx.stop?.tokenIndex && caretTokenIndex >= ctx.start.tokenIndex;
const caretStatementContext = splitListener?.statementsContext.find((ctx) => {
return (
caretTokenIndex <= ctx.stop?.tokenIndex &&
caretTokenIndex >= ctx.start.tokenIndex
);
});
if(caretStatementContext) {
c3Context = caretStatementContext
if (caretStatementContext) {
c3Context = caretStatementContext;
} else {
const lastStatementToken= splitListener
.statementsContext[splitListener?.statementsContext.length - 1]
const lastStatementToken =
splitListener.statementsContext[splitListener?.statementsContext.length - 1]
.start;
/**
* If caretStatementContext is not found and it follows all statements.
@ -304,28 +310,34 @@ export default abstract class BasicParser<
core.preferredRules = this.preferredRules;
const candidates = core.collectCandidates(caretTokenIndex, c3Context);
const originalSuggestions = this.processCandidates(candidates, allTokens, caretTokenIndex, tokenIndexOffset);
const originalSuggestions = this.processCandidates(
candidates,
allTokens,
caretTokenIndex,
tokenIndexOffset
);
const syntaxSuggestions: SyntaxSuggestion<WordRange>[] = originalSuggestions.syntax
.map(syntaxCtx => {
const wordRanges: WordRange[] = syntaxCtx.wordRanges.map(token => {
const syntaxSuggestions: SyntaxSuggestion<WordRange>[] = originalSuggestions.syntax.map(
(syntaxCtx) => {
const wordRanges: WordRange[] = syntaxCtx.wordRanges.map((token) => {
return {
text: this._parsedInput.slice(token.startIndex, token.stopIndex + 1),
startIndex: token.startIndex,
stopIndex: token.stopIndex,
line: token.line,
startColumn: token.charPositionInLine + 1,
stopColumn: token.charPositionInLine + token.text.length
}
})
stopColumn: token.charPositionInLine + token.text.length,
};
});
return {
syntaxContextType: syntaxCtx.syntaxContextType,
wordRanges,
};
}
})
);
return {
syntax: syntaxSuggestions,
keywords: originalSuggestions.keywords
}
keywords: originalSuggestions.keywords,
};
}
}

View File

@ -1,5 +1,5 @@
import { Token, Recognizer, ParserErrorListener, RecognitionException } from 'antlr4ts';
import { ATNSimulator } from 'antlr4ts/atn/ATNSimulator'
import { ATNSimulator } from 'antlr4ts/atn/ATNSimulator';
export interface ParserError {
startLine: number;
@ -30,11 +30,11 @@ export class ParserErrorCollector implements ParserErrorListener {
line: number,
charPositionInLine: number,
msg: string,
e: RecognitionException,
e: RecognitionException
) {
let endCol = charPositionInLine + 1;
if (offendingSymbol && offendingSymbol.text !== null) {
endCol = charPositionInLine + offendingSymbol.text.length;
endCol = charPositionInLine + (offendingSymbol.text?.length ?? 0);
}
this._parseErrors.push({
startLine: line,
@ -51,7 +51,7 @@ export class ParserErrorCollector implements ParserErrorListener {
recognizer,
offendingSymbol,
charPositionInLine,
})
});
}
clear() {
@ -59,8 +59,8 @@ export class ParserErrorCollector implements ParserErrorListener {
this._syntaxErrors = [];
}
get parserErrors () {
return this._parseErrors
get parserErrors() {
return this._parseErrors;
}
}
@ -72,28 +72,35 @@ export default class CustomParserErrorListener implements ParserErrorListener {
}
syntaxError(
recognizer: Recognizer<Token, ATNSimulator>, offendingSymbol: Token, line: number,
charPositionInLine: number, msg: string, e: RecognitionException,
recognizer: Recognizer<Token, ATNSimulator>,
offendingSymbol: Token,
line: number,
charPositionInLine: number,
msg: string,
e: RecognitionException
) {
let endCol = charPositionInLine + 1;
if (offendingSymbol && offendingSymbol.text !== null) {
endCol = charPositionInLine + offendingSymbol.text.length;
}
if (this._errorHandler) {
this._errorHandler({
this._errorHandler(
{
startLine: line,
endLine: line,
startCol: charPositionInLine,
endCol: endCol,
message: msg,
}, {
},
{
e,
line,
msg,
recognizer,
offendingSymbol,
charPositionInLine,
});
}
);
}
}
}

View File

@ -5,7 +5,7 @@ import {
FlinkSqlParser,
ProgramContext,
SqlStatementContext,
SqlStatementsContext
SqlStatementsContext,
} from '../lib/flinksql/FlinkSqlParser';
import { FlinkSqlParserListener } from '../lib/flinksql/FlinkSqlParserListener';
import { SyntaxContextType, Suggestions, SyntaxSuggestion } from './common/basic-parser-types';
@ -34,7 +34,7 @@ export default class FlinkSQL extends BasicParser<FlinkSqlLexer, ProgramContext,
FlinkSqlParser.RULE_functionNameCreate, // functionName that will be created
]);
protected get splitListener () {
protected get splitListener() {
return new FlinkSqlSplitListener();
}
@ -50,7 +50,10 @@ export default class FlinkSQL extends BasicParser<FlinkSqlLexer, ProgramContext,
for (let candidate of candidates.rules) {
const [ruleType, candidateRule] = candidate;
const startTokenIndex = candidateRule.startTokenIndex + tokenIndexOffset;
const tokenRanges = allTokens.slice(startTokenIndex, caretTokenIndex + tokenIndexOffset + 1);
const tokenRanges = allTokens.slice(
startTokenIndex,
caretTokenIndex + tokenIndexOffset + 1
);
let syntaxContextType: SyntaxContextType;
switch (ruleType) {
@ -78,15 +81,15 @@ export default class FlinkSQL extends BasicParser<FlinkSqlLexer, ProgramContext,
syntaxContextType = SyntaxContextType.VIEW;
break;
}
case FlinkSqlParser.RULE_viewPathCreate : {
case FlinkSqlParser.RULE_viewPathCreate: {
syntaxContextType = SyntaxContextType.VIEW_CREATE;
break;
}
case FlinkSqlParser.RULE_functionName : {
case FlinkSqlParser.RULE_functionName: {
syntaxContextType = SyntaxContextType.FUNCTION;
break;
}
case FlinkSqlParser.RULE_functionNameCreate : {
case FlinkSqlParser.RULE_functionNameCreate: {
syntaxContextType = SyntaxContextType.FUNCTION_CREATE;
break;
}
@ -97,25 +100,26 @@ export default class FlinkSQL extends BasicParser<FlinkSqlLexer, ProgramContext,
if (syntaxContextType) {
originalSyntaxSuggestions.push({
syntaxContextType,
wordRanges: tokenRanges
})
wordRanges: tokenRanges,
});
}
}
for (let candidate of candidates.tokens) {
const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
if(symbolicName && symbolicName.startsWith('KW_')) {
const keyword = displayName.startsWith("'") && displayName.endsWith("'")
if (symbolicName && symbolicName.startsWith('KW_')) {
const keyword =
displayName.startsWith("'") && displayName.endsWith("'")
? displayName.slice(1, -1)
: displayName
: displayName;
keywords.push(keyword);
}
}
return {
syntax: originalSyntaxSuggestions,
keywords,
}
};
}
}
@ -124,12 +128,11 @@ export class FlinkSqlSplitListener implements FlinkSqlParserListener {
exitSqlStatement = (ctx: SqlStatementContext) => {
this._statementsContext.push(ctx);
}
enterSqlStatements = (ctx: SqlStatementsContext) => {
};
get statementsContext () {
enterSqlStatements = (ctx: SqlStatementsContext) => {};
get statementsContext() {
return this._statementsContext;
}
}

View File

@ -17,7 +17,7 @@ export default class GenericSQL extends BasicParser<SqlLexer, ProgramContext, Sq
protected preferredRules: Set<number> = new Set();
protected get splitListener () {
protected get splitListener() {
return null as any;
}
@ -28,8 +28,7 @@ export default class GenericSQL extends BasicParser<SqlLexer, ProgramContext, Sq
): Suggestions<Token> {
return {
syntax: [],
keywords: []
}
keywords: [],
};
}
}

View File

@ -1,12 +1,17 @@
import { Token } from 'antlr4ts';
import { CandidatesCollection } from 'antlr4-c3';
import { HiveSqlLexer } from '../lib/hive/HiveSqlLexer';
import { HiveSqlParser, ProgramContext, StatementContext, ExplainStatementContext, ExecStatementContext } from '../lib/hive/HiveSqlParser';
import {
HiveSqlParser,
ProgramContext,
StatementContext,
ExplainStatementContext,
ExecStatementContext,
} from '../lib/hive/HiveSqlParser';
import BasicParser from './common/basicParser';
import { HiveSqlParserListener } from '../lib/hive/HiveSqlParserListener';
import { SyntaxContextType, Suggestions, SyntaxSuggestion } from './common/basic-parser-types';
export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, HiveSqlParser> {
protected createLexerFormCharStream(charStreams) {
const lexer = new HiveSqlLexer(charStreams);
@ -27,10 +32,9 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
HiveSqlParser.RULE_functionNameForDDL, // function name
HiveSqlParser.RULE_functionNameForInvoke, // function name
HiveSqlParser.RULE_functionNameCreate, // function name that will be created
]);
protected get splitListener () {
protected get splitListener() {
return new HiveSqlSplitListener();
}
@ -38,14 +42,17 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
candidates: CandidatesCollection,
allTokens: Token[],
caretTokenIndex: number,
tokenIndexOffset: number,
tokenIndexOffset: number
): Suggestions<Token> {
const originalSyntaxSuggestions: SyntaxSuggestion<Token>[] = [];
const keywords: string[] = [];
for (let candidate of candidates.rules) {
const [ruleType, candidateRule] = candidate;
const startTokenIndex = candidateRule.startTokenIndex + tokenIndexOffset;
const tokenRanges = allTokens.slice(startTokenIndex, caretTokenIndex + tokenIndexOffset + 1);
const tokenRanges = allTokens.slice(
startTokenIndex,
caretTokenIndex + tokenIndexOffset + 1
);
let syntaxContextType: SyntaxContextType;
switch (ruleType) {
@ -62,7 +69,7 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
break;
}
case HiveSqlParser.RULE_tableNameCreate: {
syntaxContextType = SyntaxContextType.TABLE_CREATE
syntaxContextType = SyntaxContextType.TABLE_CREATE;
break;
}
case HiveSqlParser.RULE_viewName: {
@ -98,7 +105,10 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
if (symbolicName && symbolicName.startsWith('KW_')) {
const keyword = displayName.startsWith("'") && displayName.endsWith("'") ? displayName.slice(1, -1) : displayName;
const keyword =
displayName.startsWith("'") && displayName.endsWith("'")
? displayName.slice(1, -1)
: displayName;
keywords.push(keyword);
}
}
@ -114,13 +124,11 @@ export class HiveSqlSplitListener implements HiveSqlParserListener {
exitStatement = (ctx: StatementContext) => {
this._statementContext.push(ctx);
}
enterStatement = (ctx: StatementContext) => {
};
enterStatement = (ctx: StatementContext) => {};
get statementsContext() {
return this._statementContext;
}
}

View File

@ -5,7 +5,11 @@ import { PostgreSQLParser, ProgramContext } from '../lib/pgsql/PostgreSQLParser'
import BasicParser from './common/basicParser';
import { Suggestions } from './common/basic-parser-types';
export default class PostgresSQL extends BasicParser<PostgreSQLLexer, ProgramContext, PostgreSQLParser> {
export default class PostgresSQL extends BasicParser<
PostgreSQLLexer,
ProgramContext,
PostgreSQLParser
> {
protected createLexerFormCharStream(charStreams) {
const lexer = new PostgreSQLLexer(charStreams);
return lexer;
@ -17,7 +21,7 @@ export default class PostgresSQL extends BasicParser<PostgreSQLLexer, ProgramCon
protected preferredRules: Set<number> = new Set();
protected get splitListener () {
protected get splitListener() {
return null as any;
}
@ -28,7 +32,7 @@ export default class PostgresSQL extends BasicParser<PostgreSQLLexer, ProgramCon
): Suggestions<Token> {
return {
syntax: [],
keywords: []
}
keywords: [],
};
}
}

View File

@ -17,7 +17,7 @@ export default class PLSQL extends BasicParser<PlSqlLexer, ProgramContext, PlSql
protected preferredRules: Set<number> = new Set();
protected get splitListener () {
protected get splitListener() {
return null as any;
}
@ -28,7 +28,7 @@ export default class PLSQL extends BasicParser<PlSqlLexer, ProgramContext, PlSql
): Suggestions<Token> {
return {
syntax: [],
keywords: []
}
keywords: [],
};
}
}

View File

@ -1,7 +1,11 @@
import { Token } from 'antlr4ts';
import { CandidatesCollection } from 'antlr4-c3';
import { SparkSqlLexer } from '../lib/spark/SparkSqlLexer';
import { SparkSqlParser, ProgramContext, SingleStatementContext } from '../lib/spark/SparkSqlParser';
import {
SparkSqlParser,
ProgramContext,
SingleStatementContext,
} from '../lib/spark/SparkSqlParser';
import BasicParser from './common/basicParser';
import { Suggestions, SyntaxContextType, SyntaxSuggestion } from './common/basic-parser-types';
import { SparkSqlParserListener } from 'src/lib/spark/SparkSqlParserListener';
@ -36,7 +40,7 @@ export default class SparkSQL extends BasicParser<SparkSqlLexer, ProgramContext,
candidates: CandidatesCollection,
allTokens: Token[],
caretTokenIndex: number,
tokenIndexOffset: number,
tokenIndexOffset: number
): Suggestions<Token> {
const originalSyntaxSuggestions: SyntaxSuggestion<Token>[] = [];
const keywords: string[] = [];
@ -44,7 +48,10 @@ export default class SparkSQL extends BasicParser<SparkSqlLexer, ProgramContext,
for (const candidate of candidates.rules) {
const [ruleType, candidateRule] = candidate;
const startTokenIndex = candidateRule.startTokenIndex + tokenIndexOffset;
const tokenRanges = allTokens.slice(startTokenIndex, caretTokenIndex + tokenIndexOffset + 1);
const tokenRanges = allTokens.slice(
startTokenIndex,
caretTokenIndex + tokenIndexOffset + 1
);
let syntaxContextType: SyntaxContextType;
switch (ruleType) {
@ -96,7 +103,10 @@ export default class SparkSQL extends BasicParser<SparkSqlLexer, ProgramContext,
const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
if (symbolicName && symbolicName.startsWith('KW_')) {
const keyword = displayName.startsWith("'") && displayName.endsWith("'") ? displayName.slice(1, -1) : displayName;
const keyword =
displayName.startsWith("'") && displayName.endsWith("'")
? displayName.slice(1, -1)
: displayName;
keywords.push(keyword);
}
}
@ -113,11 +123,10 @@ export class SparkSqlSplitListener implements SparkSqlParserListener {
exitSingleStatement = (ctx: SingleStatementContext) => {
this._statementsContext.push(ctx);
}
enterSingleStatement = (ctx: SingleStatementContext) => {
};
enterSingleStatement = (ctx: SingleStatementContext) => {};
get statementsContext() {
return this._statementsContext;
}

View File

@ -16,7 +16,7 @@ export default class TrinoSQL extends BasicParser<TrinoSqlLexer, ProgramContext,
return parser;
}
protected get splitListener () {
protected get splitListener() {
return null as any;
}
@ -29,8 +29,7 @@ export default class TrinoSQL extends BasicParser<TrinoSqlLexer, ProgramContext,
): Suggestions<Token> {
return {
syntax: [],
keywords: []
}
keywords: [],
};
}
}

View File

@ -1 +0,0 @@
declare type sql = string | string[]

View File

@ -1,5 +1,5 @@
import { Token } from "antlr4ts";
import { CaretPosition } from "../../src/parser/common/basic-parser-types";
import { Token } from 'antlr4ts';
import { CaretPosition } from '../../src/parser/common/basic-parser-types';
/**
* find token index via caret position (cursor position)
@ -12,21 +12,22 @@ export function findCaretTokenIndex(caretPosition: CaretPosition, allTokens: Tok
let left = 0;
let right = allTokens.length - 1;
while(left <= right) {
while (left <= right) {
const mid = left + ((right - left) >> 1);
const token = allTokens[mid];
if (token.line > caretLine || (
token.line === caretLine
&& token.charPositionInLine + 1 >= caretCol
)) {
if (
token.line > caretLine ||
(token.line === caretLine && token.charPositionInLine + 1 >= caretCol)
) {
right = mid - 1;
} else if (token.line < caretLine || (
token.line === caretLine
&& token.charPositionInLine + token.text.length + 1 < caretCol
)) {
} else if (
token.line < caretLine ||
(token.line === caretLine &&
token.charPositionInLine + token.text.length + 1 < caretCol)
) {
left = mid + 1;
} else {
return allTokens[mid].tokenIndex
return allTokens[mid].tokenIndex;
}
}
return null;

View File

@ -1,4 +1,3 @@
import { TokenType, Token, TokenReg } from './token';
/**
@ -15,7 +14,6 @@ function lexer(input: string): Token[] {
/**
* TokenType
*/
// eslint-disable-next-line
const extract = (currentChar: string, validator: RegExp, TokenType: TokenType): Token => {
let value = '';
const start = current;
@ -95,19 +93,16 @@ function lexer(input: string): Token[] {
}
if (TokenReg.BackQuotation.test(char)) {
// eslint-disable-next-line
matchQuotation(char, TokenReg.BackQuotation, TokenType.BackQuotation);
continue;
}
if (TokenReg.SingleQuotation.test(char)) {
// eslint-disable-next-line
matchQuotation(char, TokenReg.SingleQuotation, TokenType.SingleQuotation);
continue;
}
if (TokenReg.DoubleQuotation.test(char)) {
// eslint-disable-next-line
matchQuotation(char, TokenReg.DoubleQuotation, TokenType.DoubleQuotation);
continue;
}
@ -162,7 +157,7 @@ function lexer(input: string): Token[] {
const newToken = extract(
char,
TokenReg.StatementTerminator,
TokenType.StatementTerminator,
TokenType.StatementTerminator
);
tokens.push(newToken);
continue;
@ -211,8 +206,4 @@ function cleanSql(sql: string) {
resultSql += sql.slice(startIndex);
return resultSql;
}
export {
cleanSql,
splitSql,
lexer,
};
export { cleanSql, splitSql, lexer };

View File

@ -28,14 +28,14 @@ export enum TokenType {
*/
RightSmallBracket = 'RightSmallBracket',
Comma = 'Comma',
FunctionArguments = 'FunctionArguments'
FunctionArguments = 'FunctionArguments',
}
/**
* Token object
*/
export interface Token {
type: TokenType,
type: TokenType;
value: string;
start: number;
end: number;
@ -44,8 +44,8 @@ export interface Token {
}
/**
* Token recognition rules
*/
* Token recognition rules
*/
export const TokenReg = {
[TokenType.StatementTerminator]: /[;]/,
[TokenType.SingleQuotation]: /['|\']/,

View File

@ -10,7 +10,8 @@ export const readSQL = (dirname: string, fileName: string) => {
const char = content[index];
tmp += char;
const isMulti = tmp.includes('EXECUTE STATEMENT SET') || tmp.includes('BEGIN STATEMENT SET;');
const isMulti =
tmp.includes('EXECUTE STATEMENT SET') || tmp.includes('BEGIN STATEMENT SET;');
if (!isMulti) {
// Non-batch SQL is simply split by semicolons first
@ -46,13 +47,14 @@ export function benchmark(name: string, fn: Function, times: number = 1): [numbe
const totalTime = end - start;
const averageTime = totalTime / times;
const msg = `Benchmark: ${name} executed ${times} times. Total time: ${totalTime.toFixed(2)}ms. Average time: ${averageTime.toFixed(2)}ms`;
console.log(msg)
const msg = `Benchmark: ${name} executed ${times} times. Total time: ${totalTime.toFixed(
2
)}ms. Average time: ${averageTime.toFixed(2)}ms`;
console.log(msg);
return [totalTime, averageTime, msg];
}
export function getReportTableRow(name, rows, times, totalTime, averageTime) {
return `| ${name} | ${rows} | ${times} | ${totalTime.toFixed(2)} | ${averageTime.toFixed(2)} |`;
}

View File

@ -2,7 +2,13 @@ import path from 'path';
import { writeFileSync } from 'node:fs';
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL, benchmark, getReportTableHeader, getReportTableRow, exportReportTable } from '../../../helper';
import {
readSQL,
benchmark,
getReportTableHeader,
getReportTableRow,
exportReportTable,
} from '../../../helper';
const features = {
selectTable: readSQL(__dirname, 'selectTable.sql'),
@ -15,13 +21,11 @@ describe('FlinkSQL benchmark tests', () => {
let reportsHeader = getReportTableHeader('FlinkSQL Benchmark');
const reportData: string[] = [];
test('createTable Over 100 Rows', async () => {
const [totalTimes, averageTimes , msg] = benchmark('CreateTable Over 100 Rows', () => {
const [totalTimes, averageTimes, msg] = benchmark('CreateTable Over 100 Rows', () => {
const testSQL = features.createTable[0];
const res = parser.validate(testSQL);
expect(res).toEqual([])
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('CreateTable', 100, 1, totalTimes, averageTimes));
});
@ -30,91 +34,75 @@ describe('FlinkSQL benchmark tests', () => {
const [totalTimes, averageTimes, msg] = benchmark('CreateTable Over 1000 Rows', () => {
const testSQL = features.createTable[1];
const res = parser.validate(testSQL);
expect(res).toEqual([])
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('CreateTable', 1000, 1, totalTimes, averageTimes));
});
test('createTable Over 5000 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('CreateTable Over 5000 Rows', () => {
const testSQL = features.createTable[2];
const res = parser.validate(testSQL);
expect(res).toEqual([])
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('CreateTable', 5000, 1, totalTimes, averageTimes));
});
test('selectTable Over 100 Rows', async () => {
const [totalTimes, averageTimes , msg] = benchmark('SelectTable Over 100 Rows', () => {
const [totalTimes, averageTimes, msg] = benchmark('SelectTable Over 100 Rows', () => {
const testSQL = features.selectTable[0];
const res = parser.validate(testSQL);
expect(res).toEqual([])
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('SelectTable', 100, 1, totalTimes, averageTimes));
});
test('selectTable Over 1000 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('SelectTable Over 1000 Rows', () => {
const testSQL = features.selectTable[1];
const res = parser.validate(testSQL);
expect(res).toEqual([])
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('SelectTable', 1000, 1, totalTimes, averageTimes));
});
test('selectTable Over 5000 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('SelectTable Over 5000 Rows', () => {
const testSQL = features.selectTable[2];
const res = parser.validate(testSQL);
expect(res).toEqual([])
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('SelectTable', 5000, 1, totalTimes, averageTimes));
});
test('insertTable Over 100 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('InsertTable Over 100 Rows', () => {
const testSQL = features.insertTable[0];
const res = parser.validate(testSQL);
expect(res).toEqual([])
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('InsertTable', 100, 1, totalTimes, averageTimes));
});
test('insertTable Over 1000 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('InsertTable Over 1000 Rows', () => {
const testSQL = features.insertTable[1];
const res = parser.validate(testSQL);
expect(res).toEqual([])
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('InsertTable', 1000, 1, totalTimes, averageTimes));
});
test('insertTable Over 5000 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('InsertTable Over 5000 Rows', () => {
const testSQL = features.insertTable[2];
const res = parser.validate(testSQL);
expect(res).toEqual([])
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('InsertTable', 5000, 1, totalTimes, averageTimes));
});
afterAll(() => {
exportReportTable(reportsHeader + reportData.join('\n'), __dirname)
})
exportReportTable(reportsHeader + reportData.join('\n'), __dirname);
});
});

View File

@ -1,6 +1,7 @@
import FlinkSQL from '../../../src/parser/flinksql';
import { FlinkSqlParserListener } from '../../../src/lib/flinksql/FlinkSqlParserListener';
import { TableExpressionContext } from '../../../src/lib/flinksql/FlinkSqlParser';
import { ParseTreeListener } from 'antlr4ts/tree';
describe('Flink SQL Listener Tests', () => {
const expectTableName = 'user1';
@ -12,14 +13,13 @@ describe('Flink SQL Listener Tests', () => {
test('Listener enterTableName', async () => {
let result = '';
class MyListener implements FlinkSqlParserListener {
enterTableExpression = (ctx: TableExpressionContext): void => {
result = ctx.text.toLowerCase();
}
};
}
const listenTableName = new MyListener();
await parser.listen(listenTableName, parserTree);
await parser.listen(listenTableName as ParseTreeListener, parserTree);
expect(result).toBe(expectTableName);
});
});

View File

@ -1,9 +1,12 @@
import fs from 'fs';
import path from 'path';
import { CaretPosition, SyntaxContextType } from '../../../../src/parser/common/basic-parser-types';
import FlinkSQL from '../../../../src/parser/flinksql'
import FlinkSQL from '../../../../src/parser/flinksql';
const syntaxSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'), 'utf-8');
const syntaxSql = fs.readFileSync(
path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),
'utf-8'
);
const multipleSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'multipleSql.sql'), 'utf-8');
describe('Flink SQL Syntax Suggestion', () => {
@ -13,187 +16,201 @@ describe('Flink SQL Syntax Suggestion', () => {
expect(parser.validate(syntaxSql).length).not.toBe(0);
expect(parser.validate(syntaxSql).length).not.toBe(0);
expect(parser.validate(syntaxSql).length).not.toBe(0);
})
});
test("Multiple SQL use database", () => {
test('Multiple SQL use database', () => {
const pos: CaretPosition = {
lineNumber: 19,
column: 10,
}
};
const syntaxes = parser.getSuggestionAtCaretPosition(multipleSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.DATABASE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat1', '.' ]);
})
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cat1', '.']);
});
test('Drop catalog', () => {
const pos: CaretPosition = {
lineNumber: 1,
column: 17
}
column: 17,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.CATALOG);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.CATALOG
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat' ]);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cat']);
});
test('Select table', () => {
const pos: CaretPosition = {
lineNumber: 3,
column: 19
}
column: 19,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
);
console.log(syntaxes);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat', '.' ])
})
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cat', '.']);
});
test('Create table', () => {
const pos: CaretPosition = {
lineNumber: 5,
column: 20
}
column: 20,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat', '.', 'db' ])
})
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cat', '.', 'db']);
});
test('Show tables from', () => {
const pos: CaretPosition = {
lineNumber: 7,
column: 21
}
column: 21,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.DATABASE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat' ])
})
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cat']);
});
test('Alter database', () => {
const pos: CaretPosition = {
lineNumber: 9,
column: 20
}
column: 20,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.DATABASE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat', '.' ])
})
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cat', '.']);
});
test('Drop view', () => {
const pos: CaretPosition = {
lineNumber: 11,
column: 12
}
column: 12,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.VIEW);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'v' ]);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['v']);
});
test('Select view', () => {
const pos: CaretPosition = {
lineNumber: 13,
column: 15
}
column: 15,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.VIEW);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([]);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
});
test('Create view', () => {
const pos: CaretPosition = {
lineNumber: 15,
column: 15
}
column: 15,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual(['cv']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cv']);
});
test('Function call', () => {
const pos: CaretPosition = {
lineNumber: 17,
column: 27
}
column: 27,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.FUNCTION);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual(['calculate_age']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['calculate_age']);
});
test('Create Function', () => {
const pos: CaretPosition = {
lineNumber: 19,
column: 20
}
column: 20,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual(['fnc']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['fnc']);
});
test('Show columns from view', () => {
const pos: CaretPosition = {
lineNumber: 21,
column: 22
}
column: 22,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.VIEW);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual(['vie']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['vie']);
});
test('Show create table', () => {
const pos: CaretPosition = {
lineNumber: 23,
column: 22
}
column: 22,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual(['tb1']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['tb1']);
});
test('Show create view', () => {
const pos: CaretPosition = {
lineNumber: 25,
column: 20
}
column: 20,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.VIEW);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual(['v1']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['v1']);
});
})
});

View File

@ -1,7 +1,7 @@
import fs from 'fs';
import path from 'path';
import { CaretPosition } from '../../../../src/parser/common/basic-parser-types';
import FlinkSQL from '../../../../src/parser/flinksql'
import FlinkSQL from '../../../../src/parser/flinksql';
const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');
@ -11,34 +11,38 @@ describe('Flink SQL Token Suggestion', () => {
test('Use Statement ', () => {
const pos: CaretPosition = {
lineNumber: 3,
column: 5
}
column: 5,
};
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion)
.toEqual([ 'MODULES', 'CATALOG' ])
})
expect(suggestion).toEqual(['MODULES', 'CATALOG']);
});
test('Create Statement ', () => {
const pos: CaretPosition = {
lineNumber: 5,
column: 8
}
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion)
.toEqual([ 'CATALOG', 'FUNCTION', 'TEMPORARY', 'VIEW', 'DATABASE', 'TABLE' ])
})
expect(suggestion).toEqual([
'CATALOG',
'FUNCTION',
'TEMPORARY',
'VIEW',
'DATABASE',
'TABLE',
]);
});
test('Show Statement ', () => {
const pos: CaretPosition = {
lineNumber: 7,
column: 6
}
column: 6,
};
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion)
.toEqual([
expect(suggestion).toEqual([
'MODULES',
'FULL',
'FUNCTIONS',
@ -50,8 +54,7 @@ describe('Flink SQL Token Suggestion', () => {
'CATALOGS',
'DATABASES',
'JARS',
'VIEWS'
])
})
})
'VIEWS',
]);
});
});

View File

@ -1,5 +1,5 @@
import FlinkSQL from "../../../../src/parser/flinksql";
import { readSQL } from "../../../helper";
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';
const features = {
table: readSQL(__dirname, 'alterTable.sql'),
@ -33,4 +33,3 @@ describe('FlinkSQL Alter Statements Syntax Tests', () => {
});
});
});

View File

@ -1,9 +1,9 @@
import FlinkSQL from "../../../../src/parser/flinksql";
import { readSQL } from "../../../helper";
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';
// SQL for the comprehensive tests is not split
const features = {
chores: readSQL(__dirname, 'chore.sql')
chores: readSQL(__dirname, 'chore.sql'),
};
describe('FlinkSQL Chore Syntax Tests', () => {

View File

@ -1,10 +1,10 @@
import fs from 'fs';
import path from 'path';
import FlinkSQL from "../../../../src/parser/flinksql";
import FlinkSQL from '../../../../src/parser/flinksql';
// SQL containing comments is not split
const features = {
comments: fs.readFileSync(path.join(__dirname, 'fixtures', 'comment.sql'), 'utf-8')
comments: fs.readFileSync(path.join(__dirname, 'fixtures', 'comment.sql'), 'utf-8'),
};
describe('FlinkSQL Comment Syntax Tests', () => {

View File

@ -1,10 +1,10 @@
import fs from 'fs';
import path from 'path';
import FlinkSQL from "../../../../src/parser/flinksql";
import FlinkSQL from '../../../../src/parser/flinksql';
// SQL for the comprehensive tests is not split
const features = {
templates: fs.readFileSync(path.join(__dirname, 'fixtures', 'templates.sql'), 'utf-8')
templates: fs.readFileSync(path.join(__dirname, 'fixtures', 'templates.sql'), 'utf-8'),
};
describe('FlinkSQL Comprehensive Tests', () => {
@ -13,4 +13,3 @@ describe('FlinkSQL Comprehensive Tests', () => {
expect(parser.validate(features.templates).length).toBe(0);
});
});

View File

@ -1,8 +1,8 @@
import FlinkSQL from "../../../../src/parser/flinksql";
import { readSQL } from "../../../helper";
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';
const features = {
describes: readSQL(__dirname, 'describe.sql')
describes: readSQL(__dirname, 'describe.sql'),
};
describe('FlinkSQL Describe Syntax Tests', () => {

View File

@ -1,5 +1,5 @@
import FlinkSQL from "../../../../src/parser/flinksql";
import { readSQL } from "../../../helper";
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';
const features = {
table: readSQL(__dirname, 'dropTable.sql'),

View File

@ -1,8 +1,8 @@
import FlinkSQL from "../../../../src/parser/flinksql";
import { readSQL } from "../../../helper";
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';
const features = {
dtAddFiles: readSQL(__dirname, 'dtAddFile.sql')
dtAddFiles: readSQL(__dirname, 'dtAddFile.sql'),
};
describe('FlinkSQL DT Add File Syntax Tests', () => {

View File

@ -1,8 +1,8 @@
import FlinkSQL from "../../../../src/parser/flinksql";
import { readSQL } from "../../../helper";
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';
const features = {
explains: readSQL(__dirname, 'explain.sql')
explains: readSQL(__dirname, 'explain.sql'),
};
describe('FlinkSQL Explain Syntax Tests', () => {

View File

@ -1,13 +1,12 @@
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';
const parser = new FlinkSQL();
const features = {
InsertFromSelectQueries: readSQL(__dirname, 'insertFromSelectQueries.sql'),
InsertValuesIntoTable: readSQL(__dirname, 'insertValuesIntoTable.sql'),
InsertMultipleTable: readSQL(__dirname, 'insertMultipleTable.sql')
InsertMultipleTable: readSQL(__dirname, 'insertMultipleTable.sql'),
};
describe('FlinkSQL Insert Syntax Tests', () => {

View File

@ -1,22 +1,22 @@
import FlinkSQL from "../../../../src/parser/flinksql";
import { readSQL } from "../../../helper";
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';
const parser = new FlinkSQL();
const features = {
base: readSQL(__dirname, "select.sql"),
withClause: readSQL(__dirname, "selectWithClause.sql"),
distinct: readSQL(__dirname, "selectDistinct.sql"),
windowTVF: readSQL(__dirname, "selectWindowTVF.sql"),
aggregation: readSQL(__dirname, "selectAggregation.sql"),
join: readSQL(__dirname, "selectJoin.sql"),
setOperation: readSQL(__dirname, "selectSetOperations.sql"),
pattern: readSQL(__dirname, "selectPatternRecognition.sql"),
where: readSQL(__dirname, "selectWhere.sql"),
base: readSQL(__dirname, 'select.sql'),
withClause: readSQL(__dirname, 'selectWithClause.sql'),
distinct: readSQL(__dirname, 'selectDistinct.sql'),
windowTVF: readSQL(__dirname, 'selectWindowTVF.sql'),
aggregation: readSQL(__dirname, 'selectAggregation.sql'),
join: readSQL(__dirname, 'selectJoin.sql'),
setOperation: readSQL(__dirname, 'selectSetOperations.sql'),
pattern: readSQL(__dirname, 'selectPatternRecognition.sql'),
where: readSQL(__dirname, 'selectWhere.sql'),
};
describe("FlinkSQL Query Statement Tests", () => {
describe("Base Select", () => {
describe('FlinkSQL Query Statement Tests', () => {
describe('Base Select', () => {
features.base.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
@ -24,7 +24,7 @@ describe("FlinkSQL Query Statement Tests", () => {
});
});
describe("With Clause Select", () => {
describe('With Clause Select', () => {
features.withClause.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
@ -32,60 +32,59 @@ describe("FlinkSQL Query Statement Tests", () => {
});
});
describe("Select DISTINCT", () => {
describe('Select DISTINCT', () => {
features.distinct.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
})
})
});
});
describe("Select Window TVF", () => {
describe('Select Window TVF', () => {
features.windowTVF.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
})
})
});
});
describe("Select Aggregation", () => {
describe('Select Aggregation', () => {
features.aggregation.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
})
})
});
});
describe("Select Join", () => {
describe('Select Join', () => {
features.join.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
})
})
});
});
describe("Select Set Operations", () => {
describe('Select Set Operations', () => {
features.setOperation.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
})
})
});
});
describe("Select Pattern Recognition", () => {
describe('Select Pattern Recognition', () => {
features.pattern.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
})
})
});
});
describe("Select Where", () => {
describe('Select Where', () => {
features.where.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0)
})
})
})
expect(parser.validate(sql).length).toBe(0);
});
});
});
});

View File

@ -1,8 +1,8 @@
import FlinkSQL from "../../../../src/parser/flinksql";
import { readSQL } from "../../../helper";
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';
const features = {
shows: readSQL(__dirname, 'show.sql')
shows: readSQL(__dirname, 'show.sql'),
};
describe('FlinkSQL Show Syntax Tests', () => {

View File

@ -1,8 +1,8 @@
import FlinkSQL from "../../../../src/parser/flinksql";
import { readSQL } from "../../../helper";
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';
const features = {
uses: readSQL(__dirname, 'use.sql')
uses: readSQL(__dirname, 'use.sql'),
};
describe('FlinkSQL Use Syntax Tests', () => {

View File

@ -13,13 +13,16 @@ describe('Flink SQL Visitor Tests', () => {
test('Visitor visitTableName', () => {
let result = '';
class MyVisitor extends AbstractParseTreeVisitor<any> implements FlinkSqlParserVisitor<any>{
class MyVisitor
extends AbstractParseTreeVisitor<any>
implements FlinkSqlParserVisitor<any>
{
protected defaultResult() {
return result;
}
visitTableExpression = (ctx): void => {
result = ctx.text.toLowerCase();
}
};
}
const visitor: any = new MyVisitor();
visitor.visit(parserTree);

View File

@ -1,5 +1,6 @@
import GenericSQL from '../../../src/parser/generic';
import { SqlParserListener } from '../../../src/lib/generic/SqlParserListener';
import { ParseTreeListener } from 'antlr4ts/tree';
describe('Generic SQL Listener Tests', () => {
const expectTableName = 'user1';
@ -13,11 +14,11 @@ describe('Generic SQL Listener Tests', () => {
class MyListener implements SqlParserListener {
enterTableName = (ctx): void => {
result = ctx.text.toLowerCase();
}
};
}
const listenTableName: any = new MyListener();
await parser.listen(listenTableName, parserTree);
await parser.listen(listenTableName as ParseTreeListener, parserTree);
expect(result).toBe(expectTableName);
});
});

View File

@ -20,7 +20,7 @@ describe('Generic SQL Visitor Tests', () => {
visitTableName = (ctx): void => {
result = ctx.text.toLowerCase();
}
};
}
const visitor = new MyVisitor();
visitor.visit(parserTree);

View File

@ -1,8 +1,8 @@
import { ParseTreeListener } from 'antlr4ts/tree';
import { ProgramContext } from '../../../src/lib/hive/HiveSqlParser';
import { HiveSqlParserListener } from '../../../src/lib/hive/HiveSqlParserListener';
import HiveSQL from '../../../src/parser/hive';
describe('HiveSQL Listener Tests', () => {
const parser = new HiveSQL();
test('Listener enterSelectList', async () => {
@ -16,9 +16,9 @@ describe('HiveSQL Listener Tests', () => {
result = ctx.text;
}
}
const listenTableName: any = new MyListener();
const listenTableName = new MyListener();
await parser.listen(listenTableName, parserTree as ProgramContext);
await parser.listen(listenTableName as ParseTreeListener, parserTree as ProgramContext);
expect(result).toBe(expectTableName.toUpperCase());
});
test('Listener enterCreateTable', async () => {
@ -30,9 +30,9 @@ describe('HiveSQL Listener Tests', () => {
result = ctx.text;
}
}
const listenTableName: any = new MyListener();
const listenTableName = new MyListener();
await parser.listen(listenTableName, parserTree as ProgramContext);
await parser.listen(listenTableName as ParseTreeListener, parserTree as ProgramContext);
expect(result).toBe('DROPTABLETABLE_NAME');
});
});

View File

@ -1,9 +1,12 @@
import fs from 'fs';
import path from 'path';
import { CaretPosition, SyntaxContextType } from '../../../../src/parser/common/basic-parser-types';
import HiveSQL from '../../../../src/parser/hive'
import HiveSQL from '../../../../src/parser/hive';
const syntaxSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'), 'utf-8');
const syntaxSql = fs.readFileSync(
path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),
'utf-8'
);
describe('Hive SQL Syntax Suggestion', () => {
const parser = new HiveSQL();
@ -17,131 +20,140 @@ describe('Hive SQL Syntax Suggestion', () => {
test('Insert table ', () => {
const pos: CaretPosition = {
lineNumber: 1,
column: 18
}
column: 18,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db', '.', 'tb' ])
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'tb']);
});
test('Select table ', () => {
const pos: CaretPosition = {
lineNumber: 3,
column: 18
}
column: 18,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db', '.' ])
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
});
test('Create table ', () => {
const pos: CaretPosition = {
lineNumber: 5,
column: 17
}
column: 17,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db', '.' ])
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
});
test('DROP table ', () => {
const pos: CaretPosition = {
lineNumber: 7,
column: 26
}
column: 26,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db', '.', 'a' ])
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'a']);
});
test('Create view ', () => {
const pos: CaretPosition = {
lineNumber: 9,
column: 28
}
column: 28,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db', '.', 'v' ])
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'v']);
});
test('Drop view ', () => {
const pos: CaretPosition = {
lineNumber: 11,
column: 15
}
column: 15,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.VIEW);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db', '.', 'v' ])
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'v']);
});
test('Create function ', () => {
const pos: CaretPosition = {
lineNumber: 13,
column: 20
}
column: 20,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'fn1' ])
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['fn1']);
});
test('Use function', () => {
const pos: CaretPosition = {
lineNumber: 15,
column: 27
}
column: 27,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.FUNCTION);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'calculate_age' ])
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['calculate_age']);
});
test('Create database', () => {
const pos: CaretPosition = {
lineNumber: 17,
column: 19
}
column: 19,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db' ])
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db']);
});
test('Drop database', () => {
const pos: CaretPosition = {
lineNumber: 19,
column: 26
}
column: 26,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.DATABASE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'sch' ])
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['sch']);
});
})
});


@ -1,232 +1,191 @@
import fs from "fs";
import path from "path";
import { CaretPosition } from "../../../../src/parser/common/basic-parser-types";
import HiveSQL from "../../../../src/parser/hive";
import fs from 'fs';
import path from 'path';
import { CaretPosition } from '../../../../src/parser/common/basic-parser-types';
import HiveSQL from '../../../../src/parser/hive';
const tokenSql = fs.readFileSync(
path.join(__dirname, "fixtures", "tokenSuggestion.sql"),
"utf-8"
);
const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');
describe("Hive SQL Syntax Suggestion", () => {
describe('Hive SQL Syntax Suggestion', () => {
const parser = new HiveSQL();
test("After ALTER", () => {
test('After ALTER', () => {
const pos: CaretPosition = {
lineNumber: 1,
column: 7,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
"APPLICATION",
"GROUP",
"USER",
"POOL",
"TRIGGER",
"RESOURCE",
"SCHEDULED",
"INDEX",
"CONNECTOR",
"DATABASE",
"SCHEMA",
"MATERIALIZED",
"VIEW",
"TABLE",
'APPLICATION',
'GROUP',
'USER',
'POOL',
'TRIGGER',
'RESOURCE',
'SCHEDULED',
'INDEX',
'CONNECTOR',
'DATABASE',
'SCHEMA',
'MATERIALIZED',
'VIEW',
'TABLE',
]);
});
test("After CREATE", () => {
test('After CREATE', () => {
const pos: CaretPosition = {
lineNumber: 3,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
"CONNECTOR",
"APPLICATION",
"GROUP",
"USER",
"POOL",
"TRIGGER",
"RESOURCE",
"ROLE",
"INDEX",
"TEMPORARY",
"FUNCTION",
"SCHEDULED",
"MATERIALIZED",
"VIEW",
"OR",
"MANAGED",
"TABLE",
"EXTERNAL",
"TRANSACTIONAL",
"REMOTE",
"DATABASE",
"SCHEMA",
'CONNECTOR',
'APPLICATION',
'GROUP',
'USER',
'POOL',
'TRIGGER',
'RESOURCE',
'ROLE',
'INDEX',
'TEMPORARY',
'FUNCTION',
'SCHEDULED',
'MATERIALIZED',
'VIEW',
'OR',
'MANAGED',
'TABLE',
'EXTERNAL',
'TRANSACTIONAL',
'REMOTE',
'DATABASE',
'SCHEMA',
]);
});
test("After DELETE", () => {
test('After DELETE', () => {
const pos: CaretPosition = {
lineNumber: 5,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual(['FROM']);
});
test("After DESCRIBE", () => {
test('After DESCRIBE', () => {
const pos: CaretPosition = {
lineNumber: 7,
column: 10,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
"EXTENDED",
"FORMATTED",
"FUNCTION",
"CONNECTOR",
"DATABASE",
"SCHEMA",
'EXTENDED',
'FORMATTED',
'FUNCTION',
'CONNECTOR',
'DATABASE',
'SCHEMA',
]);
});
test("After DROP", () => {
test('After DROP', () => {
const pos: CaretPosition = {
lineNumber: 9,
column: 6,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
"CONNECTOR",
"APPLICATION",
"GROUP",
"USER",
"POOL",
"TRIGGER",
"RESOURCE",
"ROLE",
"INDEX",
"TEMPORARY",
"FUNCTION",
"MATERIALIZED",
"VIEW",
"SCHEDULED",
"TABLE",
"DATABASE",
"SCHEMA",
'CONNECTOR',
'APPLICATION',
'GROUP',
'USER',
'POOL',
'TRIGGER',
'RESOURCE',
'ROLE',
'INDEX',
'TEMPORARY',
'FUNCTION',
'MATERIALIZED',
'VIEW',
'SCHEDULED',
'TABLE',
'DATABASE',
'SCHEMA',
]);
});
test("After EXPORT", () => {
test('After EXPORT', () => {
const pos: CaretPosition = {
lineNumber: 11,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual(['TABLE']);
});
test("After IMPORT", () => {
test('After IMPORT', () => {
const pos: CaretPosition = {
lineNumber: 13,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
expect(suggestion).toEqual([
"FROM",
"TABLE",
"EXTERNAL",
]);
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual(['FROM', 'TABLE', 'EXTERNAL']);
});
test("After INSERT", () => {
test('After INSERT', () => {
const pos: CaretPosition = {
lineNumber: 15,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
expect(suggestion).toEqual([
"INTO",
"OVERWRITE",
]);
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual(['INTO', 'OVERWRITE']);
});
test("After LOAD", () => {
test('After LOAD', () => {
const pos: CaretPosition = {
lineNumber: 17,
column: 6,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
expect(suggestion).toEqual(["DATA"
]);
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual(['DATA']);
});
test("After SHOW", () => {
test('After SHOW', () => {
const pos: CaretPosition = {
lineNumber: 19,
column: 6,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
"CURRENT",
"ROLES",
"PRINCIPALS",
"ROLE",
"GRANT",
"INDEX",
"INDEXES",
"FORMATTED",
"CONNECTORS",
"RESOURCE",
"CONF",
"TRANSACTIONS",
"COMPACTIONS",
"LOCKS",
"TBLPROPERTIES",
"TABLE",
"CREATE",
"PARTITIONS",
"FUNCTIONS",
"COLUMNS",
"SORTED",
"MATERIALIZED",
"VIEWS",
"TABLES",
"EXTENDED",
"DATABASES",
"SCHEMAS",
'CURRENT',
'ROLES',
'PRINCIPALS',
'ROLE',
'GRANT',
'INDEX',
'INDEXES',
'FORMATTED',
'CONNECTORS',
'RESOURCE',
'CONF',
'TRANSACTIONS',
'COMPACTIONS',
'LOCKS',
'TBLPROPERTIES',
'TABLE',
'CREATE',
'PARTITIONS',
'FUNCTIONS',
'COLUMNS',
'SORTED',
'MATERIALIZED',
'VIEWS',
'TABLES',
'EXTENDED',
'DATABASES',
'SCHEMAS',
]);
});
});


@ -12,7 +12,7 @@ const features = {
indexes: readSQL(__dirname, 'createIndex.sql'),
macros: readSQL(__dirname, 'createMacro.sql'),
connectors: readSQL(__dirname, 'createConnector.sql'),
scheduledQueries: readSQL(__dirname, 'createScheduledQuery.sql')
scheduledQueries: readSQL(__dirname, 'createScheduledQuery.sql'),
};
describe('HiveSQL Create Syntax Tests', () => {


@ -5,7 +5,7 @@ const parser = new HiveSQL();
const features = {
drops: readSQL(__dirname, 'drop.sql'),
reloads: readSQL(__dirname, 'reload.sql')
reloads: readSQL(__dirname, 'reload.sql'),
};
describe('HiveSQL Drop Syntax Tests', () => {


@ -2,7 +2,7 @@ import HiveSQL from '../../../../src/parser/hive';
import { readSQL } from '../../../helper';
const features = {
exports: readSQL(__dirname, 'export.sql')
exports: readSQL(__dirname, 'export.sql'),
};
describe('HiveSQL Export Syntax Tests', () => {
@ -14,4 +14,3 @@ describe('HiveSQL Export Syntax Tests', () => {
});
});
});


@ -1,8 +1,8 @@
import HiveSQL from '../../../../src/parser/hive';
import { readSQL } from "../../../helper";
import { readSQL } from '../../../helper';
const features = {
imports: readSQL(__dirname, 'import.sql')
imports: readSQL(__dirname, 'import.sql'),
};
describe('HiveSQL Import Syntax Tests', () => {


@ -5,7 +5,7 @@ const parser = new HiveSQL();
const features = {
insertFromQueries: readSQL(__dirname, 'insertFromQuery.sql'),
insertFromValues: readSQL(__dirname, 'insertFormValues.sql')
insertFromValues: readSQL(__dirname, 'insertFormValues.sql'),
};
describe('HiveSQL Insert Syntax Tests', () => {


@ -15,7 +15,6 @@ describe('HiveSQL Visitor Tests', () => {
test('Visitor visitTableName', () => {
let result = '';
class MyVisitor extends AbstractParseTreeVisitor<any> implements HiveSqlParserVisitor<any> {
defaultResult() {
return result;
}


@ -1,3 +1,4 @@
import { ParseTreeListener } from 'antlr4ts/tree';
import { Target_listContext } from '../../../src/lib/pgsql/PostgreSQLParser';
import { PostgreSQLParserListener } from '../../../src/lib/pgsql/PostgreSQLParserListener';
import PostgresSQL from '../../../src/parser/pgsql';
@ -16,9 +17,9 @@ describe('PostgresSQL Listener Tests', () => {
result = ctx.text.toLowerCase();
}
}
const listenTableName: any = new MyListener();
const listenTableName = new MyListener();
await parser.listen(listenTableName, parserTree);
await parser.listen(listenTableName as ParseTreeListener, parserTree);
expect(result).toBe(expectTableName);
});
});


@ -1,14 +1,14 @@
import PostgresSQL from "../../../src/parser/pgsql";
import { readSQL } from "../../helper";
import PostgresSQL from '../../../src/parser/pgsql';
import { readSQL } from '../../helper';
const parser = new PostgresSQL();
const features = {
base: readSQL(__dirname, "select.sql"),
base: readSQL(__dirname, 'select.sql'),
};
describe("Postgre SQL Query Statement Tests", () => {
describe("Base Select", () => {
describe('Postgre SQL Query Statement Tests', () => {
describe('Base Select', () => {
features.base.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);


@ -1,4 +1,4 @@
import PostgresSQL from "../../../src/parser/pgsql";
import PostgresSQL from '../../../src/parser/pgsql';
describe('PostgresSQL SQL Syntax Tests', () => {
const parser = new PostgresSQL();
@ -21,5 +21,4 @@ describe('PostgresSQL SQL Syntax Tests', () => {
const result = parser.validate(sql);
expect(result.length).toBe(0);
});
});


@ -1,6 +1,6 @@
import { AbstractParseTreeVisitor } from "antlr4ts/tree/AbstractParseTreeVisitor";
import { PostgreSQLParserVisitor } from "../../../src/lib/pgsql/PostgreSQLParserVisitor";
import PostgresSQL from "../../../src/parser/pgsql";
import { AbstractParseTreeVisitor } from 'antlr4ts/tree/AbstractParseTreeVisitor';
import { PostgreSQLParserVisitor } from '../../../src/lib/pgsql/PostgreSQLParserVisitor';
import PostgresSQL from '../../../src/parser/pgsql';
describe('Generic SQL Visitor Tests', () => {
const expectTableName = 'user1';
@ -13,7 +13,10 @@ describe('Generic SQL Visitor Tests', () => {
test('Visitor visitTableName', () => {
let result = '';
class MyVisitor extends AbstractParseTreeVisitor<any> implements PostgreSQLParserVisitor<any> {
class MyVisitor
extends AbstractParseTreeVisitor<any>
implements PostgreSQLParserVisitor<any>
{
protected defaultResult() {
return result;
}


@ -1,3 +1,4 @@
import { ParseTreeListener } from 'antlr4ts/tree';
import { PlSqlParserListener } from '../../../src/lib/plsql/PlSqlParserListener';
import PLSQL from '../../../src/parser/plsql';
@ -11,14 +12,13 @@ describe('PLSQL Listener Tests', () => {
test('Listener enterTableName', async () => {
let result = '';
class MyListener implements PlSqlParserListener {
// eslint-disable-next-line camelcase
enterTable_ref_list = (ctx): void => {
result = ctx.text.toLowerCase();
};
}
}
const listenTableName: any = new MyListener();
const listenTableName = new MyListener();
await parser.listen(listenTableName, parserTree);
await parser.listen(listenTableName as ParseTreeListener, parserTree);
expect(result).toBe(expectTableName);
});
});


@ -15,10 +15,9 @@ describe('PLSQL Visitor Tests', () => {
protected defaultResult() {
return result;
}
// eslint-disable-next-line camelcase
visitTable_ref_list = (ctx): void => {
result = ctx.text.toLowerCase();
}
};
}
const visitor: any = new MyVisitor();
visitor.visit(parserTree);


@ -1,3 +1,4 @@
import { ParseTreeListener } from 'antlr4ts/tree';
import { SparkSqlParserListener } from '../../../src/lib/spark/SparkSqlParserListener';
import SparkSQL from '../../../src/parser/spark';
@ -13,11 +14,11 @@ describe('Spark SQL Listener Tests', () => {
class MyListener implements SparkSqlParserListener {
exitRelationPrimary = (ctx): void => {
result = ctx.text.toLowerCase();
}
};
}
const listenTableName = new MyListener();
parser.listen(listenTableName, parserTree);
parser.listen(listenTableName as ParseTreeListener, parserTree);
expect(result).toBe(expectTableName);
});
});


@ -3,7 +3,10 @@ import path from 'path';
import { CaretPosition, SyntaxContextType } from '../../../../src/parser/common/basic-parser-types';
import SparkSQL from '../../../../src/parser/spark';
const syntaxSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'), 'utf-8');
const syntaxSql = fs.readFileSync(
path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),
'utf-8'
);
describe('Spark SQL Syntax Suggestion', () => {
const parser = new SparkSQL();
@ -20,11 +23,12 @@ describe('Spark SQL Syntax Suggestion', () => {
column: 18,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.TABLE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text))
.toEqual(['db', '.', 'tb']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'tb']);
});
test('Select table ', () => {
@ -33,11 +37,12 @@ describe('Spark SQL Syntax Suggestion', () => {
column: 18,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.TABLE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text))
.toEqual(['db', '.']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
});
test('Create table ', () => {
@ -46,11 +51,12 @@ describe('Spark SQL Syntax Suggestion', () => {
column: 17,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text))
.toEqual(['db', '.']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
});
test('DROP table ', () => {
@ -59,11 +65,12 @@ describe('Spark SQL Syntax Suggestion', () => {
column: 26,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.TABLE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text))
.toEqual(['db', '.', 'a']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'a']);
});
test('Create view ', () => {
@ -72,11 +79,12 @@ describe('Spark SQL Syntax Suggestion', () => {
column: 28,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text))
.toEqual(['db', '.', 'v']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'v']);
});
test('Drop view ', () => {
@ -85,11 +93,12 @@ describe('Spark SQL Syntax Suggestion', () => {
column: 15,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.VIEW);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text))
.toEqual(['db', '.', 'v']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'v']);
});
test('Create function ', () => {
@ -98,11 +107,12 @@ describe('Spark SQL Syntax Suggestion', () => {
column: 20,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text))
.toEqual(['fn1']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['fn1']);
});
test('Use function', () => {
@ -111,11 +121,12 @@ describe('Spark SQL Syntax Suggestion', () => {
column: 27,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text))
.toEqual(['calculate_age']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['calculate_age']);
});
test('Create database', () => {
@ -124,11 +135,12 @@ describe('Spark SQL Syntax Suggestion', () => {
column: 19,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text))
.toEqual(['db']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db']);
});
test('Drop database', () => {
@ -137,10 +149,11 @@ describe('Spark SQL Syntax Suggestion', () => {
column: 26,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.DATABASE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text))
.toEqual(['sch']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['sch']);
});
});


@ -13,19 +13,9 @@ describe('Spark SQL Syntax Suggestion', () => {
lineNumber: 1,
column: 7,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos,
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
'TABLE',
'INDEX',
'VIEW',
'DATABASE',
'NAMESPACE',
'SCHEMA',
]);
expect(suggestion).toEqual(['TABLE', 'INDEX', 'VIEW', 'DATABASE', 'NAMESPACE', 'SCHEMA']);
});
test('After CREATE', () => {
@ -33,10 +23,7 @@ describe('Spark SQL Syntax Suggestion', () => {
lineNumber: 3,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos,
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
'TEMPORARY',
@ -59,10 +46,7 @@ describe('Spark SQL Syntax Suggestion', () => {
lineNumber: 5,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos,
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual(['FROM']);
});
@ -72,10 +56,7 @@ describe('Spark SQL Syntax Suggestion', () => {
lineNumber: 7,
column: 10,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos,
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
'WITH',
@ -98,10 +79,7 @@ describe('Spark SQL Syntax Suggestion', () => {
lineNumber: 9,
column: 6,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos,
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
'TEMPORARY',
@ -121,15 +99,9 @@ describe('Spark SQL Syntax Suggestion', () => {
lineNumber: 11,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos,
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
'OVERWRITE',
'INTO',
]);
expect(suggestion).toEqual(['OVERWRITE', 'INTO']);
});
test('After LOAD', () => {
@ -137,14 +109,9 @@ describe('Spark SQL Syntax Suggestion', () => {
lineNumber: 13,
column: 6,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos,
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
'DATA',
]);
expect(suggestion).toEqual(['DATA']);
});
test('After SHOW', () => {
@ -152,10 +119,7 @@ describe('Spark SQL Syntax Suggestion', () => {
lineNumber: 15,
column: 6,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos,
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
'LOCKS',
@ -190,10 +154,7 @@ describe('Spark SQL Syntax Suggestion', () => {
lineNumber: 17,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos,
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual(['TABLE']);
});


@ -1,7 +1,6 @@
import SparkSQL from '../../../../src/parser/spark';
import { readSQL } from '../../../helper';
const parser = new SparkSQL();
const features = {


@ -1,7 +1,6 @@
import SparkSQL from '../../../../src/parser/spark';
import { readSQL } from '../../../helper';
const parser = new SparkSQL();
const features = {


@ -12,14 +12,17 @@ describe('Spark SQL Visitor Tests', () => {
});
test('Visitor visitRelationPrimary', () => {
class MyVisitor extends AbstractParseTreeVisitor<any> implements SparkSqlParserVisitor<any> {
class MyVisitor
extends AbstractParseTreeVisitor<any>
implements SparkSqlParserVisitor<any>
{
result: string = '';
protected defaultResult() {
return this.result;
}
visitRelationPrimary = (ctx): void => {
this.result = ctx.text.toLowerCase();
}
};
}
const visitor = new MyVisitor();
visitor.visit(parserTree);


@ -1,5 +1,6 @@
import trinoSQL from '../../../src/parser/trinosql';
import { TrinoSqlListener } from '../../../src/lib/trinosql/TrinoSqlListener';
import { ParseTreeListener } from 'antlr4ts/tree';
describe('trino SQL Listener Tests', () => {
const expectTableName = 'user1';
@ -11,14 +12,13 @@ describe('trino SQL Listener Tests', () => {
test('Listener enterTableName', async () => {
let result = '';
class MyListener implements TrinoSqlListener {
enterTableName = (ctx): void => {
result = ctx.text.toLowerCase();
};
}
const listenTableName = new MyListener();
await parser.listen(listenTableName, parserTree);
await parser.listen(listenTableName as ParseTreeListener, parserTree);
expect(result).toBe(expectTableName);
});
});


@ -1,11 +1,11 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
table: readSQL(__dirname, 'alter_table.sql'),
view: readSQL(__dirname, 'alter_view.sql'),
schema: readSQL(__dirname, 'alter_schema.sql'),
materializedView: readSQL(__dirname, 'alter_materialized_view.sql')
materializedView: readSQL(__dirname, 'alter_materialized_view.sql'),
};
describe('TrinoSQL Alter Statements Syntax Tests', () => {
@ -31,4 +31,3 @@ describe('TrinoSQL Alter Statements Syntax Tests', () => {
});
});
});


@ -1,9 +1,8 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
analyze: readSQL(__dirname, 'analyze.sql'),
};
describe('TrinoSQL Analyze Statements Syntax Tests', () => {
@ -15,4 +14,3 @@ describe('TrinoSQL Analyze Statements Syntax Tests', () => {
});
});
});


@ -1,9 +1,8 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
call: readSQL(__dirname, 'call.sql'),
};
describe('TrinoSQL Call Statements Syntax Tests', () => {
@ -15,4 +14,3 @@ describe('TrinoSQL Call Statements Syntax Tests', () => {
});
});
});


@ -1,9 +1,8 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
comment: readSQL(__dirname, 'comment.sql'),
};
describe('TrinoSQL Comment Statements Syntax Tests', () => {
@ -15,4 +14,3 @@ describe('TrinoSQL Comment Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
commit: readSQL(__dirname, 'commit.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Commit Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
table: readSQL(__dirname, 'create_table.sql'),
@ -43,6 +43,4 @@ describe('TrinoSQL Create Statements Syntax Tests', () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
deallocatePrepare: readSQL(__dirname, 'deallocate_prepare.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL deallocatePrepare Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
delete: readSQL(__dirname, 'delete.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Delete Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
deny: readSQL(__dirname, 'deny.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Deny Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
describe: readSQL(__dirname, 'describe.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Describe Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
table: readSQL(__dirname, 'drop_table.sql'),
@ -43,6 +43,4 @@ describe('TrinoSQL Drop Statements Syntax Tests', () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
execute: readSQL(__dirname, 'execute.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Execute Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
explain: readSQL(__dirname, 'explain.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Explain Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
grant: readSQL(__dirname, 'grant.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Grant Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
insertIntoTable: readSQL(__dirname, 'insert_into.sql'),
@ -13,4 +13,3 @@ describe('TrinoSQL Insert Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
matchRecognize: readSQL(__dirname, 'match_recognize.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Match Recognize Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
merge: readSQL(__dirname, 'merge.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Merge Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
prepare: readSQL(__dirname, 'prepare.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Prepare Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
refreshMaterializedView: readSQL(__dirname, 'refresh_materialized_view.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Refresh Materialized View Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
resetSession: readSQL(__dirname, 'reset_session.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Reset Session Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
revoke: readSQL(__dirname, 'revoke.sql'),
@ -20,4 +20,3 @@ describe('TrinoSQL Revoke Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
rollbackTransaction: readSQL(__dirname, 'rollback_transaction.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Rollback Transaction Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
select: readSQL(__dirname, 'select.sql'),
@ -13,7 +13,7 @@ const features = {
selectWithFetch: readSQL(__dirname, 'select_with_fetch.sql'),
selectWithUNNEST: readSQL(__dirname, 'select_with_ unnest.sql'),
selectWithExists: readSQL(__dirname, 'select_with_exists.sql'),
selectWithUnion: readSQL(__dirname, 'select_with_union.sql')
selectWithUnion: readSQL(__dirname, 'select_with_union.sql'),
};
describe('TrinoSQL Select Statements Syntax Tests', () => {
@ -80,4 +80,3 @@ describe('TrinoSQL Select Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
role: readSQL(__dirname, 'set_role.sql'),
@ -32,4 +32,3 @@ describe('TrinoSQL Set Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
tables: readSQL(__dirname, 'show_tables.sql'),
@ -75,4 +75,3 @@ describe('TrinoSQL Show Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
startTransaction: readSQL(__dirname, 'start_transaction.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Start Transaction Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
truncateTable: readSQL(__dirname, 'truncate_table.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Truncate Table Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
update: readSQL(__dirname, 'update.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Update Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
use: readSQL(__dirname, 'use.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Use Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
values: readSQL(__dirname, 'values.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Values Statements Syntax Tests', () => {
});
});
});


@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
windowWithRowPatternRecognition: readSQL(__dirname, 'window_with_row_pattern_recognition.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Window With Row Pattern Recognition Statements Syntax Tests',
});
});
});

Some files were not shown because too many files have changed in this diff.