refactor: standard naming (#278)

* refactor: rename flinksql to flink

* refactor: rename pgsql to postgresql

* refactor: rename trinosql to trino

* refactor: replace all default exports with named export

* refactor: rename basicParser to basicSQL

* refactor: rename basic-parser-types to types

* refactor: replace arrow func with plain func
This commit is contained in:
Hayden 2024-03-27 10:33:25 +08:00 committed by GitHub
parent a99721162b
commit bb0fad1dbe
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
325 changed files with 33161 additions and 33202 deletions

View File

@ -11,7 +11,7 @@ options {
}
@header {
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
}
program

View File

@ -32,7 +32,7 @@ options
}
@header {
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
}
program

View File

@ -26,7 +26,7 @@ options
}
@header {
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
}
program

View File

@ -40,7 +40,7 @@ options {
}
@header {
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
}
// Top Level Description

View File

@ -35,7 +35,7 @@ options {
caseInsensitive= true;
}
@lexer::header {
import PlSqlBaseLexer from "./PlSqlBaseLexer";
import { PlSqlBaseLexer } from "./PlSqlBaseLexer";
}
ABORT : 'ABORT';

View File

@ -36,7 +36,7 @@ options {
}
@parser::header {
import PlSqlBaseParser from './PlSqlBaseParser';
import { PlSqlBaseParser } from './PlSqlBaseParser';
}
program

View File

@ -34,7 +34,7 @@
// $antlr-format singleLineOverrulesHangingColon true, alignLexerCommands true, alignLabels true, alignTrailers true
// $antlr-format spaceBeforeAssignmentOperators false, groupedAlignments true
lexer grammar PostgreSQLLexer;
lexer grammar PostgreSqlLexer;
options {
caseInsensitive= true;

View File

@ -37,16 +37,16 @@
// $antlr-format allowShortRulesOnASingleLine false, allowShortBlocksOnASingleLine true, alignSemicolons hanging, alignColons hanging
// $antlr-format spaceBeforeAssignmentOperators false, keepEmptyLinesAtTheStartOfBlocks true
parser grammar PostgreSQLParser;
parser grammar PostgreSqlParser;
options {
tokenVocab= PostgreSQLLexer;
tokenVocab= PostgreSqlLexer;
caseInsensitive= true;
superClass=SQLParserBase;
}
@header {
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
}
program

View File

@ -31,7 +31,7 @@ options {
}
@header {
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
}
program

View File

@ -29,7 +29,7 @@ options {
}
@header {
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
}
tokens {

View File

@ -5,7 +5,7 @@ export {
FlinkSQL,
SparkSQL,
HiveSQL,
PostgresSQL,
PostgreSQL,
TrinoSQL,
ImpalaSQL,
PLSQL,
@ -22,15 +22,15 @@ export type {
HiveSqlParserVisitor,
PlSqlParserListener,
PlSqlParserVisitor,
PostgreSQLParserListener,
PostgreSQLParserVisitor,
PostgreSqlParserListener,
PostgreSqlParserVisitor,
TrinoSqlListener,
TrinoSqlVisitor,
ImpalaSqlParserListener,
ImpalaSqlParserVisitor,
} from './lib';
export { EntityContextType } from './parser/common/basic-parser-types';
export { EntityContextType } from './parser/common/types';
export {
/**
@ -38,13 +38,9 @@ export {
* It will be removed when the stable version is released.
*/
EntityContextType as SyntaxContextType,
} from './parser/common/basic-parser-types';
} from './parser/common/types';
export type {
CaretPosition,
Suggestions,
SyntaxSuggestion,
} from './parser/common/basic-parser-types';
export type { CaretPosition, Suggestions, SyntaxSuggestion } from './parser/common/types';
export type { WordRange, TextSlice } from './parser/common/textAndWord';

View File

@ -1,5 +1,5 @@
import * as antlr from "antlr4ng";
export default abstract class SQLParserBase<T = antlr.ParserRuleContext> extends antlr.Parser{
export abstract class SQLParserBase<T = antlr.ParserRuleContext> extends antlr.Parser{
public constructor(input: antlr.TokenStream) {
super(input);
}

View File

@ -1,4 +1,4 @@
// Generated from dt-sql-parser/src/grammar/flinksql/FlinkSqlLexer.g4 by ANTLR 4.13.1
// Generated from dt-sql-parser/src/grammar/flink/FlinkSqlLexer.g4 by ANTLR 4.13.1
import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";

View File

@ -1,4 +1,4 @@
// Generated from dt-sql-parser/src/grammar/flinksql/FlinkSqlParser.g4 by ANTLR 4.13.1
// Generated from dt-sql-parser/src/grammar/flink/FlinkSqlParser.g4 by ANTLR 4.13.1
import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";
@ -11,7 +11,7 @@ import { FlinkSqlParserVisitor } from "./FlinkSqlParserVisitor.js";
type int = number;
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
export class FlinkSqlParser extends SQLParserBase {

View File

@ -1,9 +1,9 @@
// Generated from dt-sql-parser/src/grammar/flinksql/FlinkSqlParser.g4 by ANTLR 4.13.1
// Generated from dt-sql-parser/src/grammar/flink/FlinkSqlParser.g4 by ANTLR 4.13.1
import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
import { ProgramContext } from "./FlinkSqlParser.js";

View File

@ -1,9 +1,9 @@
// Generated from dt-sql-parser/src/grammar/flinksql/FlinkSqlParser.g4 by ANTLR 4.13.1
// Generated from dt-sql-parser/src/grammar/flink/FlinkSqlParser.g4 by ANTLR 4.13.1
import { AbstractParseTreeVisitor } from "antlr4ng";
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
import { ProgramContext } from "./FlinkSqlParser.js";

View File

@ -11,7 +11,7 @@ import { HiveSqlParserVisitor } from "./HiveSqlParserVisitor.js";
type int = number;
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
export class HiveSqlParser extends SQLParserBase {

View File

@ -3,7 +3,7 @@
import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
import { ProgramContext } from "./HiveSqlParser.js";

View File

@ -3,7 +3,7 @@
import { AbstractParseTreeVisitor } from "antlr4ng";
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
import { ProgramContext } from "./HiveSqlParser.js";

View File

@ -11,7 +11,7 @@ import { ImpalaSqlParserVisitor } from "./ImpalaSqlParserVisitor.js";
type int = number;
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
export class ImpalaSqlParser extends SQLParserBase {

View File

@ -3,7 +3,7 @@
import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
import { ProgramContext } from "./ImpalaSqlParser.js";

View File

@ -3,7 +3,7 @@
import { AbstractParseTreeVisitor } from "antlr4ng";
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
import { ProgramContext } from "./ImpalaSqlParser.js";

View File

@ -1,5 +1,5 @@
export { FlinkSqlParserListener } from './flinksql/FlinkSqlParserListener';
export { FlinkSqlParserVisitor } from './flinksql/FlinkSqlParserVisitor';
export { FlinkSqlParserListener } from './flink/FlinkSqlParserListener';
export { FlinkSqlParserVisitor } from './flink/FlinkSqlParserVisitor';
export { MySqlParserListener } from './mysql/MySqlParserListener';
export { MySqlParserVisitor } from './mysql/MySqlParserVisitor';
@ -13,11 +13,11 @@ export { PlSqlParserVisitor } from './plsql/PlSqlParserVisitor';
export { SparkSqlParserListener } from './spark/SparkSqlParserListener';
export { SparkSqlParserVisitor } from './spark/SparkSqlParserVisitor';
export { PostgreSQLParserListener } from './pgsql/PostgreSQLParserListener';
export { PostgreSQLParserVisitor } from './pgsql/PostgreSQLParserVisitor';
export { PostgreSqlParserListener } from './postgresql/PostgreSqlParserListener';
export { PostgreSqlParserVisitor } from './postgresql/PostgreSqlParserVisitor';
export { TrinoSqlListener } from './trinosql/TrinoSqlListener';
export { TrinoSqlVisitor } from './trinosql/TrinoSqlVisitor';
export { TrinoSqlListener } from './trino/TrinoSqlListener';
export { TrinoSqlVisitor } from './trino/TrinoSqlVisitor';
export { ImpalaSqlParserListener } from './impala/ImpalaSqlParserListener';
export { ImpalaSqlParserVisitor } from './impala/ImpalaSqlParserVisitor';

View File

@ -11,7 +11,7 @@ import { MySqlParserVisitor } from "./MySqlParserVisitor.js";
type int = number;
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
export class MySqlParser extends SQLParserBase {

View File

@ -3,7 +3,7 @@
import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
import { ProgramContext } from "./MySqlParser.js";

View File

@ -3,7 +3,7 @@
import { AbstractParseTreeVisitor } from "antlr4ng";
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
import { ProgramContext } from "./MySqlParser.js";

View File

@ -1,6 +1,6 @@
import { Lexer } from "antlr4ng";
export default abstract class PlSqlBaseLexer extends Lexer {
export abstract class PlSqlBaseLexer extends Lexer {
_interp: any;

View File

@ -1,6 +1,6 @@
import { TokenStream } from "antlr4ng";
import SQLParserBase from "../SQLParserBase";
export default abstract class PlSqlBaseParser extends SQLParserBase {
import {SQLParserBase} from "../SQLParserBase";
export abstract class PlSqlBaseParser extends SQLParserBase {
private _isVersion10: boolean = false;
private _isVersion12: boolean = true;

View File

@ -4,7 +4,7 @@ import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";
import PlSqlBaseLexer from "./PlSqlBaseLexer";
import { PlSqlBaseLexer } from "./PlSqlBaseLexer";
export class PlSqlLexer extends PlSqlBaseLexer {

View File

@ -11,7 +11,7 @@ import { PlSqlParserVisitor } from "./PlSqlParserVisitor.js";
type int = number;
import PlSqlBaseParser from './PlSqlBaseParser';
import { PlSqlBaseParser } from './PlSqlBaseParser';
export class PlSqlParser extends PlSqlBaseParser {

View File

@ -1,10 +1,10 @@
// Generated from dt-sql-parser/src/grammar/pgsql/PostgreSQLLexer.g4 by ANTLR 4.13.1
// Generated from dt-sql-parser/src/grammar/postgresql/PostgreSqlLexer.g4 by ANTLR 4.13.1
import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";
export class PostgreSQLLexer extends antlr.Lexer {
export class PostgreSqlLexer extends antlr.Lexer {
public static readonly Dollar = 1;
public static readonly OPEN_PAREN = 2;
public static readonly CLOSE_PAREN = 3;
@ -971,20 +971,20 @@ export class PostgreSQLLexer extends antlr.Lexer {
public constructor(input: antlr.CharStream) {
super(input);
this.interpreter = new antlr.LexerATNSimulator(this, PostgreSQLLexer._ATN, PostgreSQLLexer.decisionsToDFA, new antlr.PredictionContextCache());
this.interpreter = new antlr.LexerATNSimulator(this, PostgreSqlLexer._ATN, PostgreSqlLexer.decisionsToDFA, new antlr.PredictionContextCache());
}
public get grammarFileName(): string { return "PostgreSQLLexer.g4"; }
public get grammarFileName(): string { return "PostgreSqlLexer.g4"; }
public get literalNames(): (string | null)[] { return PostgreSQLLexer.literalNames; }
public get symbolicNames(): (string | null)[] { return PostgreSQLLexer.symbolicNames; }
public get ruleNames(): string[] { return PostgreSQLLexer.ruleNames; }
public get literalNames(): (string | null)[] { return PostgreSqlLexer.literalNames; }
public get symbolicNames(): (string | null)[] { return PostgreSqlLexer.symbolicNames; }
public get ruleNames(): string[] { return PostgreSqlLexer.ruleNames; }
public get serializedATN(): number[] { return PostgreSQLLexer._serializedATN; }
public get serializedATN(): number[] { return PostgreSqlLexer._serializedATN; }
public get channelNames(): string[] { return PostgreSQLLexer.channelNames; }
public get channelNames(): string[] { return PostgreSqlLexer.channelNames; }
public get modeNames(): string[] { return PostgreSQLLexer.modeNames; }
public get modeNames(): string[] { return PostgreSqlLexer.modeNames; }
public static readonly _serializedATN: number[] = [
4,0,590,5802,6,-1,6,-1,6,-1,6,-1,6,-1,2,0,7,0,2,1,7,1,2,2,7,2,2,
@ -3224,19 +3224,19 @@ export class PostgreSQLLexer extends antlr.Lexer {
private static __ATN: antlr.ATN;
public static get _ATN(): antlr.ATN {
if (!PostgreSQLLexer.__ATN) {
PostgreSQLLexer.__ATN = new antlr.ATNDeserializer().deserialize(PostgreSQLLexer._serializedATN);
if (!PostgreSqlLexer.__ATN) {
PostgreSqlLexer.__ATN = new antlr.ATNDeserializer().deserialize(PostgreSqlLexer._serializedATN);
}
return PostgreSQLLexer.__ATN;
return PostgreSqlLexer.__ATN;
}
private static readonly vocabulary = new antlr.Vocabulary(PostgreSQLLexer.literalNames, PostgreSQLLexer.symbolicNames, []);
private static readonly vocabulary = new antlr.Vocabulary(PostgreSqlLexer.literalNames, PostgreSqlLexer.symbolicNames, []);
public override get vocabulary(): antlr.Vocabulary {
return PostgreSQLLexer.vocabulary;
return PostgreSqlLexer.vocabulary;
}
private static readonly decisionsToDFA = PostgreSQLLexer._ATN.decisionToState.map( (ds: antlr.DecisionState, index: number) => new antlr.DFA(ds, index) );
private static readonly decisionsToDFA = PostgreSqlLexer._ATN.decisionToState.map( (ds: antlr.DecisionState, index: number) => new antlr.DFA(ds, index) );
}

View File

@ -11,7 +11,7 @@ import { SparkSqlParserVisitor } from "./SparkSqlParserVisitor.js";
type int = number;
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
export class SparkSqlParser extends SQLParserBase {

View File

@ -3,7 +3,7 @@
import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
import { ProgramContext } from "./SparkSqlParser.js";

View File

@ -3,7 +3,7 @@
import { AbstractParseTreeVisitor } from "antlr4ng";
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
import { ProgramContext } from "./SparkSqlParser.js";

View File

@ -1,10 +1,10 @@
// Generated from dt-sql-parser/src/grammar/trinosql/TrinoSql.g4 by ANTLR 4.13.1
// Generated from dt-sql-parser/src/grammar/trino/TrinoSql.g4 by ANTLR 4.13.1
import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
export class TrinoSqlLexer extends antlr.Lexer {

View File

@ -1,9 +1,9 @@
// Generated from dt-sql-parser/src/grammar/trinosql/TrinoSql.g4 by ANTLR 4.13.1
// Generated from dt-sql-parser/src/grammar/trino/TrinoSql.g4 by ANTLR 4.13.1
import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
import { ProgramContext } from "./TrinoSqlParser.js";

View File

@ -1,4 +1,4 @@
// Generated from dt-sql-parser/src/grammar/trinosql/TrinoSql.g4 by ANTLR 4.13.1
// Generated from dt-sql-parser/src/grammar/trino/TrinoSql.g4 by ANTLR 4.13.1
import * as antlr from "antlr4ng";
import { Token } from "antlr4ng";
@ -11,7 +11,7 @@ import { TrinoSqlVisitor } from "./TrinoSqlVisitor.js";
type int = number;
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
export class TrinoSqlParser extends SQLParserBase {

View File

@ -1,9 +1,9 @@
// Generated from dt-sql-parser/src/grammar/trinosql/TrinoSql.g4 by ANTLR 4.13.1
// Generated from dt-sql-parser/src/grammar/trino/TrinoSql.g4 by ANTLR 4.13.1
import { AbstractParseTreeVisitor } from "antlr4ng";
import SQLParserBase from '../SQLParserBase';
import { SQLParserBase } from '../SQLParserBase';
import { ProgramContext } from "./TrinoSqlParser.js";

View File

@ -10,20 +10,20 @@ import {
PredictionMode,
} from 'antlr4ng';
import { CandidatesCollection, CodeCompletionCore } from 'antlr4-c3';
import SQLParserBase from '../../lib/SQLParserBase';
import { SQLParserBase } from '../../lib/SQLParserBase';
import { findCaretTokenIndex } from './findCaretTokenIndex';
import { ctxToText, tokenToWord, WordRange, TextSlice } from './textAndWord';
import { CaretPosition, Suggestions, SyntaxSuggestion } from './basic-parser-types';
import ParseErrorListener, { ParseError, ErrorListener } from './parseErrorListener';
import { CaretPosition, Suggestions, SyntaxSuggestion } from './types';
import { ParseError, ErrorListener, ParseErrorListener } from './parseErrorListener';
import { ErrorStrategy } from './errorStrategy';
import type SplitListener from './splitListener';
import type EntityCollector from './entityCollector';
import type { SplitListener } from './splitListener';
import type { EntityCollector } from './entityCollector';
import { EntityContext } from './entityCollector';
/**
* Custom Parser class, subclass needs extends it.
* Basic SQL class, every sql needs extends it.
*/
export default abstract class BasicParser<
export abstract class BasicSQL<
L extends Lexer = Lexer,
PRC extends ParserRuleContext = ParserRuleContext,
P extends SQLParserBase<PRC> = SQLParserBase<PRC>,

View File

@ -1,8 +1,8 @@
import { ParserRuleContext } from 'antlr4ng';
import { EntityContextType } from './basic-parser-types';
import { EntityContextType } from './types';
import { WordPosition, TextPosition } from './textAndWord';
import { ctxToText, ctxToWord } from './textAndWord';
import SimpleStack from './simpleStack';
import { SimpleStack } from './simpleStack';
/**
* TODO: more stmt type should be supported.
@ -11,7 +11,7 @@ export enum StmtContextType {
/** A self-contained and complete statement */
COMMON_STMT = 'commonStmt',
CREATE_CATALOG_STMT = 'createCatalogStmt',
CREATE_DATABASE_STMT = 'crateDatabaseStmt',
CREATE_DATABASE_STMT = 'createDatabaseStmt',
CREATE_TABLE_STMT = 'createTableStmt',
CREATE_VIEW_STMT = 'createViewStmt',
SELECT_STMT = 'selectStmt',
@ -90,7 +90,7 @@ export function toEntityContext(
* @todo: Handle alias, includes column alias, table alias, query as alias and so on.
* @todo: [may be need] Combine the entities in each clause.
*/
abstract class EntityCollector {
export abstract class EntityCollector {
constructor(input: string, caretTokenIndex?: number) {
this._input = input;
this._caretTokenIndex = caretTokenIndex ?? -1;
@ -275,5 +275,3 @@ abstract class EntityCollector {
return finalEntities;
}
}
export default EntityCollector;

View File

@ -1,5 +1,5 @@
import { Token } from 'antlr4ng';
import { CaretPosition } from './basic-parser-types';
import { CaretPosition } from './types';
/**
* find token index via caret position (cursor position)

View File

@ -39,7 +39,7 @@ export interface SyntaxError<T> {
*/
export type ErrorListener<T> = (parseError: ParseError, originalError: SyntaxError<T>) => void;
export default class ParseErrorListener implements ANTLRErrorListener {
export class ParseErrorListener implements ANTLRErrorListener {
private _errorListener: ErrorListener<Token>;
constructor(errorListener: ErrorListener<Token>) {

View File

@ -1,4 +1,4 @@
class SimpleStack<T> {
export class SimpleStack<T> {
constructor() {
this.stack = [];
}
@ -28,5 +28,3 @@ class SimpleStack<T> {
return this.stack.length === 0;
}
}
export default SimpleStack;

View File

@ -1,4 +1,4 @@
abstract class SplitListener<T> {
export abstract class SplitListener<T> {
protected _statementsContext: T[] = [];
visitTerminal() {}
@ -13,5 +13,3 @@ abstract class SplitListener<T> {
return this._statementsContext;
}
}
export default SplitListener;

View File

@ -1,4 +1,4 @@
import { EntityContextType } from '../../parser/common/basic-parser-types';
import { EntityContextType } from '../common/types';
import {
CatalogPathContext,
CatalogPathCreateContext,
@ -18,14 +18,11 @@ import {
TablePathCreateContext,
ViewPathContext,
ViewPathCreateContext,
} from '../../lib/flinksql/FlinkSqlParser';
import { FlinkSqlParserListener } from '../../lib/flinksql/FlinkSqlParserListener';
import EntityCollector, { StmtContextType } from '../common/entityCollector';
} from '../../lib/flink/FlinkSqlParser';
import { FlinkSqlParserListener } from '../../lib/flink/FlinkSqlParserListener';
import { StmtContextType, EntityCollector } from '../common/entityCollector';
export default class FlinkEntityCollector
extends EntityCollector
implements FlinkSqlParserListener
{
export class FlinkEntityCollector extends EntityCollector implements FlinkSqlParserListener {
/** ====== Entity Begin */
exitCatalogPathCreate(ctx: CatalogPathCreateContext) {
this.pushEntity(ctx, EntityContextType.CATALOG_CREATE);

View File

@ -0,0 +1,12 @@
import { SingleStatementContext } from '../../lib/flink/FlinkSqlParser';
import { FlinkSqlParserListener } from '../../lib/flink/FlinkSqlParserListener';
import { SplitListener } from '../common/splitListener';
export class FlinkSqlSplitListener
extends SplitListener<SingleStatementContext>
implements FlinkSqlParserListener
{
exitSingleStatement(ctx: SingleStatementContext) {
this._statementsContext.push(ctx);
}
}

View File

@ -1,24 +1,22 @@
import { Token } from 'antlr4ng';
import { CandidatesCollection } from 'antlr4-c3';
import { FlinkSqlLexer } from '../../lib/flinksql/FlinkSqlLexer';
import { FlinkSqlParser, ProgramContext } from '../../lib/flinksql/FlinkSqlParser';
import { EntityContextType, Suggestions, SyntaxSuggestion } from '../common/basic-parser-types';
import BasicParser from '../common/basicParser';
import { FlinkSqlLexer } from '../../lib/flink/FlinkSqlLexer';
import { FlinkSqlParser, ProgramContext } from '../../lib/flink/FlinkSqlParser';
import { EntityContextType, Suggestions, SyntaxSuggestion } from '../common/types';
import { BasicSQL } from '../common/basicSQL';
import { StmtContextType } from '../common/entityCollector';
import { FlinkSqlSplitListener } from './flinkSplitListener';
import FlinkEntityCollector from './flinkEntityCollector';
import { FlinkEntityCollector } from './flinkEntityCollector';
export { FlinkSqlSplitListener, FlinkEntityCollector };
export default class FlinkSQL extends BasicParser<FlinkSqlLexer, ProgramContext, FlinkSqlParser> {
export class FlinkSQL extends BasicSQL<FlinkSqlLexer, ProgramContext, FlinkSqlParser> {
protected createLexerFromCharStream(charStreams) {
const lexer = new FlinkSqlLexer(charStreams);
return lexer;
return new FlinkSqlLexer(charStreams);
}
protected createParserFromTokenStream(tokenStream) {
const parser = new FlinkSqlParser(tokenStream);
return parser;
return new FlinkSqlParser(tokenStream);
}
protected preferredRules = new Set([

View File

@ -1,12 +0,0 @@
import { SingleStatementContext } from '../../lib/flinksql/FlinkSqlParser';
import { FlinkSqlParserListener } from '../../lib/flinksql/FlinkSqlParserListener';
import SplitListener from '../common/splitListener';
export class FlinkSqlSplitListener
extends SplitListener<SingleStatementContext>
implements FlinkSqlParserListener
{
exitSingleStatement = (ctx: SingleStatementContext) => {
this._statementsContext.push(ctx);
};
}

View File

@ -1,4 +1,4 @@
import { EntityContextType } from '../..';
import { EntityContextType } from '../common/types';
import { HiveSqlParserListener } from '../../lib';
import {
ColumnNameCreateContext,
@ -21,29 +21,29 @@ import {
ViewNameContext,
ViewNameCreateContext,
} from '../../lib/hive/HiveSqlParser';
import EntityCollector, { StmtContextType } from '../common/entityCollector';
import { StmtContextType, EntityCollector } from '../common/entityCollector';
export default class HiveEntityCollector extends EntityCollector implements HiveSqlParserListener {
export class HiveEntityCollector extends EntityCollector implements HiveSqlParserListener {
/** ====== Entity Begin */
exitTableNameCreate = (ctx: TableNameCreateContext) => {
exitTableNameCreate(ctx: TableNameCreateContext) {
this.pushEntity(ctx, EntityContextType.TABLE_CREATE);
};
}
exitTableName = (ctx: TableNameContext) => {
exitTableName(ctx: TableNameContext) {
this.pushEntity(ctx, EntityContextType.TABLE);
};
}
exitColumnNameCreate = (ctx: ColumnNameCreateContext) => {
exitColumnNameCreate(ctx: ColumnNameCreateContext) {
this.pushEntity(ctx, EntityContextType.COLUMN_CREATE);
};
}
exitViewNameCreate = (ctx: ViewNameCreateContext) => {
exitViewNameCreate(ctx: ViewNameCreateContext) {
this.pushEntity(ctx, EntityContextType.VIEW_CREATE);
};
}
exitViewName = (ctx: ViewNameContext) => {
exitViewName(ctx: ViewNameContext) {
this.pushEntity(ctx, EntityContextType.VIEW);
};
}
exitDbSchemaNameCreate(ctx: DbSchemaNameCreateContext) {
this.pushEntity(ctx, EntityContextType.DATABASE_CREATE);
@ -57,70 +57,70 @@ export default class HiveEntityCollector extends EntityCollector implements Hive
this.pushEntity(ctx, EntityContextType.FUNCTION_CREATE);
}
/** ===== Statement begin */
enterStatement = (ctx: StatementContext) => {
/** ==== Statement begin */
enterStatement(ctx: StatementContext) {
this.pushStmt(ctx, StmtContextType.COMMON_STMT);
};
}
exitStatement = () => {
exitStatement() {
this.popStmt();
};
}
enterCreateTableStatement = (ctx: CreateTableStatementContext) => {
enterCreateTableStatement(ctx: CreateTableStatementContext) {
this.pushStmt(ctx, StmtContextType.CREATE_TABLE_STMT);
};
}
exitCreateTableStatement = () => {
exitCreateTableStatement() {
this.popStmt();
};
}
enterSelectStatement = (ctx: SelectStatementContext) => {
enterSelectStatement(ctx: SelectStatementContext) {
this.pushStmt(ctx, StmtContextType.SELECT_STMT);
};
}
exitSelectStatement = (ctx: SelectStatementContext) => {
exitSelectStatement(ctx: SelectStatementContext) {
this.popStmt();
};
}
enterFromSelectStmt = (ctx: FromSelectStmtContext) => {
enterFromSelectStmt(ctx: FromSelectStmtContext) {
this.pushStmt(ctx, StmtContextType.SELECT_STMT);
};
}
exitFromSelectStmt = (ctx: FromSelectStmtContext) => {
exitFromSelectStmt(ctx: FromSelectStmtContext) {
this.popStmt();
};
}
enterCreateViewStatement = (ctx: CreateViewStatementContext) => {
enterCreateViewStatement(ctx: CreateViewStatementContext) {
this.pushStmt(ctx, StmtContextType.CREATE_VIEW_STMT);
};
}
exitCreateViewStatement = (ctx: CreateViewStatementContext) => {
exitCreateViewStatement(ctx: CreateViewStatementContext) {
this.popStmt();
};
}
enterCreateMaterializedViewStatement = (ctx: CreateMaterializedViewStatementContext) => {
enterCreateMaterializedViewStatement(ctx: CreateMaterializedViewStatementContext) {
this.pushStmt(ctx, StmtContextType.CREATE_VIEW_STMT);
};
}
exitCreateMaterializedViewStatement = (ctx: CreateMaterializedViewStatementContext) => {
exitCreateMaterializedViewStatement(ctx: CreateMaterializedViewStatementContext) {
this.popStmt();
};
}
enterInsertStmt = (ctx: InsertStmtContext) => {
enterInsertStmt(ctx: InsertStmtContext) {
this.pushStmt(ctx, StmtContextType.INSERT_STMT);
};
}
exitInsertStmt = (ctx: InsertStmtContext) => {
exitInsertStmt(ctx: InsertStmtContext) {
this.popStmt();
};
}
enterFromInsertStmt = (ctx: FromInsertStmtContext) => {
enterFromInsertStmt(ctx: FromInsertStmtContext) {
this.pushStmt(ctx, StmtContextType.INSERT_STMT);
};
}
exitFromInsertStmt = (ctx: FromInsertStmtContext) => {
exitFromInsertStmt(ctx: FromInsertStmtContext) {
this.popStmt();
};
}
enterCreateDatabaseStatement(ctx: CreateDatabaseStatementContext) {
this.pushStmt(ctx, StmtContextType.CREATE_DATABASE_STMT);

View File

@ -1,12 +1,12 @@
import { StatementContext } from '../../lib/hive/HiveSqlParser';
import { HiveSqlParserListener } from '../../lib/hive/HiveSqlParserListener';
import SplitListener from '../common/splitListener';
import { SplitListener } from '../common/splitListener';
export class HiveSqlSplitListener
extends SplitListener<StatementContext>
implements HiveSqlParserListener
{
exitStatement = (ctx: StatementContext) => {
exitStatement(ctx: StatementContext) {
this._statementsContext.push(ctx);
};
}
}

View File

@ -2,19 +2,18 @@ import { Token } from 'antlr4ng';
import { CandidatesCollection } from 'antlr4-c3';
import { HiveSqlLexer } from '../../lib/hive/HiveSqlLexer';
import { HiveSqlParser, ProgramContext } from '../../lib/hive/HiveSqlParser';
import BasicParser from '../common/basicParser';
import { BasicSQL } from '../common/basicSQL';
import { EntityContextType, Suggestions, SyntaxSuggestion } from '../common/basic-parser-types';
import { EntityContextType, Suggestions, SyntaxSuggestion } from '../common/types';
import { StmtContextType } from '../common/entityCollector';
import { HiveSqlSplitListener } from './hiveSplitListener';
import HiveEntityCollector from './hiveEntityCollector';
import { HiveEntityCollector } from './hiveEntityCollector';
export { HiveEntityCollector, HiveSqlSplitListener };
export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, HiveSqlParser> {
export class HiveSQL extends BasicSQL<HiveSqlLexer, ProgramContext, HiveSqlParser> {
protected createLexerFromCharStream(charStreams) {
const lexer = new HiveSqlLexer(charStreams);
return lexer;
return new HiveSqlLexer(charStreams);
}
protected createParserFromTokenStream(tokenStream) {

View File

@ -20,13 +20,10 @@ import {
ViewNameCreateContext,
ViewNamePathContext,
} from '../../lib/impala/ImpalaSqlParser';
import { EntityContextType } from '../common/basic-parser-types';
import EntityCollector, { StmtContextType } from '../common/entityCollector';
import { EntityContextType } from '../common/types';
import { StmtContextType, EntityCollector } from '../common/entityCollector';
export default class ImpalaEntityCollector
extends EntityCollector
implements ImpalaSqlParserListener
{
export class ImpalaEntityCollector extends EntityCollector implements ImpalaSqlParserListener {
/** ===== Entity begin */
exitTableNameCreate(ctx: TableNameCreateContext) {
this.pushEntity(ctx, EntityContextType.TABLE_CREATE);

View File

@ -1,12 +1,12 @@
import { SingleStatementContext } from '../../lib/impala/ImpalaSqlParser';
import { ImpalaSqlParserListener } from '../../lib/impala/ImpalaSqlParserListener';
import SplitListener from '../common/splitListener';
import { SplitListener } from '../common/splitListener';
export class ImpalaSqlSplitListener
extends SplitListener<SingleStatementContext>
implements ImpalaSqlParserListener
{
exitSingleStatement = (ctx: SingleStatementContext) => {
exitSingleStatement(ctx: SingleStatementContext) {
this._statementsContext.push(ctx);
};
}
}

View File

@ -2,22 +2,17 @@ import { Token } from 'antlr4ng';
import { CandidatesCollection } from 'antlr4-c3';
import { ImpalaSqlLexer } from '../../lib/impala/ImpalaSqlLexer';
import { ImpalaSqlParser, ProgramContext } from '../../lib/impala/ImpalaSqlParser';
import BasicParser from '../common/basicParser';
import { EntityContextType, Suggestions, SyntaxSuggestion } from '../common/basic-parser-types';
import { BasicSQL } from '../common/basicSQL';
import { EntityContextType, Suggestions, SyntaxSuggestion } from '../common/types';
import { StmtContextType } from '../common/entityCollector';
import { ImpalaSqlSplitListener } from './impalaSplitListener';
import ImpalaEntityCollector from './impalaEntityCollector';
import { ImpalaEntityCollector } from './impalaEntityCollector';
export { ImpalaEntityCollector, ImpalaSqlSplitListener };
export default class ImpalaSQL extends BasicParser<
ImpalaSqlLexer,
ProgramContext,
ImpalaSqlParser
> {
export class ImpalaSQL extends BasicSQL<ImpalaSqlLexer, ProgramContext, ImpalaSqlParser> {
protected createLexerFromCharStream(charStreams) {
const lexer = new ImpalaSqlLexer(charStreams);
return lexer;
return new ImpalaSqlLexer(charStreams);
}
protected createParserFromTokenStream(tokenStream) {

View File

@ -1,8 +1,8 @@
export { default as MySQL } from './mysql';
export { default as PLSQL } from './plsql';
export { default as HiveSQL } from './hive';
export { default as FlinkSQL } from './flinksql';
export { default as SparkSQL } from './spark';
export { default as PostgresSQL } from './pgsql';
export { default as TrinoSQL } from './trino';
export { default as ImpalaSQL } from './impala';
export { MySQL } from './mysql';
export { PLSQL } from './plsql';
export { HiveSQL } from './hive';
export { FlinkSQL } from './flink';
export { SparkSQL } from './spark';
export { PostgreSQL } from './postgresql';
export { TrinoSQL } from './trino';
export { ImpalaSQL } from './impala';

View File

@ -2,18 +2,17 @@ import { Token } from 'antlr4ng';
import { CandidatesCollection } from 'antlr4-c3';
import { MySqlLexer } from '../../lib/mysql/MySqlLexer';
import { MySqlParser, ProgramContext } from '../../lib/mysql/MySqlParser';
import BasicParser from '../common/basicParser';
import { Suggestions, EntityContextType, SyntaxSuggestion } from '../common/basic-parser-types';
import { BasicSQL } from '../common/basicSQL';
import { Suggestions, EntityContextType, SyntaxSuggestion } from '../common/types';
import { StmtContextType } from '../common/entityCollector';
import MysqlSplitListener from './mysqlSplitListener';
import MySqlEntityCollector from './mysqlEntityCollector';
import { MysqlSplitListener } from './mysqlSplitListener';
import { MySqlEntityCollector } from './mysqlEntityCollector';
export { MySqlEntityCollector, MysqlSplitListener };
export default class MySQL extends BasicParser<MySqlLexer, ProgramContext, MySqlParser> {
export class MySQL extends BasicSQL<MySqlLexer, ProgramContext, MySqlParser> {
protected createLexerFromCharStream(charStreams): MySqlLexer {
const lexer = new MySqlLexer(charStreams);
return lexer;
return new MySqlLexer(charStreams);
}
protected createParserFromTokenStream(tokenStream): MySqlParser {

View File

@ -19,10 +19,10 @@ import type {
ViewNameCreateContext,
} from '../../lib/mysql/MySqlParser';
import type { MySqlParserListener } from '../../lib/mysql/MySqlParserListener';
import { EntityContextType } from '../common/basic-parser-types';
import EntityCollector, { StmtContextType } from '../common/entityCollector';
import { EntityContextType } from '../common/types';
import { StmtContextType, EntityCollector } from '../common/entityCollector';
export default class MySqlEntityCollector extends EntityCollector implements MySqlParserListener {
export class MySqlEntityCollector extends EntityCollector implements MySqlParserListener {
/** ====== Entity Begin */
exitDatabaseName(ctx: DatabaseNameContext) {
this.pushEntity(ctx, EntityContextType.DATABASE);

View File

@ -1,12 +1,12 @@
import { SingleStatementContext } from '../../lib/mysql/MySqlParser';
import { MySqlParserListener } from '../../lib/mysql/MySqlParserListener';
import SplitListener from '../common/splitListener';
import { SplitListener } from '../common/splitListener';
export default class MysqlSplitListener
export class MysqlSplitListener
extends SplitListener<SingleStatementContext>
implements MySqlParserListener
{
exitSingleStatement = (ctx: SingleStatementContext) => {
exitSingleStatement(ctx: SingleStatementContext) {
this._statementsContext.push(ctx);
};
}
}

View File

@ -1,12 +0,0 @@
import { SingleStmtContext } from '../../lib/pgsql/PostgreSQLParser';
import { PostgreSQLParserListener } from '../../lib/pgsql/PostgreSQLParserListener';
import SplitListener from '../common/splitListener';
export default class PostgreSqlSplitListener
extends SplitListener<SingleStmtContext>
implements PostgreSQLParserListener
{
exitSingleStmt = (ctx: SingleStmtContext) => {
this._statementsContext.push(ctx);
};
}

View File

@ -2,13 +2,12 @@ import { Token } from 'antlr4ng';
import { CandidatesCollection } from 'antlr4-c3';
import { PlSqlLexer } from '../lib/plsql/PlSqlLexer';
import { PlSqlParser, ProgramContext } from '../lib/plsql/PlSqlParser';
import BasicParser from './common/basicParser';
import { Suggestions } from './common/basic-parser-types';
import { BasicSQL } from './common/basicSQL';
import { Suggestions } from './common/types';
export default class PLSQL extends BasicParser<PlSqlLexer, ProgramContext, PlSqlParser> {
export class PLSQL extends BasicSQL<PlSqlLexer, ProgramContext, PlSqlParser> {
protected createLexerFromCharStream(charStreams) {
const lexer = new PlSqlLexer(charStreams);
return lexer;
return new PlSqlLexer(charStreams);
}
protected createParserFromTokenStream(tokenStream) {

View File

@ -1,45 +1,40 @@
import { CandidatesCollection } from 'antlr4-c3';
import { Token } from 'antlr4ng';
import { PostgreSQLLexer } from '../../lib/pgsql/PostgreSQLLexer';
import { PostgreSQLParser, ProgramContext } from '../../lib/pgsql/PostgreSQLParser';
import { EntityContextType, Suggestions, SyntaxSuggestion } from '../common/basic-parser-types';
import BasicParser from '../common/basicParser';
import { PostgreSqlLexer } from '../../lib/postgresql/PostgreSqlLexer';
import { PostgreSqlParser, ProgramContext } from '../../lib/postgresql/PostgreSqlParser';
import { EntityContextType, Suggestions, SyntaxSuggestion } from '../common/types';
import { BasicSQL } from '../common/basicSQL';
import { StmtContextType } from '../common/entityCollector';
import PostgreSQLEntityCollector from './postgreEntityCollector';
import PostgreSqlSplitListener from './postgreSplitListener';
import { PostgreSqlEntityCollector } from './postgreEntityCollector';
import { PostgreSqlSplitListener } from './postgreSplitListener';
export { PostgreSQLEntityCollector, PostgreSqlSplitListener };
export { PostgreSqlEntityCollector, PostgreSqlSplitListener };
export default class PostgresSQL extends BasicParser<
PostgreSQLLexer,
ProgramContext,
PostgreSQLParser
> {
export class PostgreSQL extends BasicSQL<PostgreSqlLexer, ProgramContext, PostgreSqlParser> {
protected createLexerFromCharStream(charStreams) {
const lexer = new PostgreSQLLexer(charStreams);
return lexer;
return new PostgreSqlLexer(charStreams);
}
protected createParserFromTokenStream(tokenStream) {
return new PostgreSQLParser(tokenStream);
return new PostgreSqlParser(tokenStream);
}
protected preferredRules: Set<number> = new Set([
PostgreSQLParser.RULE_table_name_create, // table name
PostgreSQLParser.RULE_table_name, // table name that will be created
PostgreSQLParser.RULE_function_name, // function name
PostgreSQLParser.RULE_function_name_create, // function name that will be created
PostgreSQLParser.RULE_schema_name_create, // schema name that will be created
PostgreSQLParser.RULE_schema_name, // schema name
PostgreSQLParser.RULE_view_name_create, // view name that will be created
PostgreSQLParser.RULE_view_name, // view name
PostgreSQLParser.RULE_database_name_create, // database name that will be created
PostgreSQLParser.RULE_database_name, // database name
PostgreSQLParser.RULE_procedure_name_create, // procedure name that will be created
PostgreSQLParser.RULE_procedure_name, // procedure name
PostgreSQLParser.RULE_column_name_create, // column name that will be created
PostgreSQLParser.RULE_column_name, // column name
PostgreSqlParser.RULE_table_name_create, // table name that will be created
PostgreSqlParser.RULE_table_name, // table name
PostgreSqlParser.RULE_function_name, // function name
PostgreSqlParser.RULE_function_name_create, // function name that will be created
PostgreSqlParser.RULE_schema_name_create, // schema name that will be created
PostgreSqlParser.RULE_schema_name, // schema name
PostgreSqlParser.RULE_view_name_create, // view name that will be created
PostgreSqlParser.RULE_view_name, // view name
PostgreSqlParser.RULE_database_name_create, // database name that will be created
PostgreSqlParser.RULE_database_name, // database name
PostgreSqlParser.RULE_procedure_name_create, // procedure name that will be created
PostgreSqlParser.RULE_procedure_name, // procedure name
PostgreSqlParser.RULE_column_name_create, // column name that will be created
PostgreSqlParser.RULE_column_name, // column name
]);
protected get splitListener() {
@ -47,7 +42,7 @@ export default class PostgresSQL extends BasicParser<
}
protected createEntityCollector(input: string, caretTokenIndex?: number) {
return new PostgreSQLEntityCollector(input, caretTokenIndex);
return new PostgreSqlEntityCollector(input, caretTokenIndex);
}
protected processCandidates(
@ -68,59 +63,59 @@ export default class PostgresSQL extends BasicParser<
let syntaxContextType: EntityContextType | StmtContextType;
switch (ruleType) {
case PostgreSQLParser.RULE_table_name_create: {
case PostgreSqlParser.RULE_table_name_create: {
syntaxContextType = EntityContextType.TABLE_CREATE;
break;
}
case PostgreSQLParser.RULE_table_name: {
case PostgreSqlParser.RULE_table_name: {
syntaxContextType = EntityContextType.TABLE;
break;
}
case PostgreSQLParser.RULE_function_name_create: {
case PostgreSqlParser.RULE_function_name_create: {
syntaxContextType = EntityContextType.FUNCTION_CREATE;
break;
}
case PostgreSQLParser.RULE_function_name: {
case PostgreSqlParser.RULE_function_name: {
syntaxContextType = EntityContextType.FUNCTION;
break;
}
case PostgreSQLParser.RULE_schema_name_create: {
case PostgreSqlParser.RULE_schema_name_create: {
syntaxContextType = EntityContextType.DATABASE_CREATE;
break;
}
case PostgreSQLParser.RULE_schema_name: {
case PostgreSqlParser.RULE_schema_name: {
syntaxContextType = EntityContextType.DATABASE;
break;
}
case PostgreSQLParser.RULE_view_name_create: {
case PostgreSqlParser.RULE_view_name_create: {
syntaxContextType = EntityContextType.VIEW_CREATE;
break;
}
case PostgreSQLParser.RULE_view_name: {
case PostgreSqlParser.RULE_view_name: {
syntaxContextType = EntityContextType.VIEW;
break;
}
case PostgreSQLParser.RULE_database_name_create: {
case PostgreSqlParser.RULE_database_name_create: {
syntaxContextType = EntityContextType.DATABASE_CREATE;
break;
}
case PostgreSQLParser.RULE_database_name: {
case PostgreSqlParser.RULE_database_name: {
syntaxContextType = EntityContextType.DATABASE;
break;
}
case PostgreSQLParser.RULE_procedure_name_create: {
case PostgreSqlParser.RULE_procedure_name_create: {
syntaxContextType = EntityContextType.PROCEDURE_CREATE;
break;
}
case PostgreSQLParser.RULE_procedure_name: {
case PostgreSqlParser.RULE_procedure_name: {
syntaxContextType = EntityContextType.PROCEDURE;
break;
}
case PostgreSQLParser.RULE_column_name_create: {
case PostgreSqlParser.RULE_column_name_create: {
syntaxContextType = EntityContextType.COLUMN_CREATE;
break;
}
case PostgreSQLParser.RULE_column_name: {
case PostgreSqlParser.RULE_column_name: {
syntaxContextType = EntityContextType.COLUMN;
break;
}

View File

@ -18,15 +18,12 @@ import type {
TableNameCreateContext,
ViewNameContext,
ViewNameCreateContext,
} from '../../lib/pgsql/PostgreSQLParser';
import type { PostgreSQLParserListener } from '../../lib/pgsql/PostgreSQLParserListener';
import { EntityContextType } from '../common/basic-parser-types';
import EntityCollector, { StmtContextType } from '../common/entityCollector';
} from '../../lib/postgresql/PostgreSqlParser';
import type { PostgreSqlParserListener } from '../../lib/postgresql/PostgreSqlParserListener';
import { EntityContextType } from '../common/types';
import { StmtContextType, EntityCollector } from '../common/entityCollector';
export default class PostgreSQLEntityCollector
extends EntityCollector
implements PostgreSQLParserListener
{
export class PostgreSqlEntityCollector extends EntityCollector implements PostgreSqlParserListener {
/** ====== Entity Begin */
exitDatabaseName(ctx: DatabaseNameContext) {
this.pushEntity(ctx, EntityContextType.DATABASE);

View File

@ -0,0 +1,12 @@
import { SingleStmtContext } from '../../lib/postgresql/PostgreSqlParser';
import { PostgreSqlParserListener } from '../../lib/postgresql/PostgreSqlParserListener';
import { SplitListener } from '../common/splitListener';
export class PostgreSqlSplitListener
extends SplitListener<SingleStmtContext>
implements PostgreSqlParserListener
{
exitSingleStmt(ctx: SingleStmtContext) {
this._statementsContext.push(ctx);
}
}

View File

@ -2,23 +2,21 @@ import { Token } from 'antlr4ng';
import { CandidatesCollection } from 'antlr4-c3';
import { SparkSqlLexer } from '../../lib/spark/SparkSqlLexer';
import { SparkSqlParser, ProgramContext } from '../../lib/spark/SparkSqlParser';
import BasicParser from '../common/basicParser';
import { Suggestions, EntityContextType, SyntaxSuggestion } from '../common/basic-parser-types';
import { BasicSQL } from '../common/basicSQL';
import { Suggestions, EntityContextType, SyntaxSuggestion } from '../common/types';
import { StmtContextType } from '../common/entityCollector';
import SparkSqlSplitListener from './sparkSplitListener';
import SparkEntityCollector from './sparkEntityCollector';
import { SparkSqlSplitListener } from './sparkSplitListener';
import { SparkEntityCollector } from './sparkEntityCollector';
export { SparkSqlSplitListener, SparkEntityCollector };
export default class SparkSQL extends BasicParser<SparkSqlLexer, ProgramContext, SparkSqlParser> {
export class SparkSQL extends BasicSQL<SparkSqlLexer, ProgramContext, SparkSqlParser> {
protected createLexerFromCharStream(charStreams) {
const lexer = new SparkSqlLexer(charStreams);
return lexer;
return new SparkSqlLexer(charStreams);
}
protected createParserFromTokenStream(tokenStream) {
const parser = new SparkSqlParser(tokenStream);
return parser;
return new SparkSqlParser(tokenStream);
}
protected preferredRules: Set<number> = new Set([

View File

@ -20,13 +20,10 @@ import type {
CreateFunctionContext,
} from '../../lib/spark/SparkSqlParser';
import type { SparkSqlParserListener } from '../../lib/spark/SparkSqlParserListener';
import { EntityContextType } from '../common/basic-parser-types';
import EntityCollector, { StmtContextType } from '../common/entityCollector';
import { EntityContextType } from '../common/types';
import { StmtContextType, EntityCollector } from '../common/entityCollector';
export default class SparkEntityCollector
extends EntityCollector
implements SparkSqlParserListener
{
export class SparkEntityCollector extends EntityCollector implements SparkSqlParserListener {
/** ====== Entity Begin */
exitNamespaceName(ctx: NamespaceNameContext) {
this.pushEntity(ctx, EntityContextType.DATABASE);

View File

@ -1,13 +1,13 @@
import { SingleStatementContext } from '../../lib/spark/SparkSqlParser';
import { SparkSqlParserListener } from '../../lib/spark/SparkSqlParserListener';
import SplitListener from '../common/splitListener';
import { SplitListener } from '../common/splitListener';
export default class SparkSqlSplitListener
export class SparkSqlSplitListener
extends SplitListener<SingleStatementContext>
implements SparkSqlParserListener
{
exitSingleStatement = (ctx: SingleStatementContext) => {
exitSingleStatement(ctx: SingleStatementContext) {
this._statementsContext.push(ctx);
};
}
}

View File

@ -1,24 +1,22 @@
import { Token } from 'antlr4ng';
import { CandidatesCollection } from 'antlr4-c3';
import { TrinoSqlLexer } from '../../lib/trinosql/TrinoSqlLexer';
import { TrinoSqlParser, ProgramContext } from '../../lib/trinosql/TrinoSqlParser';
import BasicParser from '../common/basicParser';
import { Suggestions, EntityContextType, SyntaxSuggestion } from '../common/basic-parser-types';
import { TrinoSqlLexer } from '../../lib/trino/TrinoSqlLexer';
import { TrinoSqlParser, ProgramContext } from '../../lib/trino/TrinoSqlParser';
import { BasicSQL } from '../common/basicSQL';
import { Suggestions, EntityContextType, SyntaxSuggestion } from '../common/types';
import { StmtContextType } from '../common/entityCollector';
import TrinoSqlSplitListener from './trinoSplitListener';
import TrinoEntityCollector from './trinoEntityCollector';
import { TrinoSqlSplitListener } from './trinoSplitListener';
import { TrinoEntityCollector } from './trinoEntityCollector';
export { TrinoSqlSplitListener, TrinoEntityCollector };
export default class TrinoSQL extends BasicParser<TrinoSqlLexer, ProgramContext, TrinoSqlParser> {
export class TrinoSQL extends BasicSQL<TrinoSqlLexer, ProgramContext, TrinoSqlParser> {
protected createLexerFromCharStream(charStreams) {
const lexer = new TrinoSqlLexer(charStreams);
return lexer;
return new TrinoSqlLexer(charStreams);
}
protected createParserFromTokenStream(tokenStream) {
const parser = new TrinoSqlParser(tokenStream);
return parser;
return new TrinoSqlParser(tokenStream);
}
protected get splitListener() {

View File

@ -14,12 +14,12 @@ import type {
TableNameCreateContext,
ViewNameContext,
ViewNameCreateContext,
} from '../../lib/trinosql/TrinoSqlParser';
import type { TrinoSqlListener } from '../../lib/trinosql/TrinoSqlListener';
import { EntityContextType } from '../common/basic-parser-types';
import EntityCollector, { StmtContextType } from '../common/entityCollector';
} from '../../lib/trino/TrinoSqlParser';
import type { TrinoSqlListener } from '../../lib/trino/TrinoSqlListener';
import { EntityContextType } from '../common/types';
import { StmtContextType, EntityCollector } from '../common/entityCollector';
export default class TrinoEntityCollector extends EntityCollector implements TrinoSqlListener {
export class TrinoEntityCollector extends EntityCollector implements TrinoSqlListener {
/** ====== Entity Begin */
exitSchemaName(ctx: SchemaNameContext) {
this.pushEntity(ctx, EntityContextType.DATABASE);

View File

@ -1,12 +1,12 @@
import { SingleStatementContext } from '../../lib/trinosql/TrinoSqlParser';
import { TrinoSqlListener } from '../../lib/trinosql/TrinoSqlListener';
import SplitListener from '../common/splitListener';
import { SingleStatementContext } from '../../lib/trino/TrinoSqlParser';
import { TrinoSqlListener } from '../../lib/trino/TrinoSqlListener';
import { SplitListener } from '../common/splitListener';
export default class TrinoSqlSplitListener
export class TrinoSqlSplitListener
extends SplitListener<SingleStatementContext>
implements TrinoSqlListener
{
exitSingleStatement = (ctx: SingleStatementContext) => {
exitSingleStatement(ctx: SingleStatementContext) {
this._statementsContext.push(ctx);
};
}
}

View File

@ -1,13 +1,13 @@
import FlinkSQL from 'src/parser/flinksql';
import { FlinkSqlLexer } from 'src/lib/flinksql/FlinkSqlLexer';
import { FlinkSQL } from 'src/parser/flink';
import { FlinkSqlLexer } from 'src/lib/flink/FlinkSqlLexer';
import { ErrorListener } from 'src/parser/common/parseErrorListener';
import { CommonTokenStream } from 'antlr4ng';
describe('BasicParser unit tests', () => {
const flinkParser = new FlinkSQL();
describe('BasicSQL unit tests', () => {
const flink = new FlinkSQL();
test('Create lexer', () => {
const sql = 'SELECT * FROM tb1';
const lexer = flinkParser.createLexer(sql);
const lexer = flink.createLexer(sql);
expect(lexer).not.toBeUndefined();
expect(lexer).not.toBeNull();
@ -19,7 +19,7 @@ describe('BasicParser unit tests', () => {
const errorListener: ErrorListener<any> = (err) => {
errors.push(err);
};
const lexer = flinkParser.createLexer(sql, errorListener);
const lexer = flink.createLexer(sql, errorListener);
const tokenStream = new CommonTokenStream(lexer);
tokenStream.fill();
expect(errors.length).not.toBe(0);
@ -27,7 +27,7 @@ describe('BasicParser unit tests', () => {
test('Create parser', () => {
const sql = 'SELECT * FROM tb1';
const parser = flinkParser.createParser(sql);
const parser = flink.createParser(sql);
expect(parser).not.toBeUndefined();
expect(parser).not.toBeNull();
@ -39,7 +39,7 @@ describe('BasicParser unit tests', () => {
const errorListener: ErrorListener<any> = (err) => {
errors.push(err);
};
const parser = flinkParser.createParser(sql, errorListener);
const parser = flink.createParser(sql, errorListener);
parser.program();
expect(errors.length).not.toBe(0);
});
@ -50,7 +50,7 @@ describe('BasicParser unit tests', () => {
const errorListener: ErrorListener<any> = (err) => {
errors.push(err);
};
const parser = flinkParser.createParser(sql, errorListener);
const parser = flink.createParser(sql, errorListener);
parser.program();
expect(errors.length).not.toBe(0);
});
@ -61,7 +61,7 @@ describe('BasicParser unit tests', () => {
const errorListener: ErrorListener<any> = (err) => {
errors.push(err);
};
const parseTree = flinkParser.parse(sql, errorListener);
const parseTree = flink.parse(sql, errorListener);
expect(parseTree).not.toBeUndefined();
expect(parseTree).not.toBeNull();
@ -74,7 +74,7 @@ describe('BasicParser unit tests', () => {
const errorListener: ErrorListener<any> = (err) => {
errors.push(err);
};
const parseTree = flinkParser.parse(sql, errorListener);
const parseTree = flink.parse(sql, errorListener);
expect(parseTree).not.toBeUndefined();
expect(parseTree).not.toBeNull();
@ -83,7 +83,7 @@ describe('BasicParser unit tests', () => {
test('Get All tokens', () => {
const sql = 'SELECT * FROM tbl1;';
const tokens = flinkParser.getAllTokens(sql);
const tokens = flink.getAllTokens(sql);
expect(tokens.length).toBe(8);
expect(tokens[0].type).toBe(FlinkSqlLexer.KW_SELECT);
@ -98,14 +98,14 @@ describe('BasicParser unit tests', () => {
test('Get All tokens with error', () => {
const sql = '袋鼠云数栈UED团队';
const tokens = flinkParser.getAllTokens(sql);
const tokens = flink.getAllTokens(sql);
expect(tokens.length).toBe(1);
expect(tokens[0].type).toBe(FlinkSqlLexer.ID_LITERAL);
});
test('Split sql', () => {
const sql = 'SHOW TABLES;\nSELECT * FROM tb;';
const sqlSlices = flinkParser.splitSQLByStatement(sql);
const sqlSlices = flink.splitSQLByStatement(sql);
expect(sqlSlices.length).toBe(2);
@ -126,7 +126,7 @@ describe('BasicParser unit tests', () => {
test('Split sql with errors', () => {
const sql = 'SHOW TABLES;\nSELECT * FOM tb;';
const sqlSlices = flinkParser.splitSQLByStatement(sql);
const sqlSlices = flink.splitSQLByStatement(sql);
expect(sqlSlices).toBeNull();
});

View File

@ -0,0 +1,12 @@
# FlinkSQL Benchmark
| Name | Rows | Times | Total Time(ms) | Average Time(ms) |
| ---- | ---- | ---- | ---- | ---- |
| CreateTable | 100 | 1 | 150.34 | 150.34 |
| CreateTable | 1000 | 1 | 53.04 | 53.04 |
| CreateTable | 5000 | 1 | 179.04 | 179.04 |
| SelectTable | 100 | 1 | 460.25 | 460.25 |
| SelectTable | 1000 | 1 | 46.24 | 46.24 |
| SelectTable | 5000 | 1 | 505.28 | 505.28 |
| InsertTable | 100 | 1 | 13.58 | 13.58 |
| InsertTable | 1000 | 1 | 33.07 | 33.07 |
| InsertTable | 5000 | 1 | 242.58 | 242.58 |

View File

@ -1,4 +1,4 @@
import FlinkSQL from 'src/parser/flinksql';
import { FlinkSQL } from 'src/parser/flink';
import {
readSQL,
benchmark,
@ -14,14 +14,14 @@ const features = {
};
describe('FlinkSQL benchmark tests', () => {
const parser = new FlinkSQL();
const flink = new FlinkSQL();
let reportsHeader = getReportTableHeader('FlinkSQL Benchmark');
const reportData: string[] = [];
test('createTable Over 100 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('CreateTable Over 100 Rows', () => {
const testSQL = features.createTable[0];
const res = parser.validate(testSQL);
const res = flink.validate(testSQL);
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('CreateTable', 100, 1, totalTimes, averageTimes));
@ -30,7 +30,7 @@ describe('FlinkSQL benchmark tests', () => {
test('createTable Over 1000 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('CreateTable Over 1000 Rows', () => {
const testSQL = features.createTable[1];
const res = parser.validate(testSQL);
const res = flink.validate(testSQL);
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('CreateTable', 1000, 1, totalTimes, averageTimes));
@ -39,7 +39,7 @@ describe('FlinkSQL benchmark tests', () => {
test('createTable Over 5000 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('CreateTable Over 5000 Rows', () => {
const testSQL = features.createTable[2];
const res = parser.validate(testSQL);
const res = flink.validate(testSQL);
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('CreateTable', 5000, 1, totalTimes, averageTimes));
@ -48,7 +48,7 @@ describe('FlinkSQL benchmark tests', () => {
test('selectTable Over 100 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('SelectTable Over 100 Rows', () => {
const testSQL = features.selectTable[0];
const res = parser.validate(testSQL);
const res = flink.validate(testSQL);
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('SelectTable', 100, 1, totalTimes, averageTimes));
@ -57,7 +57,7 @@ describe('FlinkSQL benchmark tests', () => {
test('selectTable Over 1000 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('SelectTable Over 1000 Rows', () => {
const testSQL = features.selectTable[1];
const res = parser.validate(testSQL);
const res = flink.validate(testSQL);
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('SelectTable', 1000, 1, totalTimes, averageTimes));
@ -66,7 +66,7 @@ describe('FlinkSQL benchmark tests', () => {
test('selectTable Over 5000 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('SelectTable Over 5000 Rows', () => {
const testSQL = features.selectTable[2];
const res = parser.validate(testSQL);
const res = flink.validate(testSQL);
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('SelectTable', 5000, 1, totalTimes, averageTimes));
@ -75,7 +75,7 @@ describe('FlinkSQL benchmark tests', () => {
test('insertTable Over 100 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('InsertTable Over 100 Rows', () => {
const testSQL = features.insertTable[0];
const res = parser.validate(testSQL);
const res = flink.validate(testSQL);
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('InsertTable', 100, 1, totalTimes, averageTimes));
@ -84,7 +84,7 @@ describe('FlinkSQL benchmark tests', () => {
test('insertTable Over 1000 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('InsertTable Over 1000 Rows', () => {
const testSQL = features.insertTable[1];
const res = parser.validate(testSQL);
const res = flink.validate(testSQL);
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('InsertTable', 1000, 1, totalTimes, averageTimes));
@ -93,7 +93,7 @@ describe('FlinkSQL benchmark tests', () => {
test('insertTable Over 5000 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('InsertTable Over 5000 Rows', () => {
const testSQL = features.insertTable[2];
const res = parser.validate(testSQL);
const res = flink.validate(testSQL);
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('InsertTable', 5000, 1, totalTimes, averageTimes));

View File

@ -1,9 +1,9 @@
import fs from 'fs';
import path from 'path';
import { ParseTreeListener } from 'antlr4ng';
import FlinkSQL, { FlinkEntityCollector, FlinkSqlSplitListener } from 'src/parser/flinksql';
import { FlinkSqlParserListener } from 'src/lib/flinksql/FlinkSqlParserListener';
import { EntityContextType } from 'src/parser/common/basic-parser-types';
import { FlinkSQL, FlinkEntityCollector, FlinkSqlSplitListener } from 'src/parser/flink';
import { FlinkSqlParserListener } from 'src/lib/flink/FlinkSqlParserListener';
import { EntityContextType } from 'src/parser/common/types';
import { StmtContextType } from 'src/parser/common/entityCollector';
const commonSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'common.sql'), 'utf-8');

View File

@ -1,5 +1,5 @@
import FlinkSQL, { FlinkSqlSplitListener } from 'src/parser/flinksql';
import { FlinkSqlParserListener } from 'src/lib/flinksql/FlinkSqlParserListener';
import { FlinkSQL, FlinkSqlSplitListener } from 'src/parser/flink';
import { FlinkSqlParserListener } from 'src/lib/flink/FlinkSqlParserListener';
const validSQL1 = `INSERT INTO country_page_view
VALUES ('Chinese', 'mumiao', 18),

View File

@ -1,10 +1,10 @@
import FlinkSQL from 'src/parser/flinksql';
import { FlinkSQL } from 'src/parser/flink';
describe('FlinkSQL Lexer tests', () => {
const parser = new FlinkSQL();
const flink = new FlinkSQL();
const sql = 'SELECT * FROM table1';
const tokens = parser.getAllTokens(sql);
const tokens = flink.getAllTokens(sql);
test('token counts', () => {
expect(tokens.length).toBe(7);
});

View File

@ -1,14 +1,14 @@
import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from 'antlr4ng';
import FlinkSQL from 'src/parser/flinksql';
import { FlinkSqlParserListener } from 'src/lib/flinksql/FlinkSqlParserListener';
import { TableExpressionContext } from 'src/lib/flinksql/FlinkSqlParser';
import { ErrorNode, ParserRuleContext, TerminalNode } from 'antlr4ng';
import { FlinkSQL } from 'src/parser/flink';
import { FlinkSqlParserListener } from 'src/lib/flink/FlinkSqlParserListener';
import { TableExpressionContext } from 'src/lib/flink/FlinkSqlParser';
describe('Flink SQL Listener Tests', () => {
const expectTableName = 'user1';
const sql = `select id,name,sex from ${expectTableName};`;
const parser = new FlinkSQL();
const flink = new FlinkSQL();
const parseTree = parser.parse(sql);
const parseTree = flink.parse(sql);
test('Listener enterTableName', async () => {
let result = '';
@ -23,7 +23,7 @@ describe('Flink SQL Listener Tests', () => {
}
const listenTableName = new MyListener();
await parser.listen(listenTableName, parseTree);
await flink.listen(listenTableName, parseTree);
expect(result).toBe(expectTableName);
});
@ -46,7 +46,7 @@ describe('Flink SQL Listener Tests', () => {
`;`,
];
const sql = singleStatementArr.join('\n');
const sqlSlices = parser.splitSQLByStatement(sql);
const sqlSlices = flink.splitSQLByStatement(sql);
expect(sqlSlices).not.toBeNull();

Some files were not shown because too many files have changed in this diff Show More