Compare commits

...

10 Commits

Author SHA1 Message Date
yexinhao
bcf8880520 feat: change the npm package name to lava-oushudb-dt-sql-parser 2024-09-26 16:21:33 +08:00
yexinhao
f6477b35e2 feat: change the npm package name and maintainers 2024-09-26 16:02:57 +08:00
Kijin-Seija
ffb575e317 4.0.2-8 2024-08-26 16:59:41 +08:00
Kijin-Seija
a5c6d2e1f8 init 2024-06-19 20:14:16 +08:00
Kijin-Seija
fe2b0fd9ea 4.0.2-0 2024-06-07 15:49:18 +08:00
Kijin-Seija
9ac6039146 init 2024-06-07 15:41:22 +08:00
Hayden
63df067ae6 chore(release): 4.0.1 2024-04-28 11:42:05 +08:00
Hayden
f1c3bbe17c
feat: deprecate legacy APIs (#304)
* feat: deprecation of legacy APIs

* feat: deprecate plsql language
2024-04-28 11:41:13 +08:00
Hayden
a5387e4729 chore(release): 4.0.0 2024-04-28 10:53:12 +08:00
琉易
25358ec653
fix: #301 pgsql truncate table suggestion (#302) 2024-04-26 09:38:15 +08:00
39 changed files with 19316 additions and 266827 deletions

View File

@ -1,6 +0,0 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
echo 'commitlint'
npx --no -- commitlint --edit

View File

@ -1,4 +0,0 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
npx lint-staged

View File

@ -2,6 +2,20 @@
All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
+### [4.0.1](https://github.com/DTStack/dt-sql-parser/compare/v4.0.0...v4.0.1) (2024-04-28)
+### Features
+* deprecate legacy APIs ([#304](https://github.com/DTStack/dt-sql-parser/issues/304)) ([f1c3bbe](https://github.com/DTStack/dt-sql-parser/commit/f1c3bbe17c2f48a55f8ac1664ce635323c88b579))
+## [4.0.0](https://github.com/DTStack/dt-sql-parser/compare/v4.0.0-beta.4.15...v4.0.0) (2024-04-28)
+### Bug Fixes
+* [#301](https://github.com/DTStack/dt-sql-parser/issues/301) pgsql truncate table suggestion ([#302](https://github.com/DTStack/dt-sql-parser/issues/302)) ([25358ec](https://github.com/DTStack/dt-sql-parser/commit/25358ec65353129933c0711212f2f90d854fa242))
## [4.0.0-beta.4.15](https://github.com/DTStack/dt-sql-parser/compare/v4.0.0-beta.4.14...v4.0.0-beta.4.15) (2024-04-21)

View File

@ -1,6 +1,6 @@
{
-    "name": "dt-sql-parser",
-    "version": "4.0.0-beta.4.15",
+    "name": "lava-oushudb-dt-sql-parser",
+    "version": "4.0.2-9",
    "authors": "DTStack Corporation",
    "description": "SQL Parsers for BigData, built with antlr4",
    "keywords": [
@ -54,14 +54,14 @@
        "typescript": "^5.0.4",
        "yargs-parser": "^21.1.1"
    },
-    "git repository": "https://github.com/DTStack/dt-sql-parser",
-    "repository": "https://github.com/DTStack/dt-sql-parser",
-    "publishConfig": {
-        "registry": "https://registry.npmjs.org/"
-    },
+    "git repository": "https://git.yevpt.com/oushu/lava-fe-dt-sql-parser-oushudb",
+    "repository": "https://git.yevpt.com/oushu/lava-fe-dt-sql-parser-oushudb",
    "dependencies": {
        "antlr4-c3": "3.3.7",
        "antlr4ng": "2.0.11"
    },
-    "sideEffects": false
+    "sideEffects": false,
+    "volta": {
+        "node": "14.21.3"
+    }
}

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@ -668,6 +668,8 @@ KW_EXTENDED : 'EXTENDED';
KW_MAIN : 'MAIN';
KW_SKIP_LOCKED : 'SKIP_LOCKED';
KW_BUFFER_USAGE_LIMIT : 'BUFFER_USAGE_LIMIT';
+KW_DISTRIBUTED : 'DISTRIBUTED';
+KW_WRITABLE : 'WRITABLE';
//
// IDENTIFIERS (4.1.1)

View File

@ -722,14 +722,32 @@ copy_generic_opt_arg_list_item
    ;
createstmt
-    : KW_CREATE opttemp? KW_TABLE opt_if_not_exists? table_name_create (
-        OPEN_PAREN table_column_list? CLOSE_PAREN optinherit? optpartitionspec? table_access_method_clause? optwith? oncommitoption? opttablespace?
+    : create_table_clause opt_if_not_exists? table_name_create (
+        OPEN_PAREN table_column_list? CLOSE_PAREN create_table_options*
        | KW_OF any_name opttypedtableelementlist? optpartitionspec? table_access_method_clause? optwith? oncommitoption? opttablespace?
        | KW_PARTITION KW_OF qualified_name opttypedtableelementlist? partitionboundspec optpartitionspec? table_access_method_clause? optwith?
            oncommitoption? opttablespace?
    ) # columnCreateTable
    ;
+create_table_options
+    : optinherit
+    | optpartitionspec
+    | table_access_method_clause
+    | optwith
+    | oncommitoption
+    | opttablespace
+    | optdistributed
+    ;
+create_table_clause
+    : KW_CREATE opttemp? KW_WRITABLE? KW_EXTERNAL? KW_TABLE
+    ;
+optdistributed
+    : KW_DISTRIBUTED KW_BY OPEN_PAREN column_list CLOSE_PAREN
+    ;
opttemp
    : KW_TEMPORARY
    | KW_TEMP
@ -997,7 +1015,7 @@ alterstatsstmt
    ;
createasstmt
-    : KW_CREATE opttemp? KW_TABLE opt_if_not_exists? create_as_target KW_AS selectstmt opt_with_data? # queryCreateTable
+    : create_table_clause opt_if_not_exists? create_as_target KW_AS selectstmt opt_with_data? # queryCreateTable
    ;
create_as_target
@ -2838,7 +2856,7 @@ preparablestmt
executestmt
    : KW_EXECUTE name execute_param_clause?
-    | KW_CREATE opttemp? KW_TABLE opt_if_not_exists? create_as_target KW_AS KW_EXECUTE name execute_param_clause? opt_with_data?
+    | create_table_clause opt_if_not_exists? create_as_target KW_AS KW_EXECUTE name execute_param_clause? opt_with_data?
    ;
execute_param_clause
@ -5556,7 +5574,6 @@ plsql_unreserved_keyword
    | KW_SLICE
    | KW_SQLSTATE
    | KW_STACKED
-    | KW_TABLE
    //| TABLE_NAME
    | KW_TYPE
    | KW_USE_COLUMN
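
The grammar changes above add OushuDB/Greenplum-style CREATE TABLE syntax to the PostgreSQL parser: create_table_clause now allows optional WRITABLE and EXTERNAL keywords, and create_table_options picks up a DISTRIBUTED BY (...) clause through the new optdistributed rule. A minimal TypeScript sketch of what this should accept, assuming the renamed package still exposes the PostgreSQL class and its validate() helper the way the existing tests do (not verified against a published build):

import { PostgreSQL } from 'lava-oushudb-dt-sql-parser';

const pgsql = new PostgreSQL();

// DISTRIBUTED BY is matched by the new optdistributed alternative of create_table_options.
const distributedErrors = pgsql.validate('CREATE TABLE t1 (id int, name text) DISTRIBUTED BY (id);');

// WRITABLE and EXTERNAL are optional keywords in the new create_table_clause rule.
const externalErrors = pgsql.validate('CREATE WRITABLE EXTERNAL TABLE t2 (id int);');

// Both statements are expected to report zero syntax errors under the updated grammar.
console.log(distributedErrors.length, externalErrors.length);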

View File

@ -1,13 +1,4 @@
-export {
-    MySQL,
-    FlinkSQL,
-    SparkSQL,
-    HiveSQL,
-    PostgreSQL,
-    TrinoSQL,
-    ImpalaSQL,
-    PLSQL,
-} from './parser';
+export { MySQL, FlinkSQL, SparkSQL, HiveSQL, PostgreSQL, TrinoSQL, ImpalaSQL } from './parser';
export {
    MySqlParserListener,
@ -18,8 +9,6 @@ export {
    SparkSqlParserVisitor,
    HiveSqlParserListener,
    HiveSqlParserVisitor,
-    PlSqlParserListener,
-    PlSqlParserVisitor,
    PostgreSqlParserListener,
    PostgreSqlParserVisitor,
    TrinoSqlListener,
@ -30,14 +19,6 @@
export { EntityContextType } from './parser/common/types';
-export {
-    /**
-     * @deprecated SyntaxContextType has been renamed to {@link EntityContextType},
-     * It will be removed when the stable version is released.
-     */
-    EntityContextType as SyntaxContextType,
-} from './parser/common/types';
export { StmtContextType } from './parser/common/entityCollector';
export type { CaretPosition, Suggestions, SyntaxSuggestion } from './parser/common/types';
@ -47,8 +28,3 @@ export type { WordRange, TextSlice } from './parser/common/textAndWord';
export type { SyntaxError, ParseError, ErrorListener } from './parser/common/parseErrorListener';
export type { StmtContext, EntityContext } from './parser/common/entityCollector';
-/**
- * @deprecated Legacy utils will be removed when the stable version is released.
- */
-export * from './utils';
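
With PLSQL, the deprecated SyntaxContextType alias, and the legacy utils gone from the entry point, consumers are left with the exports shown above. A hedged sketch of the surviving API, using only names that appear in this file and in the test suite (the import path assumes the renamed package is resolvable under its new name):

import { PostgreSQL, EntityContextType } from 'lava-oushudb-dt-sql-parser';
import type { CaretPosition } from 'lava-oushudb-dt-sql-parser';

const pgsql = new PostgreSQL();
const caret: CaretPosition = { lineNumber: 1, column: 18 };
const suggestions = pgsql.getSuggestionAtCaretPosition('SELECT * FROM db.', caret);

// EntityContextType is the replacement for the removed SyntaxContextType alias.
const tableSuggestion = suggestions?.syntax.find(
    (item) => item.syntaxContextType === EntityContextType.TABLE
);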

View File

@ -7,9 +7,6 @@ export { MySqlParserVisitor } from './mysql/MySqlParserVisitor';
export { HiveSqlParserListener } from './hive/HiveSqlParserListener';
export { HiveSqlParserVisitor } from './hive/HiveSqlParserVisitor';
-export { PlSqlParserListener } from './plsql/PlSqlParserListener';
-export { PlSqlParserVisitor } from './plsql/PlSqlParserVisitor';
export { SparkSqlParserListener } from './spark/SparkSqlParserListener';
export { SparkSqlParserVisitor } from './spark/SparkSqlParserVisitor';

View File

@ -1,8 +0,0 @@
import { Lexer } from "antlr4ng";
export abstract class PlSqlBaseLexer extends Lexer {
IsNewlineAtPos(pos: number): boolean {
const la = this._input.LA(pos);
return la == -1 || String.fromCharCode(la) == '\n';
}
}

View File

@ -1,29 +0,0 @@
import { TokenStream } from "antlr4ng";
import {SQLParserBase} from "../SQLParserBase";
export abstract class PlSqlBaseParser extends SQLParserBase {
private _isVersion10: boolean = false;
private _isVersion12: boolean = true;
constructor(input: TokenStream) {
super(input);
this._isVersion10 = false;
this._isVersion12 = true;
}
isVersion10(): boolean {
return this._isVersion10;
}
isVersion12(): boolean {
return this._isVersion12;
}
setVersion10(value: boolean): void {
this._isVersion10 = value;
}
setVersion12(value: boolean): void {
this._isVersion12 = value;
}
}

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

View File

@ -546,48 +546,50 @@ KW_EXTENDED=545
KW_MAIN=546
KW_SKIP_LOCKED=547
KW_BUFFER_USAGE_LIMIT=548
-Identifier=549
-QuotedIdentifier=550
-UnterminatedQuotedIdentifier=551
-InvalidQuotedIdentifier=552
-InvalidUnterminatedQuotedIdentifier=553
-UnicodeQuotedIdentifier=554
-UnterminatedUnicodeQuotedIdentifier=555
-InvalidUnicodeQuotedIdentifier=556
-InvalidUnterminatedUnicodeQuotedIdentifier=557
-StringConstant=558
-UnterminatedStringConstant=559
-UnicodeEscapeStringConstant=560
-UnterminatedUnicodeEscapeStringConstant=561
-BeginDollarStringConstant=562
-BinaryStringConstant=563
-UnterminatedBinaryStringConstant=564
-InvalidBinaryStringConstant=565
-InvalidUnterminatedBinaryStringConstant=566
-HexadecimalStringConstant=567
-UnterminatedHexadecimalStringConstant=568
-InvalidHexadecimalStringConstant=569
-InvalidUnterminatedHexadecimalStringConstant=570
-Integral=571
-NumericFail=572
-Numeric=573
-PLSQLVARIABLENAME=574
-PLSQLIDENTIFIER=575
-Whitespace=576
-Newline=577
-LineComment=578
-BlockComment=579
-UnterminatedBlockComment=580
-MetaCommand=581
-EndMetaCommand=582
-ErrorCharacter=583
-EscapeStringConstant=584
-UnterminatedEscapeStringConstant=585
-InvalidEscapeStringConstant=586
-InvalidUnterminatedEscapeStringConstant=587
-DollarText=588
-EndDollarStringConstant=589
-AfterEscapeStringConstantWithNewlineMode_Continued=590
+KW_DISTRIBUTED=549
+KW_WRITABLE=550
+Identifier=551
+QuotedIdentifier=552
+UnterminatedQuotedIdentifier=553
+InvalidQuotedIdentifier=554
+InvalidUnterminatedQuotedIdentifier=555
+UnicodeQuotedIdentifier=556
+UnterminatedUnicodeQuotedIdentifier=557
+InvalidUnicodeQuotedIdentifier=558
+InvalidUnterminatedUnicodeQuotedIdentifier=559
+StringConstant=560
+UnterminatedStringConstant=561
+UnicodeEscapeStringConstant=562
+UnterminatedUnicodeEscapeStringConstant=563
+BeginDollarStringConstant=564
+BinaryStringConstant=565
+UnterminatedBinaryStringConstant=566
+InvalidBinaryStringConstant=567
+InvalidUnterminatedBinaryStringConstant=568
+HexadecimalStringConstant=569
+UnterminatedHexadecimalStringConstant=570
+InvalidHexadecimalStringConstant=571
+InvalidUnterminatedHexadecimalStringConstant=572
+Integral=573
+NumericFail=574
+Numeric=575
+PLSQLVARIABLENAME=576
+PLSQLIDENTIFIER=577
+Whitespace=578
+Newline=579
+LineComment=580
+BlockComment=581
+UnterminatedBlockComment=582
+MetaCommand=583
+EndMetaCommand=584
+ErrorCharacter=585
+EscapeStringConstant=586
+UnterminatedEscapeStringConstant=587
+InvalidEscapeStringConstant=588
+InvalidUnterminatedEscapeStringConstant=589
+DollarText=590
+EndDollarStringConstant=591
+AfterEscapeStringConstantWithNewlineMode_Continued=592
'$'=1
'('=2
')'=3
@ -1134,5 +1136,7 @@ AfterEscapeStringConstantWithNewlineMode_Continued=590
'MAIN'=546
'SKIP_LOCKED'=547
'BUFFER_USAGE_LIMIT'=548
-'\\\\'=582
-'\''=590
+'DISTRIBUTED'=549
+'WRITABLE'=550
+'\\\\'=584
+'\''=592

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

View File

@ -546,48 +546,50 @@ KW_EXTENDED=545
KW_MAIN=546
KW_SKIP_LOCKED=547
KW_BUFFER_USAGE_LIMIT=548
-Identifier=549
-QuotedIdentifier=550
-UnterminatedQuotedIdentifier=551
-InvalidQuotedIdentifier=552
-InvalidUnterminatedQuotedIdentifier=553
-UnicodeQuotedIdentifier=554
-UnterminatedUnicodeQuotedIdentifier=555
-InvalidUnicodeQuotedIdentifier=556
-InvalidUnterminatedUnicodeQuotedIdentifier=557
-StringConstant=558
-UnterminatedStringConstant=559
-UnicodeEscapeStringConstant=560
-UnterminatedUnicodeEscapeStringConstant=561
-BeginDollarStringConstant=562
-BinaryStringConstant=563
-UnterminatedBinaryStringConstant=564
-InvalidBinaryStringConstant=565
-InvalidUnterminatedBinaryStringConstant=566
-HexadecimalStringConstant=567
-UnterminatedHexadecimalStringConstant=568
-InvalidHexadecimalStringConstant=569
-InvalidUnterminatedHexadecimalStringConstant=570
-Integral=571
-NumericFail=572
-Numeric=573
-PLSQLVARIABLENAME=574
-PLSQLIDENTIFIER=575
-Whitespace=576
-Newline=577
-LineComment=578
-BlockComment=579
-UnterminatedBlockComment=580
-MetaCommand=581
-EndMetaCommand=582
-ErrorCharacter=583
-EscapeStringConstant=584
-UnterminatedEscapeStringConstant=585
-InvalidEscapeStringConstant=586
-InvalidUnterminatedEscapeStringConstant=587
-DollarText=588
-EndDollarStringConstant=589
-AfterEscapeStringConstantWithNewlineMode_Continued=590
+KW_DISTRIBUTED=549
+KW_WRITABLE=550
+Identifier=551
+QuotedIdentifier=552
+UnterminatedQuotedIdentifier=553
+InvalidQuotedIdentifier=554
+InvalidUnterminatedQuotedIdentifier=555
+UnicodeQuotedIdentifier=556
+UnterminatedUnicodeQuotedIdentifier=557
+InvalidUnicodeQuotedIdentifier=558
+InvalidUnterminatedUnicodeQuotedIdentifier=559
+StringConstant=560
+UnterminatedStringConstant=561
+UnicodeEscapeStringConstant=562
+UnterminatedUnicodeEscapeStringConstant=563
+BeginDollarStringConstant=564
+BinaryStringConstant=565
+UnterminatedBinaryStringConstant=566
+InvalidBinaryStringConstant=567
+InvalidUnterminatedBinaryStringConstant=568
+HexadecimalStringConstant=569
+UnterminatedHexadecimalStringConstant=570
+InvalidHexadecimalStringConstant=571
+InvalidUnterminatedHexadecimalStringConstant=572
+Integral=573
+NumericFail=574
+Numeric=575
+PLSQLVARIABLENAME=576
+PLSQLIDENTIFIER=577
+Whitespace=578
+Newline=579
+LineComment=580
+BlockComment=581
+UnterminatedBlockComment=582
+MetaCommand=583
+EndMetaCommand=584
+ErrorCharacter=585
+EscapeStringConstant=586
+UnterminatedEscapeStringConstant=587
+InvalidEscapeStringConstant=588
+InvalidUnterminatedEscapeStringConstant=589
+DollarText=590
+EndDollarStringConstant=591
+AfterEscapeStringConstantWithNewlineMode_Continued=592
'$'=1
'('=2
')'=3
@ -1134,5 +1136,7 @@ AfterEscapeStringConstantWithNewlineMode_Continued=590
'MAIN'=546
'SKIP_LOCKED'=547
'BUFFER_USAGE_LIMIT'=548
-'\\\\'=582
-'\''=590
+'DISTRIBUTED'=549
+'WRITABLE'=550
+'\\\\'=584
+'\''=592

File diff suppressed because it is too large

View File

@ -103,6 +103,9 @@ import { Copy_generic_opt_argContext } from "./PostgreSqlParser.js";
import { Copy_generic_opt_arg_listContext } from "./PostgreSqlParser.js";
import { Copy_generic_opt_arg_list_itemContext } from "./PostgreSqlParser.js";
import { ColumnCreateTableContext } from "./PostgreSqlParser.js";
+import { Create_table_optionsContext } from "./PostgreSqlParser.js";
+import { Create_table_clauseContext } from "./PostgreSqlParser.js";
+import { OptdistributedContext } from "./PostgreSqlParser.js";
import { OpttempContext } from "./PostgreSqlParser.js";
import { Table_column_listContext } from "./PostgreSqlParser.js";
import { OpttableelementlistContext } from "./PostgreSqlParser.js";
@ -1852,6 +1855,36 @@ export class PostgreSqlParserListener implements ParseTreeListener {
     * @param ctx the parse tree
     */
    exitColumnCreateTable?: (ctx: ColumnCreateTableContext) => void;
+    /**
+     * Enter a parse tree produced by `PostgreSqlParser.create_table_options`.
+     * @param ctx the parse tree
+     */
+    enterCreate_table_options?: (ctx: Create_table_optionsContext) => void;
+    /**
+     * Exit a parse tree produced by `PostgreSqlParser.create_table_options`.
+     * @param ctx the parse tree
+     */
+    exitCreate_table_options?: (ctx: Create_table_optionsContext) => void;
+    /**
+     * Enter a parse tree produced by `PostgreSqlParser.create_table_clause`.
+     * @param ctx the parse tree
+     */
+    enterCreate_table_clause?: (ctx: Create_table_clauseContext) => void;
+    /**
+     * Exit a parse tree produced by `PostgreSqlParser.create_table_clause`.
+     * @param ctx the parse tree
+     */
+    exitCreate_table_clause?: (ctx: Create_table_clauseContext) => void;
+    /**
+     * Enter a parse tree produced by `PostgreSqlParser.optdistributed`.
+     * @param ctx the parse tree
+     */
+    enterOptdistributed?: (ctx: OptdistributedContext) => void;
+    /**
+     * Exit a parse tree produced by `PostgreSqlParser.optdistributed`.
+     * @param ctx the parse tree
+     */
+    exitOptdistributed?: (ctx: OptdistributedContext) => void;
    /**
     * Enter a parse tree produced by `PostgreSqlParser.opttemp`.
     * @param ctx the parse tree

View File

@ -103,6 +103,9 @@ import { Copy_generic_opt_argContext } from "./PostgreSqlParser.js";
import { Copy_generic_opt_arg_listContext } from "./PostgreSqlParser.js";
import { Copy_generic_opt_arg_list_itemContext } from "./PostgreSqlParser.js";
import { ColumnCreateTableContext } from "./PostgreSqlParser.js";
+import { Create_table_optionsContext } from "./PostgreSqlParser.js";
+import { Create_table_clauseContext } from "./PostgreSqlParser.js";
+import { OptdistributedContext } from "./PostgreSqlParser.js";
import { OpttempContext } from "./PostgreSqlParser.js";
import { Table_column_listContext } from "./PostgreSqlParser.js";
import { OpttableelementlistContext } from "./PostgreSqlParser.js";
@ -1473,6 +1476,24 @@ export class PostgreSqlParserVisitor<Result> extends AbstractParseTreeVisitor<Result> {
     * @return the visitor result
     */
    visitColumnCreateTable?: (ctx: ColumnCreateTableContext) => Result;
+    /**
+     * Visit a parse tree produced by `PostgreSqlParser.create_table_options`.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    visitCreate_table_options?: (ctx: Create_table_optionsContext) => Result;
+    /**
+     * Visit a parse tree produced by `PostgreSqlParser.create_table_clause`.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    visitCreate_table_clause?: (ctx: Create_table_clauseContext) => Result;
+    /**
+     * Visit a parse tree produced by `PostgreSqlParser.optdistributed`.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    visitOptdistributed?: (ctx: OptdistributedContext) => Result;
    /**
     * Visit a parse tree produced by `PostgreSqlParser.opttemp`.
     * @param ctx the parse tree
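
The regenerated listener and visitor expose optional hooks for the three new rules. A sketch of collecting a DISTRIBUTED BY clause through the listener, mirroring the pattern used by the listener tests elsewhere in this repository (assumed usage, not run against this build):

import { PostgreSQL, PostgreSqlParserListener } from 'lava-oushudb-dt-sql-parser';

class DistributedByCollector extends PostgreSqlParserListener {
    result = '';
    enterOptdistributed = (ctx) => {
        // Raw clause text without whitespace, e.g. "DISTRIBUTEDBY(id)".
        this.result = ctx.getText();
    };
}

const pgsql = new PostgreSQL();
const parseTree = pgsql.parse('CREATE TABLE t1 (id int) DISTRIBUTED BY (id);');
const collector = new DistributedByCollector();
pgsql.listen(collector, parseTree);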

View File

@ -1,5 +1,4 @@
export { MySQL } from './mysql';
-export { PLSQL } from './plsql';
export { HiveSQL } from './hive';
export { FlinkSQL } from './flink';
export { SparkSQL } from './spark';

View File

@ -1,37 +0,0 @@
import { CharStream, CommonTokenStream, Token } from 'antlr4ng';
import { CandidatesCollection } from 'antlr4-c3';
import { PlSqlLexer } from '../lib/plsql/PlSqlLexer';
import { PlSqlParser, ProgramContext } from '../lib/plsql/PlSqlParser';
import { BasicSQL } from './common/basicSQL';
import { Suggestions } from './common/types';
export class PLSQL extends BasicSQL<PlSqlLexer, ProgramContext, PlSqlParser> {
protected createLexerFromCharStream(charStreams: CharStream) {
return new PlSqlLexer(charStreams);
}
protected createParserFromTokenStream(tokenStream: CommonTokenStream) {
return new PlSqlParser(tokenStream);
}
protected preferredRules: Set<number> = new Set();
protected get splitListener() {
return null as any;
}
protected createEntityCollector(input: string, caretTokenIndex?: number) {
return null as any;
}
protected processCandidates(
candidates: CandidatesCollection,
allTokens: Token[],
caretTokenIndex: number
): Suggestions<Token> {
return {
syntax: [],
keywords: [],
};
}
}

View File

@ -1,202 +0,0 @@
import { Legacy_TokenType, Legacy_Token, Legacy_TokenReg } from './token';
/**
* @param {String} sql
* @deprecated use parser.createLexer() instead.
*/
function legacy_lexer(input: string): Legacy_Token[] {
let current = 0;
let line = 1;
const tokens: Legacy_Token[] = [];
const extract = (
currentChar: string,
validator: RegExp,
TokenType: Legacy_TokenType
): Legacy_Token => {
let value = '';
const start = current;
while (validator.test(currentChar)) {
value += currentChar;
currentChar = input[++current];
}
return {
type: TokenType,
start: start,
end: current,
lineNumber: line,
value: value,
};
};
const matchFunction = () => {
const bracketNum = [current];
for (let i = current + 1; i < input.length; i++) {
const currentChar = input[i];
if (currentChar === '\n') {
line++;
}
if (Legacy_TokenReg.LeftSmallBracket.test(currentChar)) {
bracketNum.push(i);
}
if (Legacy_TokenReg.RightSmallBracket.test(currentChar)) {
const start = bracketNum.pop();
const end = i + 1;
if (bracketNum.length === 0) {
current = end;
tokens.push({
type: Legacy_TokenType.FunctionArguments,
value: input.slice(start, end),
start,
lineNumber: line,
end,
});
return;
}
}
}
};
const matchQuotation = (
currentChar: string,
validator: RegExp,
TokenType: Legacy_TokenType
) => {
do {
if (currentChar === '\n') {
line++;
}
currentChar = input[++current];
} while (!validator.test(currentChar));
++current;
};
while (current < input.length) {
let char = input[current];
if (char === '\n') {
line++;
current++;
continue;
}
if (Legacy_TokenReg.LeftSmallBracket.test(char)) {
matchFunction();
continue;
}
if (Legacy_TokenReg.BackQuotation.test(char)) {
matchQuotation(char, Legacy_TokenReg.BackQuotation, Legacy_TokenType.BackQuotation);
continue;
}
if (Legacy_TokenReg.SingleQuotation.test(char)) {
matchQuotation(char, Legacy_TokenReg.SingleQuotation, Legacy_TokenType.SingleQuotation);
continue;
}
if (Legacy_TokenReg.DoubleQuotation.test(char)) {
matchQuotation(char, Legacy_TokenReg.DoubleQuotation, Legacy_TokenType.DoubleQuotation);
continue;
}
if (char === '-' && input[current + 1] === '-') {
let value = '';
const start = current;
while (char !== '\n' && current < input.length) {
value += char;
char = input[++current];
}
tokens.push({
type: Legacy_TokenType.Comment,
value,
start: start,
lineNumber: line,
end: current,
});
continue;
}
if (char === '/' && input[current + 1] === '*') {
let value = '';
const start = current;
const startLine = line;
while (!(char === '/' && input[current - 1] === '*')) {
if (char === '\n') {
line++;
}
value += char;
char = input[++current];
}
value += char;
++current;
tokens.push({
type: Legacy_TokenType.Comment,
value,
start: start,
lineNumber: startLine,
end: current,
});
continue;
}
if (Legacy_TokenReg.StatementTerminator.test(char)) {
const newToken = extract(
char,
Legacy_TokenReg.StatementTerminator,
Legacy_TokenType.StatementTerminator
);
tokens.push(newToken);
continue;
}
current++;
}
return tokens;
}
/**
* split sql
* @param {String} sql
* @deprecated use parser.splitSQLByStatement() instead.
*/
function legacy_splitSql(sql: string) {
const tokens = legacy_lexer(sql);
const sqlArr = [];
let startIndex = 0;
tokens.forEach((ele: Legacy_Token) => {
if (ele.type === Legacy_TokenType.StatementTerminator) {
sqlArr.push(sql.slice(startIndex, ele.end));
startIndex = ele.end;
}
});
if (startIndex < sql.length) {
sqlArr.push(sql.slice(startIndex));
}
return sqlArr;
}
/**
* clean comment
* @param {String} sql
* @deprecated will be removed in future.
*/
function legacy_cleanSql(sql: string) {
sql = sql.trim();
const tokens = legacy_lexer(sql);
let resultSql = '';
let startIndex = 0;
tokens.forEach((ele: Legacy_Token) => {
if (ele.type === Legacy_TokenType.Comment) {
resultSql += sql.slice(startIndex, ele.start);
startIndex = ele.end + 1;
}
});
resultSql += sql.slice(startIndex);
return resultSql;
}
export { legacy_cleanSql, legacy_splitSql, legacy_lexer };

View File

@ -1,62 +0,0 @@
/**
* @deprecated will be removed in future.
*/
export enum Legacy_TokenType {
/**
* Enclosed in single/double/back quotation, `` Symbol
* 'abc', "abc", `abc`
*/
SingleQuotation = 'SingleQuotation',
DoubleQuotation = 'DoubleQuotation',
BackQuotation = 'BackQuotation',
/**
* Language element type
*/
Comment = 'Comment',
/**
* Statement
*/
StatementTerminator = 'StatementTerminator',
/**
* Others
*/
Error = 'Error',
/**
* Left small Bracket
*/
LeftSmallBracket = 'LeftSmallBracket',
/**
* Left small Bracket
*/
RightSmallBracket = 'RightSmallBracket',
Comma = 'Comma',
FunctionArguments = 'FunctionArguments',
}
/**
* @deprecated will be removed in future.
* Token object
*/
export interface Legacy_Token {
type: Legacy_TokenType;
value: string;
start?: number;
end: number;
lineNumber: number;
message?: string;
}
/**
* @deprecated will be removed in future.
* Token recognition rules
*/
export const Legacy_TokenReg = {
[Legacy_TokenType.StatementTerminator]: /[;]/,
[Legacy_TokenType.SingleQuotation]: /['|\']/,
[Legacy_TokenType.DoubleQuotation]: /["]/,
[Legacy_TokenType.BackQuotation]: /[`]/,
[Legacy_TokenType.LeftSmallBracket]: /[(]/,
[Legacy_TokenType.RightSmallBracket]: /[)]/,
[Legacy_TokenType.Comma]: /[,]/,
};

View File

@ -1,12 +0,0 @@
import { PLSQL } from 'src/parser/plsql';
describe('PLSQL Lexer tests', () => {
const plsql = new PLSQL();
const sql = 'select id,name,sex from user1;';
const tokens = plsql.getAllTokens(sql);
test('token counts', () => {
expect(tokens.length).toBe(12);
});
});

View File

@ -1,24 +0,0 @@
import { PLSQL } from 'src/parser/plsql';
import { PlSqlParserListener } from 'src/lib/plsql/PlSqlParserListener';
describe('PLSQL Listener Tests', () => {
const expectTableName = 'user1';
const sql = `select id,name,sex from ${expectTableName};`;
const plsql = new PLSQL();
const parseTree = plsql.parse(sql);
test('Listener enterTableName', async () => {
class MyListener extends PlSqlParserListener {
result = '';
enterTable_ref_list = (ctx) => {
this.result = ctx.getText().toLowerCase();
};
}
const listener = new MyListener();
plsql.listen(listener, parseTree);
expect(listener.result).toBe(expectTableName);
});
});

View File

@ -1,23 +0,0 @@
import { PLSQL } from 'src/parser/plsql';
describe('PLSQL Syntax Tests', () => {
const plsql = new PLSQL();
test('Test simple select Statement', () => {
const sql = 'select id,name from user1;';
const result = plsql.validate(sql);
expect(result.length).toBe(0);
});
test(`Test select, where, order by`, () => {
const sql = `
select eid, emp_last, mgr_id, reportlevel
from reports_to_101 r, auto a
where r.c1 = a.c2
order by reportlevel, eid
`;
const result = plsql.validate(sql);
expect(result.length).toBe(0);
});
});

View File

@ -1,31 +0,0 @@
import { PLSQL } from 'src/parser/plsql';
import { PlSqlParserVisitor } from 'src/lib/plsql/PlSqlParserVisitor';
describe('PLSQL Visitor Tests', () => {
const expectTableName = 'user1';
const sql = `select id,name,sex from ${expectTableName};`;
const plsql = new PLSQL();
const parseTree = plsql.parse(sql);
test('Visitor visitTable_ref_list', () => {
class MyVisitor extends PlSqlParserVisitor<string> {
defaultResult(): string {
return '';
}
aggregateResult(aggregate: string, nextResult: string): string {
return aggregate + nextResult;
}
visitProgram = (ctx) => {
return this.visitChildren(ctx);
};
visitTable_ref_list = (ctx) => {
return ctx.getText().toLowerCase();
};
}
const visitor = new MyVisitor();
const result = visitor.visit(parseTree);
expect(result).toBe(expectTableName);
});
});

View File

@ -75,3 +75,7 @@ UPDATE tablename SET columnname = a + b, (col1, col2) = (a+3, b+4);
VACUUM tablename (col1, col2);
SELECT * FROM db.tbs GROUP BY (col1, col2) ORDER BY col3;
+TRUNCATE TABLE ;
+TRUNCATE TABLE t1;

View File

@ -970,4 +970,36 @@ describe('Postgre SQL Syntax Suggestion', () => {
        expect(suggestion3).not.toBeUndefined();
        expect(suggestion3?.wordRanges.map((token) => token.text)).toEqual(['col3']);
    });
+    test('TRUNCATE TABLE', () => {
+        const pos1: CaretPosition = {
+            lineNumber: 79,
+            column: 16,
+        };
+        const pos2: CaretPosition = {
+            lineNumber: 81,
+            column: 18,
+        };
+        const syntaxes1 = postgresql.getSuggestionAtCaretPosition(
+            commentOtherLine(syntaxSql, pos1.lineNumber),
+            pos1
+        )?.syntax;
+        const syntaxes2 = postgresql.getSuggestionAtCaretPosition(
+            commentOtherLine(syntaxSql, pos2.lineNumber),
+            pos2
+        )?.syntax;
+        const suggestion1 = syntaxes1?.find(
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
+        );
+        const suggestion2 = syntaxes2?.find(
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
+        );
+        expect(suggestion1).not.toBeUndefined();
+        expect(suggestion1?.wordRanges.map((token) => token.text)).toEqual([]);
+        expect(suggestion2).not.toBeUndefined();
+        expect(suggestion2?.wordRanges.map((token) => token.text)).toEqual(['t1']);
+    });
});

View File

@ -1,89 +0,0 @@
import { legacy_lexer, legacy_splitSql, legacy_cleanSql } from '../../src/utils';
import { Legacy_TokenType } from '../../src/utils/token';
describe('utils', () => {
test('split single sql', () => {
const sql = 'select id,name from user';
const result = legacy_splitSql(sql);
expect(result.length).toEqual(1);
});
test('split multiple sql', () => {
const sql = `-- a ;
select * from a;
/*
xxx
xxx
*/
select user from b`;
const result = legacy_splitSql(sql);
expect(result.length).toEqual(2);
});
test('split special quotation sql', () => {
const sql = `select regexp_replace('a', 'bc', 'xfe'feefe', '233');
select regexp_replace('abc', "fe", '233');`;
const result = legacy_splitSql(sql);
expect(result.length).toEqual(2);
});
// test nested bracket
test('split nested bracket sql', () => {
const sql = `WITH cte_sales_amounts (staff, sales, year)
AS
(
SELECT
first_name + ' ' + last_name,
SUM(quantity * list_price * (1 - discount)),
YEAR(order_date)
FROM
sales.orders o
INNER JOIN sales.order_items i ON i.order_id = o.order_id
INNER JOIN sales.staffs s ON s.staff_id = o.staff_id
)
SELECT staff, sales
FROM cte_sales_amounts
WHERE year = 2018;
SELECT * FROM table;`;
const result = legacy_splitSql(sql);
expect(result.length).toEqual(2);
});
test('lexer', () => {
const sql = `-- a ;
select * from a;
/*
xxx
xxx
*/
select user from b;`;
const result = legacy_lexer(sql);
expect(result.length).toEqual(4);
});
test('lexer for comments', () => {
const sql = `select * from a;--comments`;
const expected = `--comments`;
const result = legacy_lexer(sql);
const comments = result.find((token) => token.type === Legacy_TokenType.Comment);
expect(comments?.value).toEqual(expected);
});
test('cleanSql', () => {
const sql = `-- a ;
select * from a;
/*
xxx
xxx
*/
select user from b`;
const result = legacy_cleanSql(sql);
expect(result.indexOf('xxx')).toEqual(-1);
});
test('clean SQL white spaces', () => {
const sql = `
select * from a; `;
const expected = 'select * from a;';
const result = legacy_cleanSql(sql);
expect(result).toEqual(expected);
});
});