Compare commits: 8baabd027b ... bcf8880520 (10 commits)

Commits (SHA1):
bcf8880520
f6477b35e2
ffb575e317
a5c6d2e1f8
fe2b0fd9ea
9ac6039146
63df067ae6
f1c3bbe17c
a5387e4729
25358ec653
@@ -1,6 +0,0 @@  (deleted Husky hook running commitlint)
-#!/usr/bin/env sh
-. "$(dirname -- "$0")/_/husky.sh"
-
-echo 'commitlint'
-
-npx --no -- commitlint --edit
@@ -1,4 +0,0 @@  (deleted Husky hook running lint-staged)
-#!/usr/bin/env sh
-. "$(dirname -- "$0")/_/husky.sh"
-
-npx lint-staged
CHANGELOG.md (14 changed lines)
@@ -2,6 +2,20 @@

 All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.

+### [4.0.1](https://github.com/DTStack/dt-sql-parser/compare/v4.0.0...v4.0.1) (2024-04-28)
+
+
+### Features
+
+* deprecate legacy APIs ([#304](https://github.com/DTStack/dt-sql-parser/issues/304)) ([f1c3bbe](https://github.com/DTStack/dt-sql-parser/commit/f1c3bbe17c2f48a55f8ac1664ce635323c88b579))
+
+## [4.0.0](https://github.com/DTStack/dt-sql-parser/compare/v4.0.0-beta.4.15...v4.0.0) (2024-04-28)
+
+
+### Bug Fixes
+
+* [#301](https://github.com/DTStack/dt-sql-parser/issues/301) pgsql truncate table suggestion ([#302](https://github.com/DTStack/dt-sql-parser/issues/302)) ([25358ec](https://github.com/DTStack/dt-sql-parser/commit/25358ec65353129933c0711212f2f90d854fa242))
+
 ## [4.0.0-beta.4.15](https://github.com/DTStack/dt-sql-parser/compare/v4.0.0-beta.4.14...v4.0.0-beta.4.15) (2024-04-21)
package.json (16 changed lines)
@@ -1,6 +1,6 @@
 {
-    "name": "dt-sql-parser",
-    "version": "4.0.0-beta.4.15",
+    "name": "lava-oushudb-dt-sql-parser",
+    "version": "4.0.2-9",
     "authors": "DTStack Corporation",
     "description": "SQL Parsers for BigData, built with antlr4",
     "keywords": [
@@ -54,14 +54,14 @@
     "typescript": "^5.0.4",
     "yargs-parser": "^21.1.1"
   },
-  "git repository": "https://github.com/DTStack/dt-sql-parser",
-  "repository": "https://github.com/DTStack/dt-sql-parser",
   "publishConfig": {
     "registry": "https://registry.npmjs.org/"
   },
+  "git repository": "https://git.yevpt.com/oushu/lava-fe-dt-sql-parser-oushudb",
+  "repository": "https://git.yevpt.com/oushu/lava-fe-dt-sql-parser-oushudb",
   "dependencies": {
     "antlr4-c3": "3.3.7",
     "antlr4ng": "2.0.11"
   },
-  "sideEffects": false
+  "sideEffects": false,
+  "volta": {
+    "node": "14.21.3"
+  }
 }
(Two file diffs suppressed because they are too large.)
@@ -668,6 +668,8 @@ KW_EXTENDED : 'EXTENDED';
 KW_MAIN : 'MAIN';
 KW_SKIP_LOCKED : 'SKIP_LOCKED';
 KW_BUFFER_USAGE_LIMIT : 'BUFFER_USAGE_LIMIT';
+KW_DISTRIBUTED : 'DISTRIBUTED';
+KW_WRITABLE : 'WRITABLE';
 //

 // IDENTIFIERS (4.1.1)
@@ -722,14 +722,32 @@ copy_generic_opt_arg_list_item
    ;

 createstmt
-    : KW_CREATE opttemp? KW_TABLE opt_if_not_exists? table_name_create (
-        OPEN_PAREN table_column_list? CLOSE_PAREN optinherit? optpartitionspec? table_access_method_clause? optwith? oncommitoption? opttablespace?
+    : create_table_clause opt_if_not_exists? table_name_create (
+        OPEN_PAREN table_column_list? CLOSE_PAREN create_table_options*
         | KW_OF any_name opttypedtableelementlist? optpartitionspec? table_access_method_clause? optwith? oncommitoption? opttablespace?
         | KW_PARTITION KW_OF qualified_name opttypedtableelementlist? partitionboundspec optpartitionspec? table_access_method_clause? optwith?
             oncommitoption? opttablespace?
     ) # columnCreateTable
    ;

+create_table_options
+    : optinherit
+    | optpartitionspec
+    | table_access_method_clause
+    | optwith
+    | oncommitoption
+    | opttablespace
+    | optdistributed
+    ;
+
+create_table_clause
+    : KW_CREATE opttemp? KW_WRITABLE? KW_EXTERNAL? KW_TABLE
+    ;
+
+optdistributed
+    : KW_DISTRIBUTED KW_BY OPEN_PAREN column_list CLOSE_PAREN
+    ;
+
 opttemp
    : KW_TEMPORARY
    | KW_TEMP
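The grammar changes in this hunk replace the fixed CREATE ... TABLE prefix with a create_table_clause that optionally accepts WRITABLE and EXTERNAL, and add DISTRIBUTED BY (...) as a table option, which looks like OushuDB/Greenplum-style syntax. A minimal sketch of exercising the new branches through the library's PostgreSQL entry point; it assumes the fork keeps the upstream dt-sql-parser API (a PostgreSQL class with validate()) and that the regenerated parser accepts exactly this shape:

import { PostgreSQL } from 'lava-oushudb-dt-sql-parser';

const postgresql = new PostgreSQL();

// Exercises create_table_clause (KW_WRITABLE? KW_EXTERNAL? KW_TABLE) and the
// new optdistributed option (KW_DISTRIBUTED KW_BY OPEN_PAREN column_list CLOSE_PAREN).
const sql = `
    CREATE WRITABLE EXTERNAL TABLE sales_export (id int, amount numeric)
    DISTRIBUTED BY (id);
`;

// validate() returns the list of parse errors; an empty array means the
// statement matched the modified grammar.
console.log(postgresql.validate(sql));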
@@ -997,7 +1015,7 @@ alterstatsstmt
    ;

 createasstmt
-    : KW_CREATE opttemp? KW_TABLE opt_if_not_exists? create_as_target KW_AS selectstmt opt_with_data? # queryCreateTable
+    : create_table_clause opt_if_not_exists? create_as_target KW_AS selectstmt opt_with_data? # queryCreateTable
    ;

 create_as_target
@@ -2838,7 +2856,7 @@ preparablestmt

 executestmt
    : KW_EXECUTE name execute_param_clause?
-    | KW_CREATE opttemp? KW_TABLE opt_if_not_exists? create_as_target KW_AS KW_EXECUTE name execute_param_clause? opt_with_data?
+    | create_table_clause opt_if_not_exists? create_as_target KW_AS KW_EXECUTE name execute_param_clause? opt_with_data?
    ;

 execute_param_clause
@@ -5556,7 +5574,6 @@ plsql_unreserved_keyword
    | KW_SLICE
    | KW_SQLSTATE
    | KW_STACKED
    | KW_TABLE
    //| TABLE_NAME
    | KW_TYPE
    | KW_USE_COLUMN
src/index.ts (26 changed lines)
@@ -1,13 +1,4 @@
-export {
-    MySQL,
-    FlinkSQL,
-    SparkSQL,
-    HiveSQL,
-    PostgreSQL,
-    TrinoSQL,
-    ImpalaSQL,
-    PLSQL,
-} from './parser';
+export { MySQL, FlinkSQL, SparkSQL, HiveSQL, PostgreSQL, TrinoSQL, ImpalaSQL } from './parser';

 export {
     MySqlParserListener,
@@ -18,8 +9,6 @@ export {
     SparkSqlParserVisitor,
     HiveSqlParserListener,
     HiveSqlParserVisitor,
-    PlSqlParserListener,
-    PlSqlParserVisitor,
     PostgreSqlParserListener,
     PostgreSqlParserVisitor,
     TrinoSqlListener,
@@ -30,14 +19,6 @@ export {

 export { EntityContextType } from './parser/common/types';

-export {
-    /**
-     * @deprecated SyntaxContextType has been renamed to {@link EntityContextType},
-     * It will be removed when the stable version is released.
-     */
-    EntityContextType as SyntaxContextType,
-} from './parser/common/types';
-
 export { StmtContextType } from './parser/common/entityCollector';

 export type { CaretPosition, Suggestions, SyntaxSuggestion } from './parser/common/types';
@@ -47,8 +28,3 @@ export type { WordRange, TextSlice } from './parser/common/textAndWord';

 export type { SyntaxError, ParseError, ErrorListener } from './parser/common/parseErrorListener';

 export type { StmtContext, EntityContext } from './parser/common/entityCollector';
-
-/**
- * @deprecated Legacy utils will be removed when the stable version is released.
- */
-export * from './utils';
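With the deprecated re-exports removed from src/index.ts, downstream code has to import the current names directly. A small migration sketch under that assumption (the package name is taken from the fork's package.json above):

// Previously available from the package root and removed in this change:
// import { SyntaxContextType } from 'dt-sql-parser';
// import { legacy_splitSql } from 'dt-sql-parser';

// Now: the enum keeps only its current name, and the legacy utils are no
// longer re-exported from the package root.
import { EntityContextType, StmtContextType } from 'lava-oushudb-dt-sql-parser';

console.log(EntityContextType.TABLE, StmtContextType);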
@@ -7,9 +7,6 @@ export { MySqlParserVisitor } from './mysql/MySqlParserVisitor';
 export { HiveSqlParserListener } from './hive/HiveSqlParserListener';
 export { HiveSqlParserVisitor } from './hive/HiveSqlParserVisitor';

-export { PlSqlParserListener } from './plsql/PlSqlParserListener';
-export { PlSqlParserVisitor } from './plsql/PlSqlParserVisitor';
-
 export { SparkSqlParserListener } from './spark/SparkSqlParserListener';
 export { SparkSqlParserVisitor } from './spark/SparkSqlParserVisitor';
@@ -1,8 +0,0 @@  (deleted: PlSqlBaseLexer)
-import { Lexer } from "antlr4ng";
-
-export abstract class PlSqlBaseLexer extends Lexer {
-    IsNewlineAtPos(pos: number): boolean {
-        const la = this._input.LA(pos);
-        return la == -1 || String.fromCharCode(la) == '\n';
-    }
-}
@@ -1,29 +0,0 @@  (deleted: PlSqlBaseParser)
-import { TokenStream } from "antlr4ng";
-import {SQLParserBase} from "../SQLParserBase";
-export abstract class PlSqlBaseParser extends SQLParserBase {
-
-    private _isVersion10: boolean = false;
-    private _isVersion12: boolean = true;
-
-    constructor(input: TokenStream) {
-        super(input);
-        this._isVersion10 = false;
-        this._isVersion12 = true;
-    }
-
-    isVersion10(): boolean {
-        return this._isVersion10;
-    }
-
-    isVersion12(): boolean {
-        return this._isVersion12;
-    }
-
-    setVersion10(value: boolean): void {
-        this._isVersion10 = value;
-    }
-
-    setVersion12(value: boolean): void {
-        this._isVersion12 = value;
-    }
-}
(Several file diffs suppressed because they are too large or contain overly long lines, among them src/lib/plsql/PlSqlParser.ts with 187028 changed lines.)
(Generated .tokens token definitions, renumbered after inserting KW_DISTRIBUTED and KW_WRITABLE.)
@@ -546,48 +546,50 @@ KW_EXTENDED=545
 KW_MAIN=546
 KW_SKIP_LOCKED=547
 KW_BUFFER_USAGE_LIMIT=548
-Identifier=549
-QuotedIdentifier=550
-UnterminatedQuotedIdentifier=551
-InvalidQuotedIdentifier=552
-InvalidUnterminatedQuotedIdentifier=553
-UnicodeQuotedIdentifier=554
-UnterminatedUnicodeQuotedIdentifier=555
-InvalidUnicodeQuotedIdentifier=556
-InvalidUnterminatedUnicodeQuotedIdentifier=557
-StringConstant=558
-UnterminatedStringConstant=559
-UnicodeEscapeStringConstant=560
-UnterminatedUnicodeEscapeStringConstant=561
-BeginDollarStringConstant=562
-BinaryStringConstant=563
-UnterminatedBinaryStringConstant=564
-InvalidBinaryStringConstant=565
-InvalidUnterminatedBinaryStringConstant=566
-HexadecimalStringConstant=567
-UnterminatedHexadecimalStringConstant=568
-InvalidHexadecimalStringConstant=569
-InvalidUnterminatedHexadecimalStringConstant=570
-Integral=571
-NumericFail=572
-Numeric=573
-PLSQLVARIABLENAME=574
-PLSQLIDENTIFIER=575
-Whitespace=576
-Newline=577
-LineComment=578
-BlockComment=579
-UnterminatedBlockComment=580
-MetaCommand=581
-EndMetaCommand=582
-ErrorCharacter=583
-EscapeStringConstant=584
-UnterminatedEscapeStringConstant=585
-InvalidEscapeStringConstant=586
-InvalidUnterminatedEscapeStringConstant=587
-DollarText=588
-EndDollarStringConstant=589
-AfterEscapeStringConstantWithNewlineMode_Continued=590
+KW_DISTRIBUTED=549
+KW_WRITABLE=550
+Identifier=551
+QuotedIdentifier=552
+UnterminatedQuotedIdentifier=553
+InvalidQuotedIdentifier=554
+InvalidUnterminatedQuotedIdentifier=555
+UnicodeQuotedIdentifier=556
+UnterminatedUnicodeQuotedIdentifier=557
+InvalidUnicodeQuotedIdentifier=558
+InvalidUnterminatedUnicodeQuotedIdentifier=559
+StringConstant=560
+UnterminatedStringConstant=561
+UnicodeEscapeStringConstant=562
+UnterminatedUnicodeEscapeStringConstant=563
+BeginDollarStringConstant=564
+BinaryStringConstant=565
+UnterminatedBinaryStringConstant=566
+InvalidBinaryStringConstant=567
+InvalidUnterminatedBinaryStringConstant=568
+HexadecimalStringConstant=569
+UnterminatedHexadecimalStringConstant=570
+InvalidHexadecimalStringConstant=571
+InvalidUnterminatedHexadecimalStringConstant=572
+Integral=573
+NumericFail=574
+Numeric=575
+PLSQLVARIABLENAME=576
+PLSQLIDENTIFIER=577
+Whitespace=578
+Newline=579
+LineComment=580
+BlockComment=581
+UnterminatedBlockComment=582
+MetaCommand=583
+EndMetaCommand=584
+ErrorCharacter=585
+EscapeStringConstant=586
+UnterminatedEscapeStringConstant=587
+InvalidEscapeStringConstant=588
+InvalidUnterminatedEscapeStringConstant=589
+DollarText=590
+EndDollarStringConstant=591
+AfterEscapeStringConstantWithNewlineMode_Continued=592
 '$'=1
 '('=2
 ')'=3
@@ -1134,5 +1136,7 @@ AfterEscapeStringConstantWithNewlineMode_Continued=590
 'MAIN'=546
 'SKIP_LOCKED'=547
 'BUFFER_USAGE_LIMIT'=548
-'\\\\'=582
-'\''=590
+'DISTRIBUTED'=549
+'WRITABLE'=550
+'\\\\'=584
+'\''=592
(Two more file diffs suppressed: one too large, one with overly long lines.)
(The token-renumbering hunks above appear a second time verbatim, presumably for a second generated .tokens file whose name was not captured in this export.)
(Another file diff suppressed because it is too large.)
@@ -103,6 +103,9 @@ import { Copy_generic_opt_argContext } from "./PostgreSqlParser.js";
 import { Copy_generic_opt_arg_listContext } from "./PostgreSqlParser.js";
 import { Copy_generic_opt_arg_list_itemContext } from "./PostgreSqlParser.js";
 import { ColumnCreateTableContext } from "./PostgreSqlParser.js";
+import { Create_table_optionsContext } from "./PostgreSqlParser.js";
+import { Create_table_clauseContext } from "./PostgreSqlParser.js";
+import { OptdistributedContext } from "./PostgreSqlParser.js";
 import { OpttempContext } from "./PostgreSqlParser.js";
 import { Table_column_listContext } from "./PostgreSqlParser.js";
 import { OpttableelementlistContext } from "./PostgreSqlParser.js";
@@ -1852,6 +1855,36 @@ export class PostgreSqlParserListener implements ParseTreeListener {
      * @param ctx the parse tree
      */
     exitColumnCreateTable?: (ctx: ColumnCreateTableContext) => void;
+    /**
+     * Enter a parse tree produced by `PostgreSqlParser.create_table_options`.
+     * @param ctx the parse tree
+     */
+    enterCreate_table_options?: (ctx: Create_table_optionsContext) => void;
+    /**
+     * Exit a parse tree produced by `PostgreSqlParser.create_table_options`.
+     * @param ctx the parse tree
+     */
+    exitCreate_table_options?: (ctx: Create_table_optionsContext) => void;
+    /**
+     * Enter a parse tree produced by `PostgreSqlParser.create_table_clause`.
+     * @param ctx the parse tree
+     */
+    enterCreate_table_clause?: (ctx: Create_table_clauseContext) => void;
+    /**
+     * Exit a parse tree produced by `PostgreSqlParser.create_table_clause`.
+     * @param ctx the parse tree
+     */
+    exitCreate_table_clause?: (ctx: Create_table_clauseContext) => void;
+    /**
+     * Enter a parse tree produced by `PostgreSqlParser.optdistributed`.
+     * @param ctx the parse tree
+     */
+    enterOptdistributed?: (ctx: OptdistributedContext) => void;
+    /**
+     * Exit a parse tree produced by `PostgreSqlParser.optdistributed`.
+     * @param ctx the parse tree
+     */
+    exitOptdistributed?: (ctx: OptdistributedContext) => void;
     /**
      * Enter a parse tree produced by `PostgreSqlParser.opttemp`.
      * @param ctx the parse tree
@@ -103,6 +103,9 @@ import { Copy_generic_opt_argContext } from "./PostgreSqlParser.js";
 import { Copy_generic_opt_arg_listContext } from "./PostgreSqlParser.js";
 import { Copy_generic_opt_arg_list_itemContext } from "./PostgreSqlParser.js";
 import { ColumnCreateTableContext } from "./PostgreSqlParser.js";
+import { Create_table_optionsContext } from "./PostgreSqlParser.js";
+import { Create_table_clauseContext } from "./PostgreSqlParser.js";
+import { OptdistributedContext } from "./PostgreSqlParser.js";
 import { OpttempContext } from "./PostgreSqlParser.js";
 import { Table_column_listContext } from "./PostgreSqlParser.js";
 import { OpttableelementlistContext } from "./PostgreSqlParser.js";
@@ -1473,6 +1476,24 @@ export class PostgreSqlParserVisitor<Result> extends AbstractParseTreeVisitor<Result>
      * @return the visitor result
      */
     visitColumnCreateTable?: (ctx: ColumnCreateTableContext) => Result;
+    /**
+     * Visit a parse tree produced by `PostgreSqlParser.create_table_options`.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    visitCreate_table_options?: (ctx: Create_table_optionsContext) => Result;
+    /**
+     * Visit a parse tree produced by `PostgreSqlParser.create_table_clause`.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    visitCreate_table_clause?: (ctx: Create_table_clauseContext) => Result;
+    /**
+     * Visit a parse tree produced by `PostgreSqlParser.optdistributed`.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    visitOptdistributed?: (ctx: OptdistributedContext) => Result;
     /**
      * Visit a parse tree produced by `PostgreSqlParser.opttemp`.
      * @param ctx the parse tree
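The listener and visitor hunks expose the new create_table_clause and optdistributed rules as optional callbacks. A sketch of a custom visitor that collects every DISTRIBUTED BY clause in a script, assuming PostgreSqlParserVisitor and PostgreSQL stay exported from the package root as shown in src/index.ts above:

import { PostgreSQL, PostgreSqlParserVisitor } from 'lava-oushudb-dt-sql-parser';

// Collects the raw text of each DISTRIBUTED BY (...) clause it visits.
class DistributionKeyCollector extends PostgreSqlParserVisitor<void> {
    keys: string[] = [];

    defaultResult(): void {
        // No aggregated result is needed; matches are pushed onto `keys`.
    }

    visitOptdistributed = (ctx) => {
        this.keys.push(ctx.getText());
        this.visitChildren(ctx);
    };
}

const postgresql = new PostgreSQL();
const tree = postgresql.parse('CREATE TABLE t1 (id int, name text) DISTRIBUTED BY (id);');

const collector = new DistributionKeyCollector();
collector.visit(tree);
console.log(collector.keys);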
@@ -1,5 +1,4 @@
 export { MySQL } from './mysql';
-export { PLSQL } from './plsql';
 export { HiveSQL } from './hive';
 export { FlinkSQL } from './flink';
 export { SparkSQL } from './spark';
@@ -1,37 +0,0 @@  (deleted: the PLSQL parser entry)
-import { CharStream, CommonTokenStream, Token } from 'antlr4ng';
-import { CandidatesCollection } from 'antlr4-c3';
-import { PlSqlLexer } from '../lib/plsql/PlSqlLexer';
-import { PlSqlParser, ProgramContext } from '../lib/plsql/PlSqlParser';
-import { BasicSQL } from './common/basicSQL';
-import { Suggestions } from './common/types';
-
-export class PLSQL extends BasicSQL<PlSqlLexer, ProgramContext, PlSqlParser> {
-    protected createLexerFromCharStream(charStreams: CharStream) {
-        return new PlSqlLexer(charStreams);
-    }
-
-    protected createParserFromTokenStream(tokenStream: CommonTokenStream) {
-        return new PlSqlParser(tokenStream);
-    }
-
-    protected preferredRules: Set<number> = new Set();
-
-    protected get splitListener() {
-        return null as any;
-    }
-
-    protected createEntityCollector(input: string, caretTokenIndex?: number) {
-        return null as any;
-    }
-
-    protected processCandidates(
-        candidates: CandidatesCollection,
-        allTokens: Token[],
-        caretTokenIndex: number
-    ): Suggestions<Token> {
-        return {
-            syntax: [],
-            keywords: [],
-        };
-    }
-}
@@ -1,202 +0,0 @@  (deleted: legacy lexer/split/clean utils)
-import { Legacy_TokenType, Legacy_Token, Legacy_TokenReg } from './token';
-
-/**
- * @param {String} sql
- * @deprecated use parser.createLexer() instead.
- */
-function legacy_lexer(input: string): Legacy_Token[] {
-    let current = 0;
-    let line = 1;
-    const tokens: Legacy_Token[] = [];
-
-    const extract = (
-        currentChar: string,
-        validator: RegExp,
-        TokenType: Legacy_TokenType
-    ): Legacy_Token => {
-        let value = '';
-        const start = current;
-        while (validator.test(currentChar)) {
-            value += currentChar;
-            currentChar = input[++current];
-        }
-        return {
-            type: TokenType,
-            start: start,
-            end: current,
-            lineNumber: line,
-            value: value,
-        };
-    };
-
-    const matchFunction = () => {
-        const bracketNum = [current];
-        for (let i = current + 1; i < input.length; i++) {
-            const currentChar = input[i];
-            if (currentChar === '\n') {
-                line++;
-            }
-            if (Legacy_TokenReg.LeftSmallBracket.test(currentChar)) {
-                bracketNum.push(i);
-            }
-            if (Legacy_TokenReg.RightSmallBracket.test(currentChar)) {
-                const start = bracketNum.pop();
-                const end = i + 1;
-                if (bracketNum.length === 0) {
-                    current = end;
-                    tokens.push({
-                        type: Legacy_TokenType.FunctionArguments,
-                        value: input.slice(start, end),
-                        start,
-                        lineNumber: line,
-                        end,
-                    });
-                    return;
-                }
-            }
-        }
-    };
-
-    const matchQuotation = (
-        currentChar: string,
-        validator: RegExp,
-        TokenType: Legacy_TokenType
-    ) => {
-        do {
-            if (currentChar === '\n') {
-                line++;
-            }
-            currentChar = input[++current];
-        } while (!validator.test(currentChar));
-
-        ++current;
-    };
-
-    while (current < input.length) {
-        let char = input[current];
-
-        if (char === '\n') {
-            line++;
-            current++;
-            continue;
-        }
-
-        if (Legacy_TokenReg.LeftSmallBracket.test(char)) {
-            matchFunction();
-            continue;
-        }
-
-        if (Legacy_TokenReg.BackQuotation.test(char)) {
-            matchQuotation(char, Legacy_TokenReg.BackQuotation, Legacy_TokenType.BackQuotation);
-            continue;
-        }
-
-        if (Legacy_TokenReg.SingleQuotation.test(char)) {
-            matchQuotation(char, Legacy_TokenReg.SingleQuotation, Legacy_TokenType.SingleQuotation);
-            continue;
-        }
-
-        if (Legacy_TokenReg.DoubleQuotation.test(char)) {
-            matchQuotation(char, Legacy_TokenReg.DoubleQuotation, Legacy_TokenType.DoubleQuotation);
-            continue;
-        }
-
-        if (char === '-' && input[current + 1] === '-') {
-            let value = '';
-            const start = current;
-
-            while (char !== '\n' && current < input.length) {
-                value += char;
-                char = input[++current];
-            }
-            tokens.push({
-                type: Legacy_TokenType.Comment,
-                value,
-                start: start,
-                lineNumber: line,
-                end: current,
-            });
-            continue;
-        }
-
-        if (char === '/' && input[current + 1] === '*') {
-            let value = '';
-            const start = current;
-            const startLine = line;
-
-            while (!(char === '/' && input[current - 1] === '*')) {
-                if (char === '\n') {
-                    line++;
-                }
-                value += char;
-                char = input[++current];
-            }
-            value += char;
-            ++current;
-
-            tokens.push({
-                type: Legacy_TokenType.Comment,
-                value,
-                start: start,
-                lineNumber: startLine,
-                end: current,
-            });
-            continue;
-        }
-
-        if (Legacy_TokenReg.StatementTerminator.test(char)) {
-            const newToken = extract(
-                char,
-                Legacy_TokenReg.StatementTerminator,
-                Legacy_TokenType.StatementTerminator
-            );
-            tokens.push(newToken);
-            continue;
-        }
-
-        current++;
-    }
-    return tokens;
-}
-
-/**
- * split sql
- * @param {String} sql
- * @deprecated use parser.splitSQLByStatement() instead.
- */
-function legacy_splitSql(sql: string) {
-    const tokens = legacy_lexer(sql);
-    const sqlArr = [];
-    let startIndex = 0;
-    tokens.forEach((ele: Legacy_Token) => {
-        if (ele.type === Legacy_TokenType.StatementTerminator) {
-            sqlArr.push(sql.slice(startIndex, ele.end));
-            startIndex = ele.end;
-        }
-    });
-    if (startIndex < sql.length) {
-        sqlArr.push(sql.slice(startIndex));
-    }
-    return sqlArr;
-}
-
-/**
- * clean comment
- * @param {String} sql
- * @deprecated will be removed in future.
- */
-function legacy_cleanSql(sql: string) {
-    sql = sql.trim();
-    const tokens = legacy_lexer(sql);
-    let resultSql = '';
-    let startIndex = 0;
-    tokens.forEach((ele: Legacy_Token) => {
-        if (ele.type === Legacy_TokenType.Comment) {
-            resultSql += sql.slice(startIndex, ele.start);
-            startIndex = ele.end + 1;
-        }
-    });
-    resultSql += sql.slice(startIndex);
-    return resultSql;
-}
-export { legacy_cleanSql, legacy_splitSql, legacy_lexer };
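These deleted helpers were already marked deprecated in favour of the parser instance methods named in their doc comments (parser.createLexer() and parser.splitSQLByStatement()). A minimal sketch of the replacement usage, assuming those methods behave as the deprecation notes and the remaining tests suggest:

import { PostgreSQL } from 'lava-oushudb-dt-sql-parser';

const postgresql = new PostgreSQL();

const script = `-- a comment ;
select * from a;
select user from b;`;

// Replacement for legacy_splitSql: split a script into per-statement slices.
const statements = postgresql.splitSQLByStatement(script);
console.log(statements?.length);

// Replacement for legacy_lexer: tokenize through the generated ANTLR lexer.
const tokens = postgresql.getAllTokens(script);
console.log(tokens.length);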
@@ -1,62 +0,0 @@  (deleted: legacy token definitions)
-/**
- * @deprecated will be removed in future.
- */
-export enum Legacy_TokenType {
-    /**
-     * Enclosed in single/double/back quotation, `` Symbol
-     * 'abc', "abc", `abc`
-     */
-    SingleQuotation = 'SingleQuotation',
-    DoubleQuotation = 'DoubleQuotation',
-    BackQuotation = 'BackQuotation',
-    /**
-     * Language element type
-     */
-    Comment = 'Comment',
-
-    /**
-     * Statement
-     */
-    StatementTerminator = 'StatementTerminator',
-    /**
-     * Others
-     */
-    Error = 'Error',
-    /**
-     * Left small Bracket
-     */
-    LeftSmallBracket = 'LeftSmallBracket',
-    /**
-     * Right small Bracket
-     */
-    RightSmallBracket = 'RightSmallBracket',
-    Comma = 'Comma',
-    FunctionArguments = 'FunctionArguments',
-}
-
-/**
- * @deprecated will be removed in future.
- * Token object
- */
-export interface Legacy_Token {
-    type: Legacy_TokenType;
-    value: string;
-    start?: number;
-    end: number;
-    lineNumber: number;
-    message?: string;
-}
-
-/**
- * @deprecated will be removed in future.
- * Token recognition rules
- */
-export const Legacy_TokenReg = {
-    [Legacy_TokenType.StatementTerminator]: /[;]/,
-    [Legacy_TokenType.SingleQuotation]: /['|\']/,
-    [Legacy_TokenType.DoubleQuotation]: /["]/,
-    [Legacy_TokenType.BackQuotation]: /[`]/,
-    [Legacy_TokenType.LeftSmallBracket]: /[(]/,
-    [Legacy_TokenType.RightSmallBracket]: /[)]/,
-    [Legacy_TokenType.Comma]: /[,]/,
-};
@@ -1,12 +0,0 @@  (deleted: PLSQL lexer test)
-import { PLSQL } from 'src/parser/plsql';
-
-describe('PLSQL Lexer tests', () => {
-    const plsql = new PLSQL();
-
-    const sql = 'select id,name,sex from user1;';
-    const tokens = plsql.getAllTokens(sql);
-
-    test('token counts', () => {
-        expect(tokens.length).toBe(12);
-    });
-});
@@ -1,24 +0,0 @@  (deleted: PLSQL listener test)
-import { PLSQL } from 'src/parser/plsql';
-import { PlSqlParserListener } from 'src/lib/plsql/PlSqlParserListener';
-
-describe('PLSQL Listener Tests', () => {
-    const expectTableName = 'user1';
-    const sql = `select id,name,sex from ${expectTableName};`;
-    const plsql = new PLSQL();
-
-    const parseTree = plsql.parse(sql);
-
-    test('Listener enterTableName', async () => {
-        class MyListener extends PlSqlParserListener {
-            result = '';
-
-            enterTable_ref_list = (ctx) => {
-                this.result = ctx.getText().toLowerCase();
-            };
-        }
-        const listener = new MyListener();
-
-        plsql.listen(listener, parseTree);
-        expect(listener.result).toBe(expectTableName);
-    });
-});
@@ -1,23 +0,0 @@  (deleted: PLSQL syntax test)
-import { PLSQL } from 'src/parser/plsql';
-
-describe('PLSQL Syntax Tests', () => {
-    const plsql = new PLSQL();
-
-    test('Test simple select Statement', () => {
-        const sql = 'select id,name from user1;';
-        const result = plsql.validate(sql);
-
-        expect(result.length).toBe(0);
-    });
-
-    test(`Test select, where, order by`, () => {
-        const sql = `
-            select eid, emp_last, mgr_id, reportlevel
-            from reports_to_101 r, auto a
-            where r.c1 = a.c2
-            order by reportlevel, eid
-        `;
-        const result = plsql.validate(sql);
-        expect(result.length).toBe(0);
-    });
-});
@@ -1,31 +0,0 @@  (deleted: PLSQL visitor test)
-import { PLSQL } from 'src/parser/plsql';
-import { PlSqlParserVisitor } from 'src/lib/plsql/PlSqlParserVisitor';
-
-describe('PLSQL Visitor Tests', () => {
-    const expectTableName = 'user1';
-    const sql = `select id,name,sex from ${expectTableName};`;
-    const plsql = new PLSQL();
-
-    const parseTree = plsql.parse(sql);
-
-    test('Visitor visitTable_ref_list', () => {
-        class MyVisitor extends PlSqlParserVisitor<string> {
-            defaultResult(): string {
-                return '';
-            }
-            aggregateResult(aggregate: string, nextResult: string): string {
-                return aggregate + nextResult;
-            }
-            visitProgram = (ctx) => {
-                return this.visitChildren(ctx);
-            };
-            visitTable_ref_list = (ctx) => {
-                return ctx.getText().toLowerCase();
-            };
-        }
-        const visitor = new MyVisitor();
-        const result = visitor.visit(parseTree);
-
-        expect(result).toBe(expectTableName);
-    });
-});
@@ -75,3 +75,7 @@ UPDATE tablename SET columnname = a + b, (col1, col2) = (a+3, b+4);
 VACUUM tablename (col1, col2);

 SELECT * FROM db.tbs GROUP BY (col1, col2) ORDER BY col3;
+
+TRUNCATE TABLE ;
+
+TRUNCATE TABLE t1;
@@ -970,4 +970,36 @@ describe('Postgre SQL Syntax Suggestion', () => {
         expect(suggestion3).not.toBeUndefined();
         expect(suggestion3?.wordRanges.map((token) => token.text)).toEqual(['col3']);
     });
+
+    test('TRUNCATE TABLE', () => {
+        const pos1: CaretPosition = {
+            lineNumber: 79,
+            column: 16,
+        };
+        const pos2: CaretPosition = {
+            lineNumber: 81,
+            column: 18,
+        };
+
+        const syntaxes1 = postgresql.getSuggestionAtCaretPosition(
+            commentOtherLine(syntaxSql, pos1.lineNumber),
+            pos1
+        )?.syntax;
+        const syntaxes2 = postgresql.getSuggestionAtCaretPosition(
+            commentOtherLine(syntaxSql, pos2.lineNumber),
+            pos2
+        )?.syntax;
+
+        const suggestion1 = syntaxes1?.find(
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
+        );
+        const suggestion2 = syntaxes2?.find(
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
+        );
+
+        expect(suggestion1).not.toBeUndefined();
+        expect(suggestion1?.wordRanges.map((token) => token.text)).toEqual([]);
+        expect(suggestion2).not.toBeUndefined();
+        expect(suggestion2?.wordRanges.map((token) => token.text)).toEqual(['t1']);
+    });
 });
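Together with the fixture lines added above, this test covers the 4.0.0 bug fix for TRUNCATE TABLE completion (#301/#302) noted in the CHANGELOG. A standalone sketch of what it verifies, using the same public API as the test:

import { PostgreSQL, EntityContextType } from 'lava-oushudb-dt-sql-parser';

const postgresql = new PostgreSQL();

// Caret right after "TRUNCATE TABLE " (line 1, column 16): the parser should
// offer a TABLE entity suggestion even before any table name is typed.
const suggestions = postgresql.getSuggestionAtCaretPosition('TRUNCATE TABLE ', {
    lineNumber: 1,
    column: 16,
});

const tableSuggestion = suggestions?.syntax.find(
    (syn) => syn.syntaxContextType === EntityContextType.TABLE
);
console.log(tableSuggestion !== undefined); // expected: true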
@@ -1,89 +0,0 @@  (deleted: legacy utils test)
-import { legacy_lexer, legacy_splitSql, legacy_cleanSql } from '../../src/utils';
-import { Legacy_TokenType } from '../../src/utils/token';
-
-describe('utils', () => {
-    test('split single sql', () => {
-        const sql = 'select id,name from user';
-        const result = legacy_splitSql(sql);
-        expect(result.length).toEqual(1);
-    });
-
-    test('split multiple sql', () => {
-        const sql = `-- a ;
-            select * from a;
-            /*
-                xxx
-                xxx
-            */
-            select user from b`;
-        const result = legacy_splitSql(sql);
-        expect(result.length).toEqual(2);
-    });
-
-    test('split special quotation sql', () => {
-        const sql = `select regexp_replace('a', 'bc', 'xfe'feefe', '233');
-            select regexp_replace('abc', "fe", '233');`;
-        const result = legacy_splitSql(sql);
-        expect(result.length).toEqual(2);
-    });
-
-    // test nested bracket
-    test('split nested bracket sql', () => {
-        const sql = `WITH cte_sales_amounts (staff, sales, year)
-        AS
-        (
-            SELECT
-                first_name + ' ' + last_name,
-                SUM(quantity * list_price * (1 - discount)),
-                YEAR(order_date)
-            FROM
-                sales.orders o
-            INNER JOIN sales.order_items i ON i.order_id = o.order_id
-            INNER JOIN sales.staffs s ON s.staff_id = o.staff_id
-        )
-        SELECT staff, sales
-        FROM cte_sales_amounts
-        WHERE year = 2018;
-        SELECT * FROM table;`;
-        const result = legacy_splitSql(sql);
-        expect(result.length).toEqual(2);
-    });
-
-    test('lexer', () => {
-        const sql = `-- a ;
-            select * from a;
-            /*
-                xxx
-                xxx
-            */
-            select user from b;`;
-        const result = legacy_lexer(sql);
-        expect(result.length).toEqual(4);
-    });
-
-    test('lexer for comments', () => {
-        const sql = `select * from a;--comments`;
-        const expected = `--comments`;
-        const result = legacy_lexer(sql);
-        const comments = result.find((token) => token.type === Legacy_TokenType.Comment);
-        expect(comments?.value).toEqual(expected);
-    });
-
-    test('cleanSql', () => {
-        const sql = `-- a ;
-            select * from a;
-            /*
-                xxx
-                xxx
-            */
-            select user from b`;
-        const result = legacy_cleanSql(sql);
-        expect(result.indexOf('xxx')).toEqual(-1);
-    });
-
-    test('clean SQL white spaces', () => {
-        const sql = `
-            select * from a; `;
-        const expected = 'select * from a;';
-        const result = legacy_cleanSql(sql);
-        expect(result).toEqual(expected);
-    });
-});