Compare commits


No commits in common. "bcf888052045d387e4f39cc9101c0f279315bb90" and "8baabd027bc1cc60f5a90a99c7687ad723ad39fe" have entirely different histories.

39 changed files with 266766 additions and 19255 deletions

.husky/commit-msg (Executable file, 6 lines)

@@ -0,0 +1,6 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
echo 'commitlint'
npx --no -- commitlint --edit

.husky/pre-commit (Executable file, 4 lines)

@@ -0,0 +1,4 @@
#!/usr/bin/env sh
. "$(dirname -- "$0")/_/husky.sh"
npx lint-staged
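The pre-commit hook above only runs `npx lint-staged`; the lint-staged configuration itself is not part of this diff. Purely as a hedged illustration of what such a configuration can look like for a TypeScript project, the file name, globs, and commands below are assumptions rather than anything taken from this repository:

// Hypothetical lint-staged.config.mjs (not present in this diff).
export default {
    // Format and lint staged TypeScript sources before the commit is created.
    '*.ts': ['prettier --write', 'eslint --fix'],
    // Keep staged JSON and Markdown formatted as well.
    '*.{json,md}': ['prettier --write'],
};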

@@ -2,20 +2,6 @@
 All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines.
-### [4.0.1](https://github.com/DTStack/dt-sql-parser/compare/v4.0.0...v4.0.1) (2024-04-28)
-### Features
-* deprecate legacy APIs ([#304](https://github.com/DTStack/dt-sql-parser/issues/304)) ([f1c3bbe](https://github.com/DTStack/dt-sql-parser/commit/f1c3bbe17c2f48a55f8ac1664ce635323c88b579))
-## [4.0.0](https://github.com/DTStack/dt-sql-parser/compare/v4.0.0-beta.4.15...v4.0.0) (2024-04-28)
-### Bug Fixes
-* [#301](https://github.com/DTStack/dt-sql-parser/issues/301) pgsql truncate table suggestion ([#302](https://github.com/DTStack/dt-sql-parser/issues/302)) ([25358ec](https://github.com/DTStack/dt-sql-parser/commit/25358ec65353129933c0711212f2f90d854fa242))
 ## [4.0.0-beta.4.15](https://github.com/DTStack/dt-sql-parser/compare/v4.0.0-beta.4.14...v4.0.0-beta.4.15) (2024-04-21)

@@ -1,6 +1,6 @@
 {
-"name": "lava-oushudb-dt-sql-parser",
-"version": "4.0.2-9",
+"name": "dt-sql-parser",
+"version": "4.0.0-beta.4.15",
 "authors": "DTStack Corporation",
 "description": "SQL Parsers for BigData, built with antlr4",
 "keywords": [
@@ -54,14 +54,14 @@
 "typescript": "^5.0.4",
 "yargs-parser": "^21.1.1"
 },
-"git repository": "https://git.yevpt.com/oushu/lava-fe-dt-sql-parser-oushudb",
-"repository": "https://git.yevpt.com/oushu/lava-fe-dt-sql-parser-oushudb",
+"git repository": "https://github.com/DTStack/dt-sql-parser",
+"repository": "https://github.com/DTStack/dt-sql-parser",
+"publishConfig": {
+"registry": "https://registry.npmjs.org/"
+},
 "dependencies": {
 "antlr4-c3": "3.3.7",
 "antlr4ng": "2.0.11"
 },
-"sideEffects": false,
-"volta": {
-"node": "14.21.3"
-}
+"sideEffects": false
 }

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -668,8 +668,6 @@ KW_EXTENDED : 'EXTENDED';
 KW_MAIN : 'MAIN';
 KW_SKIP_LOCKED : 'SKIP_LOCKED';
 KW_BUFFER_USAGE_LIMIT : 'BUFFER_USAGE_LIMIT';
-KW_DISTRIBUTED : 'DISTRIBUTED';
-KW_WRITABLE : 'WRITABLE';
 //
 // IDENTIFIERS (4.1.1)

@@ -722,32 +722,14 @@ copy_generic_opt_arg_list_item
 ;
 createstmt
-: create_table_clause opt_if_not_exists? table_name_create (
-OPEN_PAREN table_column_list? CLOSE_PAREN create_table_options*
+: KW_CREATE opttemp? KW_TABLE opt_if_not_exists? table_name_create (
+OPEN_PAREN table_column_list? CLOSE_PAREN optinherit? optpartitionspec? table_access_method_clause? optwith? oncommitoption? opttablespace?
 | KW_OF any_name opttypedtableelementlist? optpartitionspec? table_access_method_clause? optwith? oncommitoption? opttablespace?
 | KW_PARTITION KW_OF qualified_name opttypedtableelementlist? partitionboundspec optpartitionspec? table_access_method_clause? optwith?
 oncommitoption? opttablespace?
 ) # columnCreateTable
 ;
-create_table_options
-: optinherit
-| optpartitionspec
-| table_access_method_clause
-| optwith
-| oncommitoption
-| opttablespace
-| optdistributed
-;
-create_table_clause
-: KW_CREATE opttemp? KW_WRITABLE? KW_EXTERNAL? KW_TABLE
-;
-optdistributed
-: KW_DISTRIBUTED KW_BY OPEN_PAREN column_list CLOSE_PAREN
-;
 opttemp
 : KW_TEMPORARY
 | KW_TEMP
@@ -1015,7 +997,7 @@ alterstatsstmt
 ;
 createasstmt
-: create_table_clause opt_if_not_exists? create_as_target KW_AS selectstmt opt_with_data? # queryCreateTable
+: KW_CREATE opttemp? KW_TABLE opt_if_not_exists? create_as_target KW_AS selectstmt opt_with_data? # queryCreateTable
 ;
 create_as_target
@@ -2856,7 +2838,7 @@ preparablestmt
 executestmt
 : KW_EXECUTE name execute_param_clause?
-| create_table_clause opt_if_not_exists? create_as_target KW_AS KW_EXECUTE name execute_param_clause? opt_with_data?
+| KW_CREATE opttemp? KW_TABLE opt_if_not_exists? create_as_target KW_AS KW_EXECUTE name execute_param_clause? opt_with_data?
 ;
 execute_param_clause
@@ -5574,6 +5556,7 @@ plsql_unreserved_keyword
 | KW_SLICE
 | KW_SQLSTATE
 | KW_STACKED
+| KW_TABLE
 //| TABLE_NAME
 | KW_TYPE
 | KW_USE_COLUMN
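The createstmt change above drops the fork's Greenplum/OushuDB-style CREATE WRITABLE EXTERNAL TABLE ... DISTRIBUTED BY (...) support and restores the upstream KW_CREATE opttemp? KW_TABLE form. A hedged sketch of the observable effect through the exported PostgreSQL parser (the exact error contents are not shown in this diff):

import { PostgreSQL } from 'dt-sql-parser';

const postgresql = new PostgreSQL();

// Plain CREATE TABLE is still covered by the upstream createstmt rule.
console.log(postgresql.validate('CREATE TABLE t1 (id int, name varchar(20));')); // expected: []

// DISTRIBUTED BY was only reachable through the removed optdistributed rule,
// so with this grammar it should surface as a syntax error.
console.log(postgresql.validate('CREATE TABLE t1 (id int) DISTRIBUTED BY (id);')); // expected: ParseError entries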

@@ -1,4 +1,13 @@
-export { MySQL, FlinkSQL, SparkSQL, HiveSQL, PostgreSQL, TrinoSQL, ImpalaSQL } from './parser';
+export {
+MySQL,
+FlinkSQL,
+SparkSQL,
+HiveSQL,
+PostgreSQL,
+TrinoSQL,
+ImpalaSQL,
+PLSQL,
+} from './parser';
 export {
 MySqlParserListener,
@@ -9,6 +18,8 @@ export {
 SparkSqlParserVisitor,
 HiveSqlParserListener,
 HiveSqlParserVisitor,
+PlSqlParserListener,
+PlSqlParserVisitor,
 PostgreSqlParserListener,
 PostgreSqlParserVisitor,
 TrinoSqlListener,
@@ -19,6 +30,14 @@ export {
 export { EntityContextType } from './parser/common/types';
+export {
+/**
+ * @deprecated SyntaxContextType has been renamed to {@link EntityContextType},
+ * It will be removed when the stable version is released.
+ */
+EntityContextType as SyntaxContextType,
+} from './parser/common/types';
 export { StmtContextType } from './parser/common/entityCollector';
 export type { CaretPosition, Suggestions, SyntaxSuggestion } from './parser/common/types';
@@ -28,3 +47,8 @@ export type { WordRange, TextSlice } from './parser/common/textAndWord';
 export type { SyntaxError, ParseError, ErrorListener } from './parser/common/parseErrorListener';
 export type { StmtContext, EntityContext } from './parser/common/entityCollector';
+/**
+ * @deprecated Legacy utils will be removed when the stable version is released.
+ */
+export * from './utils';
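The block above keeps SyntaxContextType available only as a deprecated alias of EntityContextType. A short migration sketch (the package import name is assumed here):

// Before: import { SyntaxContextType } from 'dt-sql-parser';
// After:
import { EntityContextType } from 'dt-sql-parser';

// Both names refer to the same enum, so existing checks keep working while callers migrate.
const isTableContext = (type: EntityContextType) => type === EntityContextType.TABLE;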

@@ -7,6 +7,9 @@ export { MySqlParserVisitor } from './mysql/MySqlParserVisitor';
 export { HiveSqlParserListener } from './hive/HiveSqlParserListener';
 export { HiveSqlParserVisitor } from './hive/HiveSqlParserVisitor';
+export { PlSqlParserListener } from './plsql/PlSqlParserListener';
+export { PlSqlParserVisitor } from './plsql/PlSqlParserVisitor';
 export { SparkSqlParserListener } from './spark/SparkSqlParserListener';
 export { SparkSqlParserVisitor } from './spark/SparkSqlParserVisitor';


@@ -0,0 +1,8 @@
import { Lexer } from "antlr4ng";
export abstract class PlSqlBaseLexer extends Lexer {
IsNewlineAtPos(pos: number): boolean {
const la = this._input.LA(pos);
return la == -1 || String.fromCharCode(la) == '\n';
}
}


@@ -0,0 +1,29 @@
import { TokenStream } from "antlr4ng";
import {SQLParserBase} from "../SQLParserBase";
export abstract class PlSqlBaseParser extends SQLParserBase {
private _isVersion10: boolean = false;
private _isVersion12: boolean = true;
constructor(input: TokenStream) {
super(input);
this._isVersion10 = false;
this._isVersion12 = true;
}
isVersion10(): boolean {
return this._isVersion10;
}
isVersion12(): boolean {
return this._isVersion12;
}
setVersion10(value: boolean): void {
this._isVersion10 = value;
}
setVersion12(value: boolean): void {
this._isVersion12 = value;
}
}
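PlSqlBaseParser only stores the two version flags; the generated PlSqlParser (whose diff is suppressed below because of its size) is what would consult them. A hypothetical sketch of flipping the flags on a parser built from the new PLSQL lexer and parser classes; CharStream.fromString is assumed to be the antlr4ng factory for string input:

import { CharStream, CommonTokenStream } from 'antlr4ng';
import { PlSqlLexer } from 'src/lib/plsql/PlSqlLexer';
import { PlSqlParser } from 'src/lib/plsql/PlSqlParser';

// Build the token stream and parser directly from the generated classes.
const input = CharStream.fromString('select 1 from dual;');
const tokens = new CommonTokenStream(new PlSqlLexer(input));
const parser = new PlSqlParser(tokens);

// Defaults inherited from PlSqlBaseParser: _isVersion10 = false, _isVersion12 = true.
parser.setVersion10(true);
parser.setVersion12(false);
console.log(parser.isVersion10(), parser.isVersion12()); // true false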

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

src/lib/plsql/PlSqlLexer.ts (Normal file, 16468 lines)

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

src/lib/plsql/PlSqlParser.ts (Normal file, 187028 lines)

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

@@ -546,50 +546,48 @@ KW_EXTENDED=545
 KW_MAIN=546
 KW_SKIP_LOCKED=547
 KW_BUFFER_USAGE_LIMIT=548
-KW_DISTRIBUTED=549
-KW_WRITABLE=550
-Identifier=551
-QuotedIdentifier=552
-UnterminatedQuotedIdentifier=553
-InvalidQuotedIdentifier=554
-InvalidUnterminatedQuotedIdentifier=555
-UnicodeQuotedIdentifier=556
-UnterminatedUnicodeQuotedIdentifier=557
-InvalidUnicodeQuotedIdentifier=558
-InvalidUnterminatedUnicodeQuotedIdentifier=559
-StringConstant=560
-UnterminatedStringConstant=561
-UnicodeEscapeStringConstant=562
-UnterminatedUnicodeEscapeStringConstant=563
-BeginDollarStringConstant=564
-BinaryStringConstant=565
-UnterminatedBinaryStringConstant=566
-InvalidBinaryStringConstant=567
-InvalidUnterminatedBinaryStringConstant=568
-HexadecimalStringConstant=569
-UnterminatedHexadecimalStringConstant=570
-InvalidHexadecimalStringConstant=571
-InvalidUnterminatedHexadecimalStringConstant=572
-Integral=573
-NumericFail=574
-Numeric=575
-PLSQLVARIABLENAME=576
-PLSQLIDENTIFIER=577
-Whitespace=578
-Newline=579
-LineComment=580
-BlockComment=581
-UnterminatedBlockComment=582
-MetaCommand=583
-EndMetaCommand=584
-ErrorCharacter=585
-EscapeStringConstant=586
-UnterminatedEscapeStringConstant=587
-InvalidEscapeStringConstant=588
-InvalidUnterminatedEscapeStringConstant=589
-DollarText=590
-EndDollarStringConstant=591
-AfterEscapeStringConstantWithNewlineMode_Continued=592
+Identifier=549
+QuotedIdentifier=550
+UnterminatedQuotedIdentifier=551
+InvalidQuotedIdentifier=552
+InvalidUnterminatedQuotedIdentifier=553
+UnicodeQuotedIdentifier=554
+UnterminatedUnicodeQuotedIdentifier=555
+InvalidUnicodeQuotedIdentifier=556
+InvalidUnterminatedUnicodeQuotedIdentifier=557
+StringConstant=558
+UnterminatedStringConstant=559
+UnicodeEscapeStringConstant=560
+UnterminatedUnicodeEscapeStringConstant=561
+BeginDollarStringConstant=562
+BinaryStringConstant=563
+UnterminatedBinaryStringConstant=564
+InvalidBinaryStringConstant=565
+InvalidUnterminatedBinaryStringConstant=566
+HexadecimalStringConstant=567
+UnterminatedHexadecimalStringConstant=568
+InvalidHexadecimalStringConstant=569
+InvalidUnterminatedHexadecimalStringConstant=570
+Integral=571
+NumericFail=572
+Numeric=573
+PLSQLVARIABLENAME=574
+PLSQLIDENTIFIER=575
+Whitespace=576
+Newline=577
+LineComment=578
+BlockComment=579
+UnterminatedBlockComment=580
+MetaCommand=581
+EndMetaCommand=582
+ErrorCharacter=583
+EscapeStringConstant=584
+UnterminatedEscapeStringConstant=585
+InvalidEscapeStringConstant=586
+InvalidUnterminatedEscapeStringConstant=587
+DollarText=588
+EndDollarStringConstant=589
+AfterEscapeStringConstantWithNewlineMode_Continued=590
 '$'=1
 '('=2
 ')'=3
@@ -1136,7 +1134,5 @@ AfterEscapeStringConstantWithNewlineMode_Continued=592
 'MAIN'=546
 'SKIP_LOCKED'=547
 'BUFFER_USAGE_LIMIT'=548
-'DISTRIBUTED'=549
-'WRITABLE'=550
-'\\\\'=584
-'\''=592
+'\\\\'=582
+'\''=590

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

@@ -546,50 +546,48 @@ KW_EXTENDED=545
 KW_MAIN=546
 KW_SKIP_LOCKED=547
 KW_BUFFER_USAGE_LIMIT=548
-KW_DISTRIBUTED=549
-KW_WRITABLE=550
-Identifier=551
-QuotedIdentifier=552
-UnterminatedQuotedIdentifier=553
-InvalidQuotedIdentifier=554
-InvalidUnterminatedQuotedIdentifier=555
-UnicodeQuotedIdentifier=556
-UnterminatedUnicodeQuotedIdentifier=557
-InvalidUnicodeQuotedIdentifier=558
-InvalidUnterminatedUnicodeQuotedIdentifier=559
-StringConstant=560
-UnterminatedStringConstant=561
-UnicodeEscapeStringConstant=562
-UnterminatedUnicodeEscapeStringConstant=563
-BeginDollarStringConstant=564
-BinaryStringConstant=565
-UnterminatedBinaryStringConstant=566
-InvalidBinaryStringConstant=567
-InvalidUnterminatedBinaryStringConstant=568
-HexadecimalStringConstant=569
-UnterminatedHexadecimalStringConstant=570
-InvalidHexadecimalStringConstant=571
-InvalidUnterminatedHexadecimalStringConstant=572
-Integral=573
-NumericFail=574
-Numeric=575
-PLSQLVARIABLENAME=576
-PLSQLIDENTIFIER=577
-Whitespace=578
-Newline=579
-LineComment=580
-BlockComment=581
-UnterminatedBlockComment=582
-MetaCommand=583
-EndMetaCommand=584
-ErrorCharacter=585
-EscapeStringConstant=586
-UnterminatedEscapeStringConstant=587
-InvalidEscapeStringConstant=588
-InvalidUnterminatedEscapeStringConstant=589
-DollarText=590
-EndDollarStringConstant=591
-AfterEscapeStringConstantWithNewlineMode_Continued=592
+Identifier=549
+QuotedIdentifier=550
+UnterminatedQuotedIdentifier=551
+InvalidQuotedIdentifier=552
+InvalidUnterminatedQuotedIdentifier=553
+UnicodeQuotedIdentifier=554
+UnterminatedUnicodeQuotedIdentifier=555
+InvalidUnicodeQuotedIdentifier=556
+InvalidUnterminatedUnicodeQuotedIdentifier=557
+StringConstant=558
+UnterminatedStringConstant=559
+UnicodeEscapeStringConstant=560
+UnterminatedUnicodeEscapeStringConstant=561
+BeginDollarStringConstant=562
+BinaryStringConstant=563
+UnterminatedBinaryStringConstant=564
+InvalidBinaryStringConstant=565
+InvalidUnterminatedBinaryStringConstant=566
+HexadecimalStringConstant=567
+UnterminatedHexadecimalStringConstant=568
+InvalidHexadecimalStringConstant=569
+InvalidUnterminatedHexadecimalStringConstant=570
+Integral=571
+NumericFail=572
+Numeric=573
+PLSQLVARIABLENAME=574
+PLSQLIDENTIFIER=575
+Whitespace=576
+Newline=577
+LineComment=578
+BlockComment=579
+UnterminatedBlockComment=580
+MetaCommand=581
+EndMetaCommand=582
+ErrorCharacter=583
+EscapeStringConstant=584
+UnterminatedEscapeStringConstant=585
+InvalidEscapeStringConstant=586
+InvalidUnterminatedEscapeStringConstant=587
+DollarText=588
+EndDollarStringConstant=589
+AfterEscapeStringConstantWithNewlineMode_Continued=590
 '$'=1
 '('=2
 ')'=3
@@ -1136,7 +1134,5 @@ AfterEscapeStringConstantWithNewlineMode_Continued=592
 'MAIN'=546
 'SKIP_LOCKED'=547
 'BUFFER_USAGE_LIMIT'=548
-'DISTRIBUTED'=549
-'WRITABLE'=550
-'\\\\'=584
-'\''=592
+'\\\\'=582
+'\''=590

File diff suppressed because it is too large

@@ -103,9 +103,6 @@ import { Copy_generic_opt_argContext } from "./PostgreSqlParser.js";
 import { Copy_generic_opt_arg_listContext } from "./PostgreSqlParser.js";
 import { Copy_generic_opt_arg_list_itemContext } from "./PostgreSqlParser.js";
 import { ColumnCreateTableContext } from "./PostgreSqlParser.js";
-import { Create_table_optionsContext } from "./PostgreSqlParser.js";
-import { Create_table_clauseContext } from "./PostgreSqlParser.js";
-import { OptdistributedContext } from "./PostgreSqlParser.js";
 import { OpttempContext } from "./PostgreSqlParser.js";
 import { Table_column_listContext } from "./PostgreSqlParser.js";
 import { OpttableelementlistContext } from "./PostgreSqlParser.js";
@@ -1855,36 +1852,6 @@ export class PostgreSqlParserListener implements ParseTreeListener
 * @param ctx the parse tree
 */
 exitColumnCreateTable?: (ctx: ColumnCreateTableContext) => void;
-/**
-* Enter a parse tree produced by `PostgreSqlParser.create_table_options`.
-* @param ctx the parse tree
-*/
-enterCreate_table_options?: (ctx: Create_table_optionsContext) => void;
-/**
-* Exit a parse tree produced by `PostgreSqlParser.create_table_options`.
-* @param ctx the parse tree
-*/
-exitCreate_table_options?: (ctx: Create_table_optionsContext) => void;
-/**
-* Enter a parse tree produced by `PostgreSqlParser.create_table_clause`.
-* @param ctx the parse tree
-*/
-enterCreate_table_clause?: (ctx: Create_table_clauseContext) => void;
-/**
-* Exit a parse tree produced by `PostgreSqlParser.create_table_clause`.
-* @param ctx the parse tree
-*/
-exitCreate_table_clause?: (ctx: Create_table_clauseContext) => void;
-/**
-* Enter a parse tree produced by `PostgreSqlParser.optdistributed`.
-* @param ctx the parse tree
-*/
-enterOptdistributed?: (ctx: OptdistributedContext) => void;
-/**
-* Exit a parse tree produced by `PostgreSqlParser.optdistributed`.
-* @param ctx the parse tree
-*/
-exitOptdistributed?: (ctx: OptdistributedContext) => void;
 /**
 * Enter a parse tree produced by `PostgreSqlParser.opttemp`.
 * @param ctx the parse tree

@@ -103,9 +103,6 @@ import { Copy_generic_opt_argContext } from "./PostgreSqlParser.js";
 import { Copy_generic_opt_arg_listContext } from "./PostgreSqlParser.js";
 import { Copy_generic_opt_arg_list_itemContext } from "./PostgreSqlParser.js";
 import { ColumnCreateTableContext } from "./PostgreSqlParser.js";
-import { Create_table_optionsContext } from "./PostgreSqlParser.js";
-import { Create_table_clauseContext } from "./PostgreSqlParser.js";
-import { OptdistributedContext } from "./PostgreSqlParser.js";
 import { OpttempContext } from "./PostgreSqlParser.js";
 import { Table_column_listContext } from "./PostgreSqlParser.js";
 import { OpttableelementlistContext } from "./PostgreSqlParser.js";
@@ -1476,24 +1473,6 @@ export class PostgreSqlParserVisitor<Result> extends AbstractParseTreeVisitor<Re
 * @return the visitor result
 */
 visitColumnCreateTable?: (ctx: ColumnCreateTableContext) => Result;
-/**
-* Visit a parse tree produced by `PostgreSqlParser.create_table_options`.
-* @param ctx the parse tree
-* @return the visitor result
-*/
-visitCreate_table_options?: (ctx: Create_table_optionsContext) => Result;
-/**
-* Visit a parse tree produced by `PostgreSqlParser.create_table_clause`.
-* @param ctx the parse tree
-* @return the visitor result
-*/
-visitCreate_table_clause?: (ctx: Create_table_clauseContext) => Result;
-/**
-* Visit a parse tree produced by `PostgreSqlParser.optdistributed`.
-* @param ctx the parse tree
-* @return the visitor result
-*/
-visitOptdistributed?: (ctx: OptdistributedContext) => Result;
 /**
 * Visit a parse tree produced by `PostgreSqlParser.opttemp`.
 * @param ctx the parse tree

@@ -1,4 +1,5 @@
 export { MySQL } from './mysql';
+export { PLSQL } from './plsql';
 export { HiveSQL } from './hive';
 export { FlinkSQL } from './flink';
 export { SparkSQL } from './spark';

src/parser/plsql.ts (Normal file, 37 lines)

@@ -0,0 +1,37 @@
import { CharStream, CommonTokenStream, Token } from 'antlr4ng';
import { CandidatesCollection } from 'antlr4-c3';
import { PlSqlLexer } from '../lib/plsql/PlSqlLexer';
import { PlSqlParser, ProgramContext } from '../lib/plsql/PlSqlParser';
import { BasicSQL } from './common/basicSQL';
import { Suggestions } from './common/types';
export class PLSQL extends BasicSQL<PlSqlLexer, ProgramContext, PlSqlParser> {
protected createLexerFromCharStream(charStreams: CharStream) {
return new PlSqlLexer(charStreams);
}
protected createParserFromTokenStream(tokenStream: CommonTokenStream) {
return new PlSqlParser(tokenStream);
}
protected preferredRules: Set<number> = new Set();
protected get splitListener() {
return null as any;
}
protected createEntityCollector(input: string, caretTokenIndex?: number) {
return null as any;
}
protected processCandidates(
candidates: CandidatesCollection,
allTokens: Token[],
caretTokenIndex: number
): Suggestions<Token> {
return {
syntax: [],
keywords: [],
};
}
}
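The new PLSQL entry point exposes the same BasicSQL surface as the other dialects, and the tests added later in this diff exercise getAllTokens, parse, validate, and listen. A brief usage sketch (the import name assumes the published package):

import { PLSQL } from 'dt-sql-parser';

const plsql = new PLSQL();
const sql = 'select id, name from user1;';

const tokens = plsql.getAllTokens(sql); // raw tokens from PlSqlLexer
const errors = plsql.validate(sql);     // [] when the statement parses cleanly
const tree = plsql.parse(sql);          // ProgramContext parse tree

// Note: splitListener and createEntityCollector return null here, so statement
// splitting and entity collection are not wired up for PLSQL in this diff.
console.log(tokens.length, errors.length, tree != null);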

src/utils/index.ts (Normal file, 202 lines)

@@ -0,0 +1,202 @@
import { Legacy_TokenType, Legacy_Token, Legacy_TokenReg } from './token';
/**
* @param {String} sql
* @deprecated use parser.createLexer() instead.
*/
function legacy_lexer(input: string): Legacy_Token[] {
let current = 0;
let line = 1;
const tokens: Legacy_Token[] = [];
const extract = (
currentChar: string,
validator: RegExp,
TokenType: Legacy_TokenType
): Legacy_Token => {
let value = '';
const start = current;
while (validator.test(currentChar)) {
value += currentChar;
currentChar = input[++current];
}
return {
type: TokenType,
start: start,
end: current,
lineNumber: line,
value: value,
};
};
const matchFunction = () => {
const bracketNum = [current];
for (let i = current + 1; i < input.length; i++) {
const currentChar = input[i];
if (currentChar === '\n') {
line++;
}
if (Legacy_TokenReg.LeftSmallBracket.test(currentChar)) {
bracketNum.push(i);
}
if (Legacy_TokenReg.RightSmallBracket.test(currentChar)) {
const start = bracketNum.pop();
const end = i + 1;
if (bracketNum.length === 0) {
current = end;
tokens.push({
type: Legacy_TokenType.FunctionArguments,
value: input.slice(start, end),
start,
lineNumber: line,
end,
});
return;
}
}
}
};
const matchQuotation = (
currentChar: string,
validator: RegExp,
TokenType: Legacy_TokenType
) => {
do {
if (currentChar === '\n') {
line++;
}
currentChar = input[++current];
} while (!validator.test(currentChar));
++current;
};
while (current < input.length) {
let char = input[current];
if (char === '\n') {
line++;
current++;
continue;
}
if (Legacy_TokenReg.LeftSmallBracket.test(char)) {
matchFunction();
continue;
}
if (Legacy_TokenReg.BackQuotation.test(char)) {
matchQuotation(char, Legacy_TokenReg.BackQuotation, Legacy_TokenType.BackQuotation);
continue;
}
if (Legacy_TokenReg.SingleQuotation.test(char)) {
matchQuotation(char, Legacy_TokenReg.SingleQuotation, Legacy_TokenType.SingleQuotation);
continue;
}
if (Legacy_TokenReg.DoubleQuotation.test(char)) {
matchQuotation(char, Legacy_TokenReg.DoubleQuotation, Legacy_TokenType.DoubleQuotation);
continue;
}
if (char === '-' && input[current + 1] === '-') {
let value = '';
const start = current;
while (char !== '\n' && current < input.length) {
value += char;
char = input[++current];
}
tokens.push({
type: Legacy_TokenType.Comment,
value,
start: start,
lineNumber: line,
end: current,
});
continue;
}
if (char === '/' && input[current + 1] === '*') {
let value = '';
const start = current;
const startLine = line;
while (!(char === '/' && input[current - 1] === '*')) {
if (char === '\n') {
line++;
}
value += char;
char = input[++current];
}
value += char;
++current;
tokens.push({
type: Legacy_TokenType.Comment,
value,
start: start,
lineNumber: startLine,
end: current,
});
continue;
}
if (Legacy_TokenReg.StatementTerminator.test(char)) {
const newToken = extract(
char,
Legacy_TokenReg.StatementTerminator,
Legacy_TokenType.StatementTerminator
);
tokens.push(newToken);
continue;
}
current++;
}
return tokens;
}
/**
* split sql
* @param {String} sql
* @deprecated use parser.splitSQLByStatement() instead.
*/
function legacy_splitSql(sql: string) {
const tokens = legacy_lexer(sql);
const sqlArr = [];
let startIndex = 0;
tokens.forEach((ele: Legacy_Token) => {
if (ele.type === Legacy_TokenType.StatementTerminator) {
sqlArr.push(sql.slice(startIndex, ele.end));
startIndex = ele.end;
}
});
if (startIndex < sql.length) {
sqlArr.push(sql.slice(startIndex));
}
return sqlArr;
}
/**
* clean comment
* @param {String} sql
* @deprecated will be removed in future.
*/
function legacy_cleanSql(sql: string) {
sql = sql.trim();
const tokens = legacy_lexer(sql);
let resultSql = '';
let startIndex = 0;
tokens.forEach((ele: Legacy_Token) => {
if (ele.type === Legacy_TokenType.Comment) {
resultSql += sql.slice(startIndex, ele.start);
startIndex = ele.end + 1;
}
});
resultSql += sql.slice(startIndex);
return resultSql;
}
export { legacy_cleanSql, legacy_splitSql, legacy_lexer };
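These legacy helpers are re-exported from the package root through the deprecated export * from './utils' shown earlier, and test/utils/index.test.ts below covers them. A short usage sketch (the root import name is assumed):

import { legacy_splitSql, legacy_cleanSql } from 'dt-sql-parser';

const sql = `-- a comment
select * from a;
select user from b`;

// legacy_splitSql cuts on the statement terminators found by legacy_lexer.
console.log(legacy_splitSql(sql).length); // 2

// legacy_cleanSql trims the input and strips comment tokens.
console.log(legacy_cleanSql(sql).includes('-- a comment')); // false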

src/utils/token.ts (Executable file, 62 lines)

@@ -0,0 +1,62 @@
/**
* @deprecated will be removed in future.
*/
export enum Legacy_TokenType {
/**
* Enclosed in single/double/back quotation, `` Symbol
* 'abc', "abc", `abc`
*/
SingleQuotation = 'SingleQuotation',
DoubleQuotation = 'DoubleQuotation',
BackQuotation = 'BackQuotation',
/**
* Language element type
*/
Comment = 'Comment',
/**
* Statement
*/
StatementTerminator = 'StatementTerminator',
/**
* Others
*/
Error = 'Error',
/**
* Left small Bracket
*/
LeftSmallBracket = 'LeftSmallBracket',
/**
* Left small Bracket
*/
RightSmallBracket = 'RightSmallBracket',
Comma = 'Comma',
FunctionArguments = 'FunctionArguments',
}
/**
* @deprecated will be removed in future.
* Token object
*/
export interface Legacy_Token {
type: Legacy_TokenType;
value: string;
start?: number;
end: number;
lineNumber: number;
message?: string;
}
/**
* @deprecated will be removed in future.
* Token recognition rules
*/
export const Legacy_TokenReg = {
[Legacy_TokenType.StatementTerminator]: /[;]/,
[Legacy_TokenType.SingleQuotation]: /['|\']/,
[Legacy_TokenType.DoubleQuotation]: /["]/,
[Legacy_TokenType.BackQuotation]: /[`]/,
[Legacy_TokenType.LeftSmallBracket]: /[(]/,
[Legacy_TokenType.RightSmallBracket]: /[)]/,
[Legacy_TokenType.Comma]: /[,]/,
};


@@ -0,0 +1,12 @@
import { PLSQL } from 'src/parser/plsql';
describe('PLSQL Lexer tests', () => {
const plsql = new PLSQL();
const sql = 'select id,name,sex from user1;';
const tokens = plsql.getAllTokens(sql);
test('token counts', () => {
expect(tokens.length).toBe(12);
});
});


@@ -0,0 +1,24 @@
import { PLSQL } from 'src/parser/plsql';
import { PlSqlParserListener } from 'src/lib/plsql/PlSqlParserListener';
describe('PLSQL Listener Tests', () => {
const expectTableName = 'user1';
const sql = `select id,name,sex from ${expectTableName};`;
const plsql = new PLSQL();
const parseTree = plsql.parse(sql);
test('Listener enterTableName', async () => {
class MyListener extends PlSqlParserListener {
result = '';
enterTable_ref_list = (ctx) => {
this.result = ctx.getText().toLowerCase();
};
}
const listener = new MyListener();
plsql.listen(listener, parseTree);
expect(listener.result).toBe(expectTableName);
});
});


@@ -0,0 +1,23 @@
import { PLSQL } from 'src/parser/plsql';
describe('PLSQL Syntax Tests', () => {
const plsql = new PLSQL();
test('Test simple select Statement', () => {
const sql = 'select id,name from user1;';
const result = plsql.validate(sql);
expect(result.length).toBe(0);
});
test(`Test select, where, order by`, () => {
const sql = `
select eid, emp_last, mgr_id, reportlevel
from reports_to_101 r, auto a
where r.c1 = a.c2
order by reportlevel, eid
`;
const result = plsql.validate(sql);
expect(result.length).toBe(0);
});
});


@@ -0,0 +1,31 @@
import { PLSQL } from 'src/parser/plsql';
import { PlSqlParserVisitor } from 'src/lib/plsql/PlSqlParserVisitor';
describe('PLSQL Visitor Tests', () => {
const expectTableName = 'user1';
const sql = `select id,name,sex from ${expectTableName};`;
const plsql = new PLSQL();
const parseTree = plsql.parse(sql);
test('Visitor visitTable_ref_list', () => {
class MyVisitor extends PlSqlParserVisitor<string> {
defaultResult(): string {
return '';
}
aggregateResult(aggregate: string, nextResult: string): string {
return aggregate + nextResult;
}
visitProgram = (ctx) => {
return this.visitChildren(ctx);
};
visitTable_ref_list = (ctx) => {
return ctx.getText().toLowerCase();
};
}
const visitor = new MyVisitor();
const result = visitor.visit(parseTree);
expect(result).toBe(expectTableName);
});
});

@@ -75,7 +75,3 @@ UPDATE tablename SET columnname = a + b, (col1, col2) = (a+3, b+4);
 VACUUM tablename (col1, col2);
 SELECT * FROM db.tbs GROUP BY (col1, col2) ORDER BY col3;
-TRUNCATE TABLE ;
-TRUNCATE TABLE t1;

@@ -970,36 +970,4 @@ describe('Postgre SQL Syntax Suggestion', () => {
 expect(suggestion3).not.toBeUndefined();
 expect(suggestion3?.wordRanges.map((token) => token.text)).toEqual(['col3']);
 });
-test('TRUNCATE TABLE', () => {
-const pos1: CaretPosition = {
-lineNumber: 79,
-column: 16,
-};
-const pos2: CaretPosition = {
-lineNumber: 81,
-column: 18,
-};
-const syntaxes1 = postgresql.getSuggestionAtCaretPosition(
-commentOtherLine(syntaxSql, pos1.lineNumber),
-pos1
-)?.syntax;
-const syntaxes2 = postgresql.getSuggestionAtCaretPosition(
-commentOtherLine(syntaxSql, pos2.lineNumber),
-pos2
-)?.syntax;
-const suggestion1 = syntaxes1?.find(
-(syn) => syn.syntaxContextType === EntityContextType.TABLE
-);
-const suggestion2 = syntaxes2?.find(
-(syn) => syn.syntaxContextType === EntityContextType.TABLE
-);
-expect(suggestion1).not.toBeUndefined();
-expect(suggestion1?.wordRanges.map((token) => token.text)).toEqual([]);
-expect(suggestion2).not.toBeUndefined();
-expect(suggestion2?.wordRanges.map((token) => token.text)).toEqual(['t1']);
-});
 });

test/utils/index.test.ts (Normal file, 89 lines)

@@ -0,0 +1,89 @@
import { legacy_lexer, legacy_splitSql, legacy_cleanSql } from '../../src/utils';
import { Legacy_TokenType } from '../../src/utils/token';
describe('utils', () => {
test('split single sql', () => {
const sql = 'select id,name from user';
const result = legacy_splitSql(sql);
expect(result.length).toEqual(1);
});
test('split multiple sql', () => {
const sql = `-- a ;
select * from a;
/*
xxx
xxx
*/
select user from b`;
const result = legacy_splitSql(sql);
expect(result.length).toEqual(2);
});
test('split special quotation sql', () => {
const sql = `select regexp_replace('a', 'bc', 'xfe'feefe', '233');
select regexp_replace('abc', "fe", '233');`;
const result = legacy_splitSql(sql);
expect(result.length).toEqual(2);
});
// test nested bracket
test('split nested bracket sql', () => {
const sql = `WITH cte_sales_amounts (staff, sales, year)
AS
(
SELECT
first_name + ' ' + last_name,
SUM(quantity * list_price * (1 - discount)),
YEAR(order_date)
FROM
sales.orders o
INNER JOIN sales.order_items i ON i.order_id = o.order_id
INNER JOIN sales.staffs s ON s.staff_id = o.staff_id
)
SELECT staff, sales
FROM cte_sales_amounts
WHERE year = 2018;
SELECT * FROM table;`;
const result = legacy_splitSql(sql);
expect(result.length).toEqual(2);
});
test('lexer', () => {
const sql = `-- a ;
select * from a;
/*
xxx
xxx
*/
select user from b;`;
const result = legacy_lexer(sql);
expect(result.length).toEqual(4);
});
test('lexer for comments', () => {
const sql = `select * from a;--comments`;
const expected = `--comments`;
const result = legacy_lexer(sql);
const comments = result.find((token) => token.type === Legacy_TokenType.Comment);
expect(comments?.value).toEqual(expected);
});
test('cleanSql', () => {
const sql = `-- a ;
select * from a;
/*
xxx
xxx
*/
select user from b`;
const result = legacy_cleanSql(sql);
expect(result.indexOf('xxx')).toEqual(-1);
});
test('clean SQL white spaces', () => {
const sql = `
select * from a; `;
const expected = 'select * from a;';
const result = legacy_cleanSql(sql);
expect(result).toEqual(expected);
});
});