feat: migrate to antlr4ng (#267)

* feat: replace antlr4ts with antlr4ng

* feat: switch caseInsensitive option on

* feat: recompile all g4 files

* feat: update parser to fit antlr4ng

* test: update test to fit antlr4ng
This commit is contained in:
Hayden 2024-02-26 20:25:09 +08:00 committed by GitHub
parent 5ce89cb421
commit 195878da9b
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
112 changed files with 648433 additions and 659067 deletions

View File

@ -17,13 +17,13 @@ module.exports = {
clearMocks: true, clearMocks: true,
// Indicates whether the coverage information should be collected while executing the test // Indicates whether the coverage information should be collected while executing the test
collectCoverage: true, collectCoverage: false,
// An array of glob patterns indicating a set of files for which coverage information should be collected // An array of glob patterns indicating a set of files for which coverage information should be collected
// collectCoverageFrom: undefined, // collectCoverageFrom: undefined,
// The directory where Jest should output its coverage files // The directory where Jest should output its coverage files
coverageDirectory: 'coverage', // coverageDirectory: 'coverage',
// An array of regexp pattern strings used to skip coverage collection // An array of regexp pattern strings used to skip coverage collection
// coveragePathIgnorePatterns: [ // coveragePathIgnorePatterns: [
@ -76,7 +76,7 @@ module.exports = {
// ], // ],
// An array of file extensions your modules use // An array of file extensions your modules use
moduleFileExtensions: ['js', 'mjs', 'cjs', 'jsx', 'ts', 'tsx', 'json', 'node'], moduleFileExtensions: ['js', 'mjs', 'ts'],
// A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
moduleNameMapper: { moduleNameMapper: {
@ -166,10 +166,11 @@ module.exports = {
// A map from regular expressions to paths to transformers // A map from regular expressions to paths to transformers
transform: { transform: {
'\\.[jt]sx?$': ['@swc/jest'], '\\.[jt]sx?$': ['@swc/jest'],
'\\.mjs$': ['@swc/jest'],
}, },
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
extensionsToTreatAsEsm: ['.ts', '.tsx'], extensionsToTreatAsEsm: ['.ts'],
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// unmockedModulePathPatterns: undefined, // unmockedModulePathPatterns: undefined,
@ -181,4 +182,6 @@ module.exports = {
// Whether to use watchman for file crawling // Whether to use watchman for file crawling
// watchman: true // watchman: true
transformIgnorePatterns: ['dist/', '<rootDir>/node_modules/.pnpm/(?!(antlr4ng|antlr4-c3)@)'],
}; };

View File

@ -41,7 +41,7 @@
"@types/jest": "^29.5.1", "@types/jest": "^29.5.1",
"@types/node": "^18.15.11", "@types/node": "^18.15.11",
"antlr-format-cli": "^1.2.1", "antlr-format-cli": "^1.2.1",
"antlr4ts-cli": "^0.5.0-alpha.4", "antlr4ng-cli": "^1.0.7",
"chalk": "4.1.2", "chalk": "4.1.2",
"commitizen": "^4.3.0", "commitizen": "^4.3.0",
"glob": "^10.3.10", "glob": "^10.3.10",
@ -60,8 +60,8 @@
"registry": "https://registry.npmjs.org/" "registry": "https://registry.npmjs.org/"
}, },
"dependencies": { "dependencies": {
"antlr4-c3": "3.1.1", "antlr4-c3": "3.3.7",
"antlr4ts": "0.5.0-alpha.4" "antlr4ng": "2.0.11"
}, },
"sideEffects": false "sideEffects": false
} }

View File

@ -9,9 +9,9 @@ specifiers:
'@types/jest': ^29.5.1 '@types/jest': ^29.5.1
'@types/node': ^18.15.11 '@types/node': ^18.15.11
antlr-format-cli: ^1.2.1 antlr-format-cli: ^1.2.1
antlr4-c3: 3.1.1 antlr4-c3: 3.3.7
antlr4ts: 0.5.0-alpha.4 antlr4ng: 2.0.11
antlr4ts-cli: ^0.5.0-alpha.4 antlr4ng-cli: ^1.0.7
chalk: 4.1.2 chalk: 4.1.2
commitizen: ^4.3.0 commitizen: ^4.3.0
glob: ^10.3.10 glob: ^10.3.10
@ -25,8 +25,8 @@ specifiers:
yargs-parser: ^21.1.1 yargs-parser: ^21.1.1
dependencies: dependencies:
antlr4-c3: 3.1.1 antlr4-c3: 3.3.7_antlr4ng-cli@1.0.7
antlr4ts: 0.5.0-alpha.4 antlr4ng: 2.0.11_antlr4ng-cli@1.0.7
devDependencies: devDependencies:
'@commitlint/cli': 17.7.2_@swc+core@1.3.60 '@commitlint/cli': 17.7.2_@swc+core@1.3.60
@ -36,8 +36,8 @@ devDependencies:
'@swc/jest': 0.2.26_@swc+core@1.3.60 '@swc/jest': 0.2.26_@swc+core@1.3.60
'@types/jest': 29.5.1 '@types/jest': 29.5.1
'@types/node': 18.16.16 '@types/node': 18.16.16
antlr-format-cli: 1.2.1 antlr-format-cli: 1.2.1_antlr4ng-cli@1.0.7
antlr4ts-cli: 0.5.0-alpha.4 antlr4ng-cli: 1.0.7
chalk: 4.1.2 chalk: 4.1.2
commitizen: 4.3.0_@swc+core@1.3.60 commitizen: 4.3.0_@swc+core@1.3.60
glob: 10.3.10 glob: 10.3.10
@ -1267,13 +1267,13 @@ packages:
engines: {node: '>=12'} engines: {node: '>=12'}
dev: true dev: true
/antlr-format-cli/1.2.1: /antlr-format-cli/1.2.1_antlr4ng-cli@1.0.7:
resolution: {integrity: sha512-vqpoL9x3bXiNnC/vzZG3XOyk2vUAHPmBbI/ufyAqbxQHD27OPuUM4n/6m6NBEZZ7V4U2aEiefnZg2SCaSU89oA==} resolution: {integrity: sha512-vqpoL9x3bXiNnC/vzZG3XOyk2vUAHPmBbI/ufyAqbxQHD27OPuUM4n/6m6NBEZZ7V4U2aEiefnZg2SCaSU89oA==}
hasBin: true hasBin: true
dependencies: dependencies:
'@readme/better-ajv-errors': 1.6.0_ajv@8.12.0 '@readme/better-ajv-errors': 1.6.0_ajv@8.12.0
ajv: 8.12.0 ajv: 8.12.0
antlr4ng: 2.0.2 antlr4ng: 2.0.2_antlr4ng-cli@1.0.7
commander: 11.1.0 commander: 11.1.0
glob: 10.3.10 glob: 10.3.10
ts-json-schema-generator: 1.4.0 ts-json-schema-generator: 1.4.0
@ -1281,27 +1281,34 @@ packages:
- antlr4ng-cli - antlr4ng-cli
dev: true dev: true
/antlr4-c3/3.1.1: /antlr4-c3/3.3.7_antlr4ng-cli@1.0.7:
resolution: {integrity: sha512-S7DixV12kxWexTkQYGvooCgHYU5AjF74oYio+ZNgm0XN3EzxDY3J6Si9GprQ4KksvgWwK//EgZnL/26WB+bOpw==} resolution: {integrity: sha512-F3ndE38wwA6z6AjUbL3heSdEGl4TxulGDPf9xB0/IY4dbRHWBh6XNaqFwur8vHKQk9FS5yNABHeg2wqlqIYO0w==}
dependencies: dependencies:
antlr4ts: 0.5.0-alpha.4 antlr4ng: 2.0.11_antlr4ng-cli@1.0.7
transitivePeerDependencies:
- antlr4ng-cli
dev: false dev: false
/antlr4ng/2.0.2: /antlr4ng-cli/1.0.7:
resolution: {integrity: sha512-qN2FsDBmLvsQcA5CWTrPz8I8gNXeS1fgXBBhI78VyxBSBV/EJgqy8ks6IDTC9jyugpl40csCQ4sL5K4i2YZ/2w==}
hasBin: true
/antlr4ng/2.0.11_antlr4ng-cli@1.0.7:
resolution: {integrity: sha512-9jM91VVtHSqHkAHQsXHaoaiewFETMvUTI1/tXvwTiFw4f7zke3IGlwEyoKN9NS0FqIwDKFvUNW2e1cKPniTkVQ==}
peerDependencies:
antlr4ng-cli: 1.0.7
dependencies:
antlr4ng-cli: 1.0.7
dev: false
/antlr4ng/2.0.2_antlr4ng-cli@1.0.7:
resolution: {integrity: sha512-Fhs3AvhoGigRt3RpHw0wGA7n03j9BpskH9yCUViNB7NtKuCA+imy2orEZ8qcgPG98f7IryEPYlG9sx99f3ZOyw==} resolution: {integrity: sha512-Fhs3AvhoGigRt3RpHw0wGA7n03j9BpskH9yCUViNB7NtKuCA+imy2orEZ8qcgPG98f7IryEPYlG9sx99f3ZOyw==}
peerDependencies: peerDependencies:
antlr4ng-cli: 1.0.4 antlr4ng-cli: 1.0.4
dependencies:
antlr4ng-cli: 1.0.7
dev: true dev: true
/antlr4ts-cli/0.5.0-alpha.4:
resolution: {integrity: sha512-lVPVBTA2CVHRYILSKilL6Jd4hAumhSZZWA7UbQNQrmaSSj7dPmmYaN4bOmZG79cOy0lS00i4LY68JZZjZMWVrw==}
hasBin: true
dev: true
/antlr4ts/0.5.0-alpha.4:
resolution: {integrity: sha512-WPQDt1B74OfPv/IMS2ekXAKkTZIHl88uMetg6q3OTqgFxZ/dxDXI0EWLyZid/1Pe6hTftyg5N7gel5wNAGxXyQ==}
dev: false
/anymatch/3.1.3: /anymatch/3.1.3:
resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==}
engines: {node: '>= 8'} engines: {node: '>= 8'}

View File

@ -11,7 +11,7 @@ const outputPath = path.resolve(__dirname, '../src/lib');
const languageEntries = fs.readdirSync(grammarsPath); const languageEntries = fs.readdirSync(grammarsPath);
const baseCmd = 'antlr4ts -visitor -listener -Xexact-output-dir -o'; const baseCmd = 'antlr4ng -Dlanguage=TypeScript -visitor -listener -Xexact-output-dir -o';
function compile(language) { function compile(language) {
const cmd = `${baseCmd} ${outputPath}/${language} ${grammarsPath}/${language}/*.g4`; const cmd = `${baseCmd} ${outputPath}/${language} ${grammarsPath}/${language}/*.g4`;
@ -31,7 +31,7 @@ function compile(language) {
); );
} else { } else {
cleanComment(language); cleanComment(language);
console.log(chalk.greenBright(`Compile ${language} succeeded!`)); console.info(chalk.greenBright(`Compile ${language} succeeded!`));
} }
}); });
} }

View File

@ -53,15 +53,13 @@ function execStandardVersion(res) {
cmd += ` --tag-prefix ${tagPrefix} `; cmd += ` --tag-prefix ${tagPrefix} `;
cmd += ' --infile CHANGELOG.md '; cmd += ' --infile CHANGELOG.md ';
console.log(`Executing: ${cmd} \n`); console.info(`Executing: ${cmd} \n`);
runCommand(cmd) runCommand(cmd)
.then(({ message }) => { .then(({ message }) => {
console.log('Please checkout recent commit, and then'); console.info('Please checkout recent commit, and then');
console.log( console.info('Push branch and new tag to github, publish package to npm');
'Push branch and new tag to github, publish package to npm' // message && console.info(message)
);
// message && console.log(message)
}) })
.catch(({ error, code }) => { .catch(({ error, code }) => {
code && console.error('Error: process exit code' + code); code && console.error('Error: process exit code' + code);

View File

@ -5,6 +5,10 @@
lexer grammar FlinkSqlLexer; lexer grammar FlinkSqlLexer;
options {
caseInsensitive= true;
}
// SKIP // SKIP
SPACE : [ \t\r\n]+ -> channel(HIDDEN); SPACE : [ \t\r\n]+ -> channel(HIDDEN);
@ -585,9 +589,9 @@ ID_LITERAL : ID_LITERAL_FRAG;
fragment JAR_FILE_PARTTARN : '`' ( '\\' . | '``' | ~('`' | '\\'))* '`'; fragment JAR_FILE_PARTTARN : '`' ( '\\' . | '``' | ~('`' | '\\'))* '`';
fragment EXPONENT_NUM_PART : 'E' [-+]? DEC_DIGIT+; fragment EXPONENT_NUM_PART : 'E' [-+]? DEC_DIGIT+;
fragment ID_LITERAL_FRAG : [A-Z_0-9a-z]*? [A-Z_a-z]+? [A-Z_0-9a-z]*; fragment ID_LITERAL_FRAG : [A-Z_0-9]*? [A-Z_]+? [A-Z_0-9]*;
fragment DEC_DIGIT : [0-9]; fragment DEC_DIGIT : [0-9];
fragment DEC_LETTER : [A-Za-z]; fragment DEC_LETTER : [A-Z];
fragment DQUOTA_STRING : '"' ( '\\' . | '""' | ~('"' | '\\'))* '"'; fragment DQUOTA_STRING : '"' ( '\\' . | '""' | ~('"' | '\\'))* '"';
fragment SQUOTA_STRING : '\'' ('\\' . | '\'\'' | ~('\'' | '\\'))* '\''; fragment SQUOTA_STRING : '\'' ('\\' . | '\'\'' | ~('\'' | '\\'))* '\'';
fragment BIT_STRING_L : 'B' '\'' [01]+ '\''; fragment BIT_STRING_L : 'B' '\'' [01]+ '\'';

View File

@ -6,6 +6,7 @@ parser grammar FlinkSqlParser;
options { options {
tokenVocab=FlinkSqlLexer; tokenVocab=FlinkSqlLexer;
caseInsensitive= true;
} }
program program

View File

@ -27,8 +27,9 @@
lexer grammar HiveSqlLexer; lexer grammar HiveSqlLexer;
// unsupported option caseInsensitive in antlr4@4.9 options {
// options { caseInsensitive = true; } caseInsensitive= true;
}
// Keywords // Keywords
KW_ABORT : 'ABORT'; KW_ABORT : 'ABORT';
@ -502,7 +503,7 @@ Identifier: (Letter | Digit) (Letter | Digit | '_')* | QuotedIdentifier | '`' Re
fragment QuotedIdentifier: '`' ('``' | ~'`')* '`'; fragment QuotedIdentifier: '`' ('``' | ~'`')* '`';
fragment Letter: 'A' ..'Z' | 'a' ..'z'; fragment Letter: 'A' ..'Z';
fragment HexDigit: 'A' ..'F'; fragment HexDigit: 'A' ..'F';

View File

@ -29,6 +29,7 @@ parser grammar HiveSqlParser;
options options
{ {
tokenVocab=HiveSqlLexer; tokenVocab=HiveSqlLexer;
caseInsensitive= true;
} }
program program

View File

@ -21,6 +21,7 @@ parser grammar ImpalaSqlParser;
options options
{ {
tokenVocab=ImpalaSqlLexer; tokenVocab=ImpalaSqlLexer;
caseInsensitive= true;
} }
program program
@ -873,7 +874,7 @@ booleanExpression
| left=booleanExpression operator=KW_OR right=booleanExpression # logicalBinary | left=booleanExpression operator=KW_OR right=booleanExpression # logicalBinary
; ;
predicate[ParserRuleContext value] predicate[antlr.ParserRuleContext value]
: comparisonOperator right=valueExpression # comparison : comparisonOperator right=valueExpression # comparison
| comparisonOperator comparisonQuantifier subQueryRelation # quantifiedComparison | comparisonOperator comparisonQuantifier subQueryRelation # quantifiedComparison
| KW_NOT? KW_BETWEEN lower=valueExpression KW_AND upper=valueExpression # between | KW_NOT? KW_BETWEEN lower=valueExpression KW_AND upper=valueExpression # between

View File

@ -35,6 +35,7 @@ parser grammar MySqlParser;
options { options {
tokenVocab= MySqlLexer; tokenVocab= MySqlLexer;
caseInsensitive= true;
} }
// Top Level Description // Top Level Description

View File

@ -36,6 +36,10 @@
lexer grammar PostgreSQLLexer; lexer grammar PostgreSQLLexer;
options {
caseInsensitive= true;
}
/** /**
* Reference Doc: https://www.postgresql.org/docs/16.1/sql-commands.html * Reference Doc: https://www.postgresql.org/docs/16.1/sql-commands.html
*/ */
@ -673,9 +677,9 @@ KW_BUFFER_USAGE_LIMIT : 'BUFFER_USAGE_LIMIT';
Identifier: IdentifierStartChar IdentifierChar*; Identifier: IdentifierStartChar IdentifierChar*;
fragment IdentifierStartChar: // these are the valid identifier start characters below 0x7F fragment IdentifierStartChar: // these are the valid identifier start characters below 0x7F
[a-zA-Z_] [A-Z_]
| // these are the valid characters from 0x80 to 0xFF | // these are the valid characters from 0x80 to 0xFF
[\u00AA\u00B5\u00BA\u00C0-\u00D6\u00D8-\u00F6\u00F8-\u00FF] [\u00AA\u00B5\u00BA\u00C0-\u00D6\u00F8-\u00FF]
| // these are the letters above 0xFF which only need a single UTF-16 code unit | // these are the letters above 0xFF which only need a single UTF-16 code unit
[\u0100-\uD7FF\uE000-\uFFFF] [\u0100-\uD7FF\uE000-\uFFFF]
| // letters which require multiple UTF-16 code units | // letters which require multiple UTF-16 code units
@ -771,7 +775,7 @@ InvalidUnterminatedBinaryStringConstant: 'B' UnterminatedStringConstant;
HexadecimalStringConstant: UnterminatedHexadecimalStringConstant '\''; HexadecimalStringConstant: UnterminatedHexadecimalStringConstant '\'';
UnterminatedHexadecimalStringConstant: 'X' '\'' [0-9a-fA-F]*; UnterminatedHexadecimalStringConstant: 'X' '\'' [0-9A-F]*;
InvalidHexadecimalStringConstant: InvalidUnterminatedHexadecimalStringConstant '\''; InvalidHexadecimalStringConstant: InvalidUnterminatedHexadecimalStringConstant '\'';
@ -791,7 +795,7 @@ Numeric:
fragment Digits: [0-9]+; fragment Digits: [0-9]+;
PLSQLVARIABLENAME: ':' [a-zA-Z_] [a-zA-Z_0-9$]*; PLSQLVARIABLENAME: ':' [A-Z_] [A-Z_0-9$]*;
PLSQLIDENTIFIER: ':"' ('\\' . | '""' | ~ ('"' | '\\'))* '"'; PLSQLIDENTIFIER: ':"' ('\\' . | '""' | ~ ('"' | '\\'))* '"';
// //
@ -861,13 +865,13 @@ fragment EscapeStringText: (
'\'\'' '\'\''
| '\\' ( | '\\' (
// two-digit hex escapes are still valid when treated as single-digit escapes // two-digit hex escapes are still valid when treated as single-digit escapes
'x' [0-9a-fA-F] 'x' [0-9A-F]
| 'u' [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] | 'u' [0-9A-F] [0-9A-F] [0-9A-F] [0-9A-F]
| 'U' [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] [0-9a-fA-F] | 'U' [0-9A-F] [0-9A-F] [0-9A-F] [0-9A-F] [0-9A-F] [0-9A-F] [0-9A-F] [0-9A-F]
| |
// Any character other than the Unicode escapes can follow a backslash. Some have // Any character other than the Unicode escapes can follow a backslash. Some have
// special meaning, but that doesn't affect the syntax. // special meaning, but that doesn't affect the syntax.
~ [xuU] ~ [xu]
) )
| ~ ['\\] | ~ ['\\]
)*; )*;

View File

@ -41,6 +41,7 @@ parser grammar PostgreSQLParser;
options { options {
tokenVocab= PostgreSQLLexer; tokenVocab= PostgreSQLLexer;
caseInsensitive= true;
} }
program program

View File

@ -2366,7 +2366,7 @@ BIT_STRING_LIT: 'B' ('\'' [01]* '\'')+;
// Rule #284 <HEX_STRING_LIT> - subtoken typecast in <REGULAR_ID> // Rule #284 <HEX_STRING_LIT> - subtoken typecast in <REGULAR_ID>
// Lowercase 'x' is a usual addition to the standard // Lowercase 'x' is a usual addition to the standard
HEX_STRING_LIT : 'X' ('\'' [A-Fa-f0-9]* '\'')+; HEX_STRING_LIT : 'X' ('\'' [A-F0-9]* '\'')+;
DOUBLE_PERIOD : '..'; DOUBLE_PERIOD : '..';
PERIOD : '.'; PERIOD : '.';
@ -2468,9 +2468,7 @@ REMARK_COMMENT:
PROMPT_MESSAGE: 'PRO' {this.IsNewlineAtPos(-4)}? 'MPT'? (' ' ~('\r' | '\n')*)? NEWLINE_EOF; PROMPT_MESSAGE: 'PRO' {this.IsNewlineAtPos(-4)}? 'MPT'? (' ' ~('\r' | '\n')*)? NEWLINE_EOF;
// TODO: should starts with newline // TODO: should starts with newline
START_CMD START_CMD: // https://docs.oracle.com/cd/B19306_01/server.102/b14357/ch12002.htm
//: 'STA' 'RT'? SPACE ~('\r' | '\n')* NEWLINE_EOF
: // https://docs.oracle.com/cd/B19306_01/server.102/b14357/ch12002.htm
'@' {this.IsNewlineAtPos(-2)}? '@'? ~('\r' | '\n')* NEWLINE_EOF; // https://docs.oracle.com/cd/B19306_01/server.102/b14357/ch12003.htm '@' {this.IsNewlineAtPos(-2)}? '@'? ~('\r' | '\n')* NEWLINE_EOF; // https://docs.oracle.com/cd/B19306_01/server.102/b14357/ch12003.htm
REGULAR_ID: SIMPLE_LETTER (SIMPLE_LETTER | '$' | '_' | '#' | [0-9])*; REGULAR_ID: SIMPLE_LETTER (SIMPLE_LETTER | '$' | '_' | '#' | [0-9])*;
@ -2481,7 +2479,7 @@ SPACES: [ \t\r\n]+ -> channel(HIDDEN);
fragment NEWLINE_EOF : NEWLINE | EOF; fragment NEWLINE_EOF : NEWLINE | EOF;
fragment QUESTION_MARK : '?'; fragment QUESTION_MARK : '?';
fragment SIMPLE_LETTER : [a-zA-Z]; fragment SIMPLE_LETTER : [A-Z];
fragment FLOAT_FRAGMENT : UNSIGNED_INTEGER* '.'? UNSIGNED_INTEGER+; fragment FLOAT_FRAGMENT : UNSIGNED_INTEGER* '.'? UNSIGNED_INTEGER+;
fragment NEWLINE : '\r'? '\n'; fragment NEWLINE : '\r'? '\n';
fragment SPACE : [ \t]; fragment SPACE : [ \t];

View File

@ -32,6 +32,7 @@ parser grammar PlSqlParser;
options { options {
tokenVocab=PlSqlLexer; tokenVocab=PlSqlLexer;
superClass=PlSqlBaseParser; superClass=PlSqlBaseParser;
caseInsensitive= true;
} }
@parser::header { @parser::header {

View File

@ -25,6 +25,10 @@
lexer grammar SparkSqlLexer; lexer grammar SparkSqlLexer;
options {
caseInsensitive= true;
}
@members { @members {
/** /**
* When true, parser should throw ParseException for unclosed bracketed comment. * When true, parser should throw ParseException for unclosed bracketed comment.
@ -469,7 +473,7 @@ fragment EXPONENT: 'E' [+-]? DIGIT+;
fragment DIGIT: [0-9]; fragment DIGIT: [0-9];
fragment LETTER: [A-Za-z]; fragment LETTER: [A-Z];
SIMPLE_COMMENT: '--' ('\\\n' | ~[\r\n])* '\r'? '\n'? -> channel(HIDDEN); SIMPLE_COMMENT: '--' ('\\\n' | ~[\r\n])* '\r'? '\n'? -> channel(HIDDEN);

View File

@ -26,6 +26,7 @@ parser grammar SparkSqlParser;
options { options {
tokenVocab=SparkSqlLexer; tokenVocab=SparkSqlLexer;
caseInsensitive= true;
} }
program program

View File

@ -23,6 +23,10 @@
grammar TrinoSql; grammar TrinoSql;
options {
caseInsensitive= true;
}
tokens { tokens {
DELIMITER DELIMITER
} }
@ -419,7 +423,7 @@ booleanExpression
; ;
// workaround for https://github.com/antlr/antlr4/issues/780 // workaround for https://github.com/antlr/antlr4/issues/780
predicate[ParserRuleContext value] predicate[antlr.ParserRuleContext value]
: comparisonOperator right= valueExpression # comparison : comparisonOperator right= valueExpression # comparison
| comparisonOperator comparisonQuantifier '(' query ')' # quantifiedComparison | comparisonOperator comparisonQuantifier '(' query ')' # quantifiedComparison
| KW_NOT? KW_BETWEEN lower= valueExpression KW_AND upper= valueExpression # between | KW_NOT? KW_BETWEEN lower= valueExpression KW_AND upper= valueExpression # between
@ -1231,7 +1235,7 @@ fragment EXPONENT: 'E' [+-]? DIGIT+;
fragment DIGIT: [0-9]; fragment DIGIT: [0-9];
fragment LETTER: [A-Za-z]; fragment LETTER: [A-Z];
SIMPLE_COMMENT: '--' ~[\r\n]* '\r'? '\n'? -> channel(HIDDEN); SIMPLE_COMMENT: '--' ~[\r\n]* '\r'? '\n'? -> channel(HIDDEN);

View File

@ -1,4 +1,4 @@
export { AbstractParseTreeVisitor } from 'antlr4ts/tree/AbstractParseTreeVisitor'; export { AbstractParseTreeVisitor } from 'antlr4ng';
export { export {
MySQL, MySQL,

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,4 +1,4 @@
import { Lexer } from "antlr4ts/Lexer"; import { Lexer } from "antlr4ng";
export default abstract class PlSqlBaseLexer extends Lexer { export default abstract class PlSqlBaseLexer extends Lexer {

View File

@ -1,6 +1,4 @@
import { Parser } from "antlr4ts/Parser"; import { Parser, TokenStream } from "antlr4ng";
import { TokenStream } from "antlr4ts/TokenStream";
export default abstract class PlSqlBaseParser extends Parser { export default abstract class PlSqlBaseParser extends Parser {
private _isVersion10: boolean = false; private _isVersion10: boolean = false;

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

View File

@ -4,10 +4,11 @@ import {
Token, Token,
CharStreams, CharStreams,
CommonTokenStream, CommonTokenStream,
CodePointCharStream, CharStream,
ParserRuleContext, ParserRuleContext,
} from 'antlr4ts'; ParseTreeWalker,
import { ParseTreeWalker, ParseTreeListener } from 'antlr4ts/tree'; ParseTreeListener,
} from 'antlr4ng';
import { CandidatesCollection, CodeCompletionCore } from 'antlr4-c3'; import { CandidatesCollection, CodeCompletionCore } from 'antlr4-c3';
import { findCaretTokenIndex } from './utils/findCaretTokenIndex'; import { findCaretTokenIndex } from './utils/findCaretTokenIndex';
import { import {
@ -38,7 +39,7 @@ export default abstract class BasicParser<
P extends IParser<PRC> = IParser<PRC>, P extends IParser<PRC> = IParser<PRC>,
> { > {
/** members for cache start */ /** members for cache start */
protected _charStreams: CodePointCharStream; protected _charStreams: CharStream;
protected _lexer: L; protected _lexer: L;
protected _tokenStream: CommonTokenStream; protected _tokenStream: CommonTokenStream;
protected _parser: P; protected _parser: P;
@ -60,7 +61,7 @@ export default abstract class BasicParser<
* Create a antlr4 Lexer instance. * Create a antlr4 Lexer instance.
* @param input source string * @param input source string
*/ */
protected abstract createLexerFromCharStream(charStreams: CodePointCharStream): L; protected abstract createLexerFromCharStream(charStreams: CharStream): L;
/** /**
* Create Parser by CommonTokenStream * Create Parser by CommonTokenStream
@ -92,7 +93,7 @@ export default abstract class BasicParser<
* @param input string * @param input string
*/ */
public createLexer(input: string, errorListener?: ErrorListener<any>) { public createLexer(input: string, errorListener?: ErrorListener<any>) {
const charStreams = CharStreams.fromString(input.toUpperCase()); const charStreams = CharStreams.fromString(input);
const lexer = this.createLexerFromCharStream(charStreams); const lexer = this.createLexerFromCharStream(charStreams);
if (errorListener) { if (errorListener) {
lexer.removeErrorListeners(); lexer.removeErrorListeners();
@ -126,7 +127,7 @@ export default abstract class BasicParser<
*/ */
public parse(input: string, errorListener?: ErrorListener<any>) { public parse(input: string, errorListener?: ErrorListener<any>) {
const parser = this.createParser(input, errorListener); const parser = this.createParser(input, errorListener);
parser.buildParseTree = true; parser.buildParseTrees = true;
parser.errorHandler = new ErrorStrategy(); parser.errorHandler = new ErrorStrategy();
return parser.program(); return parser.program();
@ -139,7 +140,7 @@ export default abstract class BasicParser<
*/ */
private createParserWithCache(input: string): P { private createParserWithCache(input: string): P {
this._parseTree = null; this._parseTree = null;
this._charStreams = CharStreams.fromString(input.toUpperCase()); this._charStreams = CharStreams.fromString(input);
this._lexer = this.createLexerFromCharStream(this._charStreams); this._lexer = this.createLexerFromCharStream(this._charStreams);
this._lexer.removeErrorListeners(); this._lexer.removeErrorListeners();
@ -154,7 +155,7 @@ export default abstract class BasicParser<
this._tokenStream.fill(); this._tokenStream.fill();
this._parser = this.createParserFromTokenStream(this._tokenStream); this._parser = this.createParserFromTokenStream(this._tokenStream);
this._parser.buildParseTree = true; this._parser.buildParseTrees = true;
this._parser.errorHandler = new ErrorStrategy(); this._parser.errorHandler = new ErrorStrategy();
return this._parser; return this._parser;
@ -239,13 +240,13 @@ export default abstract class BasicParser<
const res = splitListener.statementsContext.map((context) => { const res = splitListener.statementsContext.map((context) => {
const { start, stop } = context; const { start, stop } = context;
return { return {
startIndex: start.startIndex, startIndex: start.start,
endIndex: stop.stopIndex, endIndex: stop.stop,
startLine: start.line, startLine: start.line,
endLine: stop.line, endLine: stop.line,
startColumn: start.charPositionInLine + 1, startColumn: start.column + 1,
endColumn: stop.charPositionInLine + 1 + stop.text.length, endColumn: stop.column + 1 + stop.text.length,
text: this._parsedInput.slice(start.startIndex, stop.stopIndex + 1), text: this._parsedInput.slice(start.start, stop.stop + 1),
}; };
}); });
@ -317,8 +318,8 @@ export default abstract class BasicParser<
} }
// A boundary consisting of the index of the input. // A boundary consisting of the index of the input.
const startIndex = startStatement?.start?.startIndex ?? 0; const startIndex = startStatement?.start?.start ?? 0;
const stopIndex = stopStatement?.stop?.stopIndex ?? input.length - 1; const stopIndex = stopStatement?.stop?.stop ?? input.length - 1;
/** /**
* Save offset of the tokenIndex in the range of input * Save offset of the tokenIndex in the range of input
@ -340,7 +341,7 @@ export default abstract class BasicParser<
const parser = this.createParserFromTokenStream(tokenStream); const parser = this.createParserFromTokenStream(tokenStream);
parser.removeErrorListeners(); parser.removeErrorListeners();
parser.buildParseTree = true; parser.buildParseTrees = true;
parser.errorHandler = new ErrorStrategy(); parser.errorHandler = new ErrorStrategy();
sqlParserIns = parser; sqlParserIns = parser;
@ -362,12 +363,12 @@ export default abstract class BasicParser<
(syntaxCtx) => { (syntaxCtx) => {
const wordRanges: WordRange[] = syntaxCtx.wordRanges.map((token) => { const wordRanges: WordRange[] = syntaxCtx.wordRanges.map((token) => {
return { return {
text: this._parsedInput.slice(token.startIndex, token.stopIndex + 1), text: this._parsedInput.slice(token.start, token.stop + 1),
startIndex: token.startIndex, startIndex: token.start,
endIndex: token.stopIndex, endIndex: token.stop,
line: token.line, line: token.line,
startColumn: token.charPositionInLine + 1, startColumn: token.column + 1,
stopColumn: token.charPositionInLine + 1 + token.text.length, stopColumn: token.column + 1 + token.text.length,
}; };
}); });
return { return {

View File

@ -1,10 +1,12 @@
import { DefaultErrorStrategy } from 'antlr4ts/DefaultErrorStrategy'; import {
import { Parser } from 'antlr4ts/Parser'; DefaultErrorStrategy,
import { InputMismatchException } from 'antlr4ts/InputMismatchException'; Parser,
import { IntervalSet } from 'antlr4ts/misc/IntervalSet'; InputMismatchException,
import { ParserRuleContext } from 'antlr4ts/ParserRuleContext'; IntervalSet,
import { RecognitionException } from 'antlr4ts/RecognitionException'; ParserRuleContext,
import { Token } from 'antlr4ts/Token'; RecognitionException,
Token,
} from 'antlr4ng';
/** /**
* Base on DefaultErrorStrategy. * Base on DefaultErrorStrategy.
@ -33,7 +35,7 @@ export class ErrorStrategy extends DefaultErrorStrategy {
if (!this.lastErrorStates) { if (!this.lastErrorStates) {
this.lastErrorStates = new IntervalSet(); this.lastErrorStates = new IntervalSet();
} }
this.lastErrorStates.add(recognizer.state); this.lastErrorStates.addOne(recognizer.state);
let followSet: IntervalSet = this.getErrorRecoverySet(recognizer); let followSet: IntervalSet = this.getErrorRecoverySet(recognizer);
this.consumeUntil(recognizer, followSet); this.consumeUntil(recognizer, followSet);
} }
@ -43,11 +45,7 @@ export class ErrorStrategy extends DefaultErrorStrategy {
if (this.nextTokensContext === undefined) { if (this.nextTokensContext === undefined) {
e = new InputMismatchException(recognizer); e = new InputMismatchException(recognizer);
} else { } else {
e = new InputMismatchException( e = new InputMismatchException(recognizer);
recognizer,
this.nextTokensState,
this.nextTokensContext
);
} }
// Mark the context as an anomaly // Mark the context as an anomaly

View File

@ -1,5 +1,10 @@
import { Token, Recognizer, ANTLRErrorListener, RecognitionException } from 'antlr4ts'; import {
import { ATNSimulator } from 'antlr4ts/atn/ATNSimulator'; Token,
Recognizer,
ANTLRErrorListener,
RecognitionException,
ATNSimulator,
} from 'antlr4ng';
/** /**
* Converted from {@link SyntaxError}. * Converted from {@link SyntaxError}.
@ -20,7 +25,7 @@ export interface ParseError {
* The type of error resulting from lexical parsing and parsing. * The type of error resulting from lexical parsing and parsing.
*/ */
export interface SyntaxError<T> { export interface SyntaxError<T> {
readonly recognizer: Recognizer<T, ATNSimulator>; readonly recognizer: Recognizer<ATNSimulator>;
readonly offendingSymbol: Token; readonly offendingSymbol: Token;
readonly line: number; readonly line: number;
readonly charPositionInLine: number; readonly charPositionInLine: number;
@ -34,15 +39,21 @@ export interface SyntaxError<T> {
*/ */
export type ErrorListener<T> = (parseError: ParseError, originalError: SyntaxError<T>) => void; export type ErrorListener<T> = (parseError: ParseError, originalError: SyntaxError<T>) => void;
export default class ParseErrorListener implements ANTLRErrorListener<Token> { export default class ParseErrorListener implements ANTLRErrorListener {
private _errorListener: ErrorListener<Token>; private _errorListener: ErrorListener<Token>;
constructor(errorListener: ErrorListener<Token>) { constructor(errorListener: ErrorListener<Token>) {
this._errorListener = errorListener; this._errorListener = errorListener;
} }
reportAmbiguity() {}
reportAttemptingFullContext() {}
reportContextSensitivity() {}
syntaxError( syntaxError(
recognizer: Recognizer<Token, ATNSimulator>, recognizer: Recognizer<ATNSimulator>,
offendingSymbol, offendingSymbol,
line: number, line: number,
charPositionInLine: number, charPositionInLine: number,

View File

@ -1,4 +1,4 @@
import { Token } from 'antlr4ts'; import { Token } from 'antlr4ng';
import { CaretPosition } from '../basic-parser-types'; import { CaretPosition } from '../basic-parser-types';
/** /**
@ -15,15 +15,11 @@ export function findCaretTokenIndex(caretPosition: CaretPosition, allTokens: Tok
while (left <= right) { while (left <= right) {
const mid = left + ((right - left) >> 1); const mid = left + ((right - left) >> 1);
const token = allTokens[mid]; const token = allTokens[mid];
if ( if (token.line > caretLine || (token.line === caretLine && token.column + 1 >= caretCol)) {
token.line > caretLine ||
(token.line === caretLine && token.charPositionInLine + 1 >= caretCol)
) {
right = mid - 1; right = mid - 1;
} else if ( } else if (
token.line < caretLine || token.line < caretLine ||
(token.line === caretLine && (token.line === caretLine && token.column + token.text.length + 1 < caretCol)
token.charPositionInLine + token.text.length + 1 < caretCol)
) { ) {
left = mid + 1; left = mid + 1;
} else { } else {

View File

@ -1,4 +1,4 @@
import { Token } from 'antlr4ts'; import { Token } from 'antlr4ng';
import { CandidatesCollection } from 'antlr4-c3'; import { CandidatesCollection } from 'antlr4-c3';
import { FlinkSqlLexer } from '../lib/flinksql/FlinkSqlLexer'; import { FlinkSqlLexer } from '../lib/flinksql/FlinkSqlLexer';
import { import {
@ -139,7 +139,10 @@ export class FlinkSqlSplitListener implements FlinkSqlParserListener {
this._statementsContext.push(ctx); this._statementsContext.push(ctx);
}; };
enterSingleStatement = (ctx: SingleStatementContext) => {}; visitTerminal() {}
visitErrorNode() {}
enterEveryRule() {}
exitEveryRule() {}
get statementsContext() { get statementsContext() {
return this._statementsContext; return this._statementsContext;

View File

@ -1,4 +1,4 @@
import { Token } from 'antlr4ts'; import { Token } from 'antlr4ng';
import { CandidatesCollection } from 'antlr4-c3'; import { CandidatesCollection } from 'antlr4-c3';
import { HiveSqlLexer } from '../lib/hive/HiveSqlLexer'; import { HiveSqlLexer } from '../lib/hive/HiveSqlLexer';
import { HiveSqlParser, ProgramContext, StatementContext } from '../lib/hive/HiveSqlParser'; import { HiveSqlParser, ProgramContext, StatementContext } from '../lib/hive/HiveSqlParser';
@ -130,7 +130,10 @@ export class HiveSqlSplitListener implements HiveSqlParserListener {
this._statementContext.push(ctx); this._statementContext.push(ctx);
}; };
enterStatement = (ctx: StatementContext) => {}; visitTerminal() {}
visitErrorNode() {}
enterEveryRule() {}
exitEveryRule() {}
get statementsContext() { get statementsContext() {
return this._statementContext; return this._statementContext;

View File

@ -1,4 +1,4 @@
import { Token } from 'antlr4ts'; import { Token } from 'antlr4ng';
import { CandidatesCollection } from 'antlr4-c3'; import { CandidatesCollection } from 'antlr4-c3';
import { ImpalaSqlLexer } from '../lib/impala/ImpalaSqlLexer'; import { ImpalaSqlLexer } from '../lib/impala/ImpalaSqlLexer';
import { import {
@ -135,7 +135,10 @@ export class ImpalaSqlSplitListener implements ImpalaSqlParserListener {
this._statementContext.push(ctx); this._statementContext.push(ctx);
}; };
enterSingleStatement = (ctx: SingleStatementContext) => {}; visitTerminal() {}
visitErrorNode() {}
enterEveryRule() {}
exitEveryRule() {}
get statementsContext() { get statementsContext() {
return this._statementContext; return this._statementContext;

View File

@ -1,4 +1,4 @@
import { Token } from 'antlr4ts'; import { Token } from 'antlr4ng';
import { CandidatesCollection } from 'antlr4-c3'; import { CandidatesCollection } from 'antlr4-c3';
import { MySqlLexer } from '../lib/mysql/MySqlLexer'; import { MySqlLexer } from '../lib/mysql/MySqlLexer';
import { MySqlParser, ProgramContext, SingleStatementContext } from '../lib/mysql/MySqlParser'; import { MySqlParser, ProgramContext, SingleStatementContext } from '../lib/mysql/MySqlParser';
@ -130,7 +130,10 @@ export class MysqlSplitListener implements MySqlParserListener {
this._statementsContext.push(ctx); this._statementsContext.push(ctx);
}; };
enterSingleStatement = (ctx: SingleStatementContext) => {}; visitTerminal() {}
visitErrorNode() {}
enterEveryRule() {}
exitEveryRule() {}
get statementsContext() { get statementsContext() {
return this._statementsContext; return this._statementsContext;

View File

@ -1,4 +1,4 @@
import { Token } from 'antlr4ts'; import { Token } from 'antlr4ng';
import { CandidatesCollection } from 'antlr4-c3'; import { CandidatesCollection } from 'antlr4-c3';
import { PostgreSQLLexer } from '../lib/pgsql/PostgreSQLLexer'; import { PostgreSQLLexer } from '../lib/pgsql/PostgreSQLLexer';
import { PostgreSQLParser, ProgramContext, SingleStmtContext } from '../lib/pgsql/PostgreSQLParser'; import { PostgreSQLParser, ProgramContext, SingleStmtContext } from '../lib/pgsql/PostgreSQLParser';
@ -152,7 +152,10 @@ export class PgSqlSplitListener implements PostgreSQLParserListener {
this._statementsContext.push(ctx); this._statementsContext.push(ctx);
}; };
enterSingleStmt = (ctx: SingleStmtContext) => {}; visitTerminal() {}
visitErrorNode() {}
enterEveryRule() {}
exitEveryRule() {}
get statementsContext() { get statementsContext() {
return this._statementsContext; return this._statementsContext;

View File

@ -1,4 +1,4 @@
import { Token } from 'antlr4ts'; import { Token } from 'antlr4ng';
import { CandidatesCollection } from 'antlr4-c3'; import { CandidatesCollection } from 'antlr4-c3';
import { PlSqlLexer } from '../lib/plsql/PlSqlLexer'; import { PlSqlLexer } from '../lib/plsql/PlSqlLexer';
import { PlSqlParser, ProgramContext } from '../lib/plsql/PlSqlParser'; import { PlSqlParser, ProgramContext } from '../lib/plsql/PlSqlParser';

View File

@ -1,4 +1,4 @@
import { Token } from 'antlr4ts'; import { Token } from 'antlr4ng';
import { CandidatesCollection } from 'antlr4-c3'; import { CandidatesCollection } from 'antlr4-c3';
import { SparkSqlLexer } from '../lib/spark/SparkSqlLexer'; import { SparkSqlLexer } from '../lib/spark/SparkSqlLexer';
import { import {
@ -135,7 +135,10 @@ export class SparkSqlSplitListener implements SparkSqlParserListener {
this._statementsContext.push(ctx); this._statementsContext.push(ctx);
}; };
enterSingleStatement = (ctx: SingleStatementContext) => {}; visitTerminal() {}
visitErrorNode() {}
enterEveryRule() {}
exitEveryRule() {}
get statementsContext() { get statementsContext() {
return this._statementsContext; return this._statementsContext;

View File

@ -1,4 +1,4 @@
import { Token } from 'antlr4ts'; import { Token } from 'antlr4ng';
import { CandidatesCollection } from 'antlr4-c3'; import { CandidatesCollection } from 'antlr4-c3';
import { TrinoSqlLexer } from '../lib/trinosql/TrinoSqlLexer'; import { TrinoSqlLexer } from '../lib/trinosql/TrinoSqlLexer';
import { import {
@ -135,6 +135,11 @@ export class TrinoSqlSplitListener implements TrinoSqlListener {
this._statementsContext.push(ctx); this._statementsContext.push(ctx);
}; };
visitTerminal() {}
visitErrorNode() {}
enterEveryRule() {}
exitEveryRule() {}
get statementsContext() { get statementsContext() {
return this._statementsContext; return this._statementsContext;
} }

View File

@ -1,7 +1,7 @@
import FlinkSQL from 'src/parser/flinksql'; import FlinkSQL from 'src/parser/flinksql';
import { FlinkSqlLexer } from 'src/lib/flinksql/FlinkSqlLexer'; import { FlinkSqlLexer } from 'src/lib/flinksql/FlinkSqlLexer';
import { ErrorListener } from 'src/parser/common/parseErrorListener'; import { ErrorListener } from 'src/parser/common/parseErrorListener';
import { CommonTokenStream } from 'antlr4ts'; import { CommonTokenStream } from 'antlr4ng';
describe('BasicParser unit tests', () => { describe('BasicParser unit tests', () => {
const flinkParser = new FlinkSQL(); const flinkParser = new FlinkSQL();

View File

@ -50,7 +50,7 @@ export function benchmark(name: string, fn: Function, times: number = 1): [numbe
const msg = `Benchmark: ${name} executed ${times} times. Total time: ${totalTime.toFixed( const msg = `Benchmark: ${name} executed ${times} times. Total time: ${totalTime.toFixed(
2 2
)}ms. Average time: ${averageTime.toFixed(2)}ms`; )}ms. Average time: ${averageTime.toFixed(2)}ms`;
console.log(msg); console.info(msg);
return [totalTime, averageTime, msg]; return [totalTime, averageTime, msg];
} }

View File

@ -19,9 +19,9 @@ describe('FlinkSQL ErrorStrategy test', () => {
const statementCount = splitListener.statementsContext.length; const statementCount = splitListener.statementsContext.length;
splitListener.statementsContext.map((item, index) => { splitListener.statementsContext.map((item, index) => {
if (index !== statementCount - 1 && index !== statementCount - 2) { if (index !== statementCount - 1 && index !== statementCount - 2) {
expect(item.exception).not.toBe(undefined); expect(item.exception).not.toBe(null);
} else { } else {
expect(item.exception).toBe(undefined); expect(item.exception).toBe(null);
} }
}); });
}); });
@ -36,9 +36,9 @@ describe('FlinkSQL ErrorStrategy test', () => {
const statementCount = splitListener.statementsContext.length; const statementCount = splitListener.statementsContext.length;
splitListener.statementsContext.map((item, index) => { splitListener.statementsContext.map((item, index) => {
if (index !== statementCount - 1 && index !== 0) { if (index !== statementCount - 1 && index !== 0) {
expect(item.exception).not.toBe(undefined); expect(item.exception).not.toBe(null);
} else { } else {
expect(item.exception).toBe(undefined); expect(item.exception).toBe(null);
} }
}); });
}); });
@ -52,9 +52,9 @@ describe('FlinkSQL ErrorStrategy test', () => {
splitListener.statementsContext.map((item, index) => { splitListener.statementsContext.map((item, index) => {
if (index !== 0 && index !== 1) { if (index !== 0 && index !== 1) {
expect(item.exception).not.toBe(undefined); expect(item.exception).not.toBe(null);
} else { } else {
expect(item.exception).toBe(undefined); expect(item.exception).toBe(null);
} }
}); });
}); });

View File

@ -1,4 +1,4 @@
import { ParseTreeListener } from 'antlr4ts/tree/ParseTreeListener'; import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from 'antlr4ng';
import FlinkSQL from 'src/parser/flinksql'; import FlinkSQL from 'src/parser/flinksql';
import { FlinkSqlParserListener } from 'src/lib/flinksql/FlinkSqlParserListener'; import { FlinkSqlParserListener } from 'src/lib/flinksql/FlinkSqlParserListener';
import { TableExpressionContext } from 'src/lib/flinksql/FlinkSqlParser'; import { TableExpressionContext } from 'src/lib/flinksql/FlinkSqlParser';
@ -14,12 +14,16 @@ describe('Flink SQL Listener Tests', () => {
let result = ''; let result = '';
class MyListener implements FlinkSqlParserListener { class MyListener implements FlinkSqlParserListener {
enterTableExpression = (ctx: TableExpressionContext): void => { enterTableExpression = (ctx: TableExpressionContext): void => {
result = ctx.text.toLowerCase(); result = ctx.getText().toLowerCase();
}; };
visitTerminal(node: TerminalNode): void {}
visitErrorNode(node: ErrorNode): void {}
enterEveryRule(node: ParserRuleContext): void {}
exitEveryRule(node: ParserRuleContext): void {}
} }
const listenTableName = new MyListener(); const listenTableName = new MyListener();
await parser.listen(listenTableName as ParseTreeListener, parseTree); await parser.listen(listenTableName, parseTree);
expect(result).toBe(expectTableName); expect(result).toBe(expectTableName);
}); });

View File

@ -1,6 +1,7 @@
import FlinkSQL from 'src/parser/flinksql'; import FlinkSQL from 'src/parser/flinksql';
import { FlinkSqlParserVisitor } from 'src/lib/flinksql/FlinkSqlParserVisitor'; import { FlinkSqlParserVisitor } from 'src/lib/flinksql/FlinkSqlParserVisitor';
import { AbstractParseTreeVisitor } from 'antlr4ts/tree/AbstractParseTreeVisitor'; import { AbstractParseTreeVisitor } from 'antlr4ng';
import { TableExpressionContext } from 'src/lib/flinksql/FlinkSqlParser';
describe('Flink SQL Visitor Tests', () => { describe('Flink SQL Visitor Tests', () => {
const expectTableName = 'user1'; const expectTableName = 'user1';
@ -8,7 +9,7 @@ describe('Flink SQL Visitor Tests', () => {
const parser = new FlinkSQL(); const parser = new FlinkSQL();
const parseTree = parser.parse(sql, (error) => { const parseTree = parser.parse(sql, (error) => {
console.log('Parse error:', error); console.error('Parse error:', error);
}); });
test('Visitor visitTableName', () => { test('Visitor visitTableName', () => {
@ -20,9 +21,9 @@ describe('Flink SQL Visitor Tests', () => {
protected defaultResult() { protected defaultResult() {
return result; return result;
} }
visitTableExpression = (ctx): void => { visitTableExpression(ctx: TableExpressionContext) {
result = ctx.text.toLowerCase(); result = ctx.getText().toLowerCase();
}; }
} }
const visitor: any = new MyVisitor(); const visitor: any = new MyVisitor();
visitor.visit(parseTree); visitor.visit(parseTree);

View File

@ -19,9 +19,9 @@ describe('HiveSQL ErrorStrategy test', () => {
const statementCount = splitListener.statementsContext.length; const statementCount = splitListener.statementsContext.length;
splitListener.statementsContext.map((item, index) => { splitListener.statementsContext.map((item, index) => {
if (index !== statementCount - 1 && index !== statementCount - 2) { if (index !== statementCount - 1 && index !== statementCount - 2) {
expect(item.exception).not.toBe(undefined); expect(item.exception).not.toBe(null);
} else { } else {
expect(item.exception).toBe(undefined); expect(item.exception).toBe(null);
} }
}); });
}); });
@ -36,9 +36,9 @@ describe('HiveSQL ErrorStrategy test', () => {
const statementCount = splitListener.statementsContext.length; const statementCount = splitListener.statementsContext.length;
splitListener.statementsContext.map((item, index) => { splitListener.statementsContext.map((item, index) => {
if (index !== statementCount - 1 && index !== 0) { if (index !== statementCount - 1 && index !== 0) {
expect(item.exception).not.toBe(undefined); expect(item.exception).not.toBe(null);
} else { } else {
expect(item.exception).toBe(undefined); expect(item.exception).toBe(null);
} }
}); });
}); });
@ -52,9 +52,9 @@ describe('HiveSQL ErrorStrategy test', () => {
splitListener.statementsContext.map((item, index) => { splitListener.statementsContext.map((item, index) => {
if (index !== 0 && index !== 1) { if (index !== 0 && index !== 1) {
expect(item.exception).not.toBe(undefined); expect(item.exception).not.toBe(null);
} else { } else {
expect(item.exception).toBe(undefined); expect(item.exception).toBe(null);
} }
}); });
}); });

View File

@ -1,6 +1,6 @@
import { ParseTreeListener } from 'antlr4ts/tree/ParseTreeListener'; import { ParseTreeListener } from 'antlr4ng';
import HiveSQL from 'src/parser/hive'; import HiveSQL from 'src/parser/hive';
import { ProgramContext } from 'src/lib/hive/HiveSqlParser'; import { ProgramContext, SelectItemContext } from 'src/lib/hive/HiveSqlParser';
import { HiveSqlParserListener } from 'src/lib/hive/HiveSqlParserListener'; import { HiveSqlParserListener } from 'src/lib/hive/HiveSqlParserListener';
describe('HiveSQL Listener Tests', () => { describe('HiveSQL Listener Tests', () => {
@ -12,14 +12,18 @@ describe('HiveSQL Listener Tests', () => {
let result = ''; let result = '';
class MyListener implements HiveSqlParserListener { class MyListener implements HiveSqlParserListener {
enterSelectItem(ctx) { enterSelectItem(ctx: SelectItemContext) {
result = ctx.text; result = ctx.getText();
} }
visitTerminal() {}
visitErrorNode() {}
enterEveryRule() {}
exitEveryRule() {}
} }
const listenTableName = new MyListener(); const listenTableName = new MyListener();
await parser.listen(listenTableName as ParseTreeListener, parseTree as ProgramContext); await parser.listen(listenTableName as ParseTreeListener, parseTree as ProgramContext);
expect(result).toBe(expectTableName.toUpperCase()); expect(result).toBe(expectTableName);
}); });
test('Listener enterCreateTable', async () => { test('Listener enterCreateTable', async () => {
const sql = `drop table table_name;`; const sql = `drop table table_name;`;
@ -27,13 +31,18 @@ describe('HiveSQL Listener Tests', () => {
let result = ''; let result = '';
class MyListener implements HiveSqlParserListener { class MyListener implements HiveSqlParserListener {
enterDropTableStatement(ctx) { enterDropTableStatement(ctx) {
result = ctx.text; result = ctx.getText();
} }
visitTerminal() {}
visitErrorNode() {}
enterEveryRule() {}
exitEveryRule() {}
} }
const listenTableName = new MyListener(); const listenTableName = new MyListener();
await parser.listen(listenTableName as ParseTreeListener, parseTree as ProgramContext); await parser.listen(listenTableName as ParseTreeListener, parseTree as ProgramContext);
expect(result).toBe('DROPTABLETABLE_NAME'); expect(result).toBe('droptabletable_name');
}); });
test('Split sql listener', async () => { test('Split sql listener', async () => {

View File

@ -1,8 +1,8 @@
import { AbstractParseTreeVisitor } from 'antlr4ts/tree/AbstractParseTreeVisitor'; import { AbstractParseTreeVisitor } from 'antlr4ng';
import HiveSQL from 'src/parser/hive'; import HiveSQL from 'src/parser/hive';
import { HiveSqlParserVisitor } from 'src/lib/hive/HiveSqlParserVisitor'; import { HiveSqlParserVisitor } from 'src/lib/hive/HiveSqlParserVisitor';
import { ProgramContext } from 'src/lib/hive/HiveSqlParser'; import { ProgramContext, TableNameContext } from 'src/lib/hive/HiveSqlParser';
describe('HiveSQL Visitor Tests', () => { describe('HiveSQL Visitor Tests', () => {
const expectTableName = 'dm_gis.dlv_addr_tc_count'; const expectTableName = 'dm_gis.dlv_addr_tc_count';
@ -10,7 +10,7 @@ describe('HiveSQL Visitor Tests', () => {
const parser = new HiveSQL(); const parser = new HiveSQL();
const parseTree = parser.parse(sql, (error) => { const parseTree = parser.parse(sql, (error) => {
console.log('Parse error:', error); console.error('Parse error:', error);
}); });
test('Visitor visitTableName', () => { test('Visitor visitTableName', () => {
@ -20,8 +20,8 @@ describe('HiveSQL Visitor Tests', () => {
return result; return result;
} }
visitTableName(ctx) { visitTableName(ctx: TableNameContext) {
result = ctx.text.toLowerCase(); result = ctx.getText().toLowerCase();
} }
} }

View File

@ -19,9 +19,9 @@ describe('ImpalaSQL ErrorStrategy test', () => {
const statementCount = splitListener.statementsContext.length; const statementCount = splitListener.statementsContext.length;
splitListener.statementsContext.map((item, index) => { splitListener.statementsContext.map((item, index) => {
if (index !== statementCount - 1 && index !== statementCount - 2) { if (index !== statementCount - 1 && index !== statementCount - 2) {
expect(item.exception).not.toBe(undefined); expect(item.exception).not.toBe(null);
} else { } else {
expect(item.exception).toBe(undefined); expect(item.exception).toBe(null);
} }
}); });
}); });
@ -36,9 +36,9 @@ describe('ImpalaSQL ErrorStrategy test', () => {
const statementCount = splitListener.statementsContext.length; const statementCount = splitListener.statementsContext.length;
splitListener.statementsContext.map((item, index) => { splitListener.statementsContext.map((item, index) => {
if (index !== statementCount - 1 && index !== 0) { if (index !== statementCount - 1 && index !== 0) {
expect(item.exception).not.toBe(undefined); expect(item.exception).not.toBe(null);
} else { } else {
expect(item.exception).toBe(undefined); expect(item.exception).toBe(null);
} }
}); });
}); });
@ -52,9 +52,9 @@ describe('ImpalaSQL ErrorStrategy test', () => {
splitListener.statementsContext.map((item, index) => { splitListener.statementsContext.map((item, index) => {
if (index !== 0 && index !== 1) { if (index !== 0 && index !== 1) {
expect(item.exception).not.toBe(undefined); expect(item.exception).not.toBe(null);
} else { } else {
expect(item.exception).toBe(undefined); expect(item.exception).toBe(null);
} }
}); });
}); });

View File

@ -1,6 +1,6 @@
import ImpalaSQL from 'src/parser/impala'; import ImpalaSQL from 'src/parser/impala';
import { ImpalaSqlParserListener } from 'src/lib/impala/ImpalaSqlParserListener'; import { ImpalaSqlParserListener } from 'src/lib/impala/ImpalaSqlParserListener';
import { ParseTreeListener } from 'antlr4ts/tree/ParseTreeListener'; import { ParseTreeListener } from 'antlr4ng';
describe('impala SQL Listener Tests', () => { describe('impala SQL Listener Tests', () => {
const expectTableName = 'user1'; const expectTableName = 'user1';
@ -13,8 +13,13 @@ describe('impala SQL Listener Tests', () => {
let result = ''; let result = '';
class MyListener implements ImpalaSqlParserListener { class MyListener implements ImpalaSqlParserListener {
enterTableNamePath = (ctx): void => { enterTableNamePath = (ctx): void => {
result = ctx.text.toLowerCase(); result = ctx.getText().toLowerCase();
}; };
visitTerminal() {}
visitErrorNode() {}
enterEveryRule() {}
exitEveryRule() {}
} }
const listenTableName = new MyListener(); const listenTableName = new MyListener();

View File

@ -1,5 +1,5 @@
import ImpalaSQL from 'src/parser/impala'; import ImpalaSQL from 'src/parser/impala';
import { AbstractParseTreeVisitor } from 'antlr4ts/tree/AbstractParseTreeVisitor'; import { AbstractParseTreeVisitor } from 'antlr4ng';
import { ImpalaSqlParserVisitor } from 'src/lib/impala/ImpalaSqlParserVisitor'; import { ImpalaSqlParserVisitor } from 'src/lib/impala/ImpalaSqlParserVisitor';
describe('impala SQL Visitor Tests', () => { describe('impala SQL Visitor Tests', () => {
@ -8,7 +8,7 @@ describe('impala SQL Visitor Tests', () => {
const parser = new ImpalaSQL(); const parser = new ImpalaSQL();
const parseTree = parser.parse(sql, (error) => { const parseTree = parser.parse(sql, (error) => {
console.log('Parse error:', error); console.error('Parse error:', error);
}); });
test('Visitor visitTableNamePath', () => { test('Visitor visitTableNamePath', () => {
@ -21,7 +21,7 @@ describe('impala SQL Visitor Tests', () => {
return result; return result;
} }
visitTableNamePath = (ctx): void => { visitTableNamePath = (ctx): void => {
result = ctx.text.toLowerCase(); result = ctx.getText().toLowerCase();
}; };
} }
const visitor: any = new MyVisitor(); const visitor: any = new MyVisitor();

View File

@ -19,9 +19,9 @@ describe('MySQL ErrorStrategy test', () => {
const statementCount = splitListener.statementsContext.length; const statementCount = splitListener.statementsContext.length;
splitListener.statementsContext.map((item, index) => { splitListener.statementsContext.map((item, index) => {
if (index !== statementCount - 1 && index !== statementCount - 2) { if (index !== statementCount - 1 && index !== statementCount - 2) {
expect(item.exception).not.toBe(undefined); expect(item.exception).not.toBe(null);
} else { } else {
expect(item.exception).toBe(undefined); expect(item.exception).toBe(null);
} }
}); });
}); });
@ -36,9 +36,9 @@ describe('MySQL ErrorStrategy test', () => {
const statementCount = splitListener.statementsContext.length; const statementCount = splitListener.statementsContext.length;
splitListener.statementsContext.map((item, index) => { splitListener.statementsContext.map((item, index) => {
if (index !== statementCount - 1 && index !== 0) { if (index !== statementCount - 1 && index !== 0) {
expect(item.exception).not.toBe(undefined); expect(item.exception).not.toBe(null);
} else { } else {
expect(item.exception).toBe(undefined); expect(item.exception).toBe(null);
} }
}); });
}); });
@ -52,9 +52,9 @@ describe('MySQL ErrorStrategy test', () => {
splitListener.statementsContext.map((item, index) => { splitListener.statementsContext.map((item, index) => {
if (index !== 0 && index !== 1) { if (index !== 0 && index !== 1) {
expect(item.exception).not.toBe(undefined); expect(item.exception).not.toBe(null);
} else { } else {
expect(item.exception).toBe(undefined); expect(item.exception).toBe(null);
} }
}); });
}); });

View File

@ -1,6 +1,6 @@
import MySQL from 'src/parser/mysql'; import MySQL from 'src/parser/mysql';
import { MySqlParserListener } from 'src/lib/mysql/MySqlParserListener'; import { MySqlParserListener } from 'src/lib/mysql/MySqlParserListener';
import { ParseTreeListener } from 'antlr4ts/tree/ParseTreeListener'; import { ParseTreeListener } from 'antlr4ng';
describe('MySQL Listener Tests', () => { describe('MySQL Listener Tests', () => {
const expectTableName = 'user1'; const expectTableName = 'user1';
@ -13,8 +13,12 @@ describe('MySQL Listener Tests', () => {
let result = ''; let result = '';
class MyListener implements MySqlParserListener { class MyListener implements MySqlParserListener {
enterTableName = (ctx): void => { enterTableName = (ctx): void => {
result = ctx.text.toLowerCase(); result = ctx.getText().toLowerCase();
}; };
visitTerminal() {}
visitErrorNode() {}
enterEveryRule() {}
exitEveryRule() {}
} }
const listenTableName: any = new MyListener(); const listenTableName: any = new MyListener();

View File

@ -42,7 +42,7 @@ describe('MySQL Database Administration Syntax Tests', () => {
it(sql, () => { it(sql, () => {
const result = parser.validate(sql); const result = parser.validate(sql);
if (result.length) { if (result.length) {
console.log(result, `\nPlease check sql: ${sql}`); console.error(result, `\nPlease check sql: ${sql}`);
} }
expect(result.length).toBe(0); expect(result.length).toBe(0);
}); });

View File

@ -49,7 +49,7 @@ describe('MySQL DDL Syntax Tests', () => {
it(sql, () => { it(sql, () => {
const result = parser.validate(sql); const result = parser.validate(sql);
if (result.length) { if (result.length) {
console.log(result, `\nPlease check sql: ${sql}`); console.error(result, `\nPlease check sql: ${sql}`);
} }
expect(result.length).toBe(0); expect(result.length).toBe(0);
}); });

View File

@ -33,7 +33,7 @@ describe('MySQL DML Syntax Tests', () => {
it(sql, () => { it(sql, () => {
const result = parser.validate(sql); const result = parser.validate(sql);
if (result.length) { if (result.length) {
console.log(result, `\nPlease check sql: ${sql}`); console.error(result, `\nPlease check sql: ${sql}`);
} }
expect(result.length).toBe(0); expect(result.length).toBe(0);
}); });

View File

@ -27,7 +27,7 @@ describe('MySQL Transactional and Locking, Replication, Prepared Compound and Ut
it(sql, () => { it(sql, () => {
const result = parser.validate(sql); const result = parser.validate(sql);
if (result.length) { if (result.length) {
console.log(result, `\nPlease check sql: ${sql}`); console.error(result, `\nPlease check sql: ${sql}`);
} }
expect(result.length).toBe(0); expect(result.length).toBe(0);
}); });

View File

@ -1,6 +1,6 @@
import MySQL from 'src/parser/mysql'; import MySQL from 'src/parser/mysql';
import { MySqlParserVisitor } from 'src/lib/mysql/MySqlParserVisitor'; import { MySqlParserVisitor } from 'src/lib/mysql/MySqlParserVisitor';
import { AbstractParseTreeVisitor } from 'antlr4ts/tree/AbstractParseTreeVisitor'; import { AbstractParseTreeVisitor } from 'antlr4ng';
describe('MySQL Visitor Tests', () => { describe('MySQL Visitor Tests', () => {
const expectTableName = 'user1'; const expectTableName = 'user1';
@ -8,7 +8,7 @@ describe('MySQL Visitor Tests', () => {
const parser = new MySQL(); const parser = new MySQL();
const parseTree = parser.parse(sql, (error) => { const parseTree = parser.parse(sql, (error) => {
console.log('Parse error:', error); console.error('Parse error:', error);
}); });
test('Visitor visitTableName', () => { test('Visitor visitTableName', () => {
@ -19,7 +19,7 @@ describe('MySQL Visitor Tests', () => {
} }
visitTableName = (ctx): void => { visitTableName = (ctx): void => {
result = ctx.text.toLowerCase(); result = ctx.getText().toLowerCase();
}; };
} }
const visitor = new MyVisitor(); const visitor = new MyVisitor();

View File

@ -21,9 +21,9 @@ describe('PgSQL ErrorStrategy test', () => {
// const statementCount = splitListener.statementsContext.length; // const statementCount = splitListener.statementsContext.length;
// splitListener.statementsContext.map((item, index) => { // splitListener.statementsContext.map((item, index) => {
// if(index !== statementCount-1 && index !== statementCount - 2) { // if(index !== statementCount-1 && index !== statementCount - 2) {
// expect(item.exception).not.toBe(undefined); // expect(item.exception).not.toBe(null);
// } else { // } else {
// expect(item.exception).toBe(undefined); // expect(item.exception).toBe(null);
// } // }
// }) // })
// }); // });
@ -38,9 +38,9 @@ describe('PgSQL ErrorStrategy test', () => {
const statementCount = splitListener.statementsContext.length; const statementCount = splitListener.statementsContext.length;
splitListener.statementsContext.map((item, index) => { splitListener.statementsContext.map((item, index) => {
if (index !== statementCount - 1 && index !== 0) { if (index !== statementCount - 1 && index !== 0) {
expect(item.exception).not.toBe(undefined); expect(item.exception).not.toBe(null);
} else { } else {
expect(item.exception).toBe(undefined); expect(item.exception).toBe(null);
} }
}); });
}); });
@ -54,9 +54,9 @@ describe('PgSQL ErrorStrategy test', () => {
splitListener.statementsContext.map((item, index) => { splitListener.statementsContext.map((item, index) => {
if (index !== 0 && index !== 1) { if (index !== 0 && index !== 1) {
expect(item.exception).not.toBe(undefined); expect(item.exception).not.toBe(null);
} else { } else {
expect(item.exception).toBe(undefined); expect(item.exception).toBe(null);
} }
}); });
}); });

Some files were not shown because too many files have changed in this diff Show More