feat: upgrade antlr4 to 4.12.0 (#88)

This commit is contained in:
Ziv 2023-05-04 10:13:05 +08:00 committed by GitHub
parent c0842b3e07
commit c1c72def30
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
116 changed files with 552721 additions and 609942 deletions

View File

@ -9,7 +9,7 @@ jobs:
strategy: strategy:
matrix: matrix:
node-version: [12.x, 14.x] node-version: [16.x]
steps: steps:
- uses: actions/checkout@v1 - uses: actions/checkout@v1
@ -17,10 +17,11 @@ jobs:
uses: actions/setup-node@v1 uses: actions/setup-node@v1
with: with:
node-version: ${{ matrix.node-version }} node-version: ${{ matrix.node-version }}
- name: npm install, test, and build - name: install, test, build
run: | run: |
npm install export NODE_OPTIONS="--max_old_space_size=4096"
npm test yarn install
npm run build yarn test
yarn build
env: env:
CI: true CI: true

View File

@ -1,10 +1,33 @@
# dt-sql-parser # dt-sql-parser
## Get Started
## Prerequisites
Install the dependencies after cloning the project:
```bash
yarn install
```
- test
```bash
yarn test
```
## Compile the grammar sources
Compile one language:
```bash
yarn antlr4 --lang=generic
```
Compile all languages:
```bash
yarn antlr4 --all
```
## Branch Organization ## Branch Organization
## Source Code Organization ## Source Code Organization

Binary file not shown.

Binary file not shown.

View File

@ -1,7 +1,8 @@
const path = require('path'); const path = require('path');
const exec = require('child_process').exec; const exec = require('child_process').exec;
const argv = require('yargs-parser')(process.argv.slice(2))
const antlr4 = path.resolve(__dirname, 'antlr-4.8-complete.jar'); const antlr4 = path.resolve(__dirname, './antlr-4.12.0-complete.jar');
const grammars = path.resolve(__dirname, '../src/grammar'); const grammars = path.resolve(__dirname, '../src/grammar');
const output = path.resolve(__dirname, '../src/lib'); const output = path.resolve(__dirname, '../src/lib');
@ -14,16 +15,16 @@ const entry = [
'flinksql', 'flinksql',
]; ];
entry.forEach((language) => { function compile(language) {
const cmd = ` const cmd = `
java -jar ${antlr4} java -jar ${antlr4}
-Dlanguage=JavaScript -Dlanguage=TypeScript
-visitor -visitor
-listener -listener
-o ${output}/${language} -o ${output}/${language}
${grammars}/${language}/*.g4 ${grammars}/${language}/*.g4
`.replace(/\n/g, ''); `.replace(/\n/g, '');
console.log('cmd:', cmd); console.info('Executing:', cmd);
exec(cmd, (err) => { exec(cmd, (err) => {
if (err) { if (err) {
console.error('Antlr4 build error: ' + language, err); console.error('Antlr4 build error: ' + language, err);
@ -31,5 +32,20 @@ entry.forEach((language) => {
console.log(`Build ${language} success.`); console.log(`Build ${language} success.`);
} }
}); });
}); }
// CLI dispatch: `yarn antlr4 --all` compiles every grammar,
// `yarn antlr4 --lang=<name>` compiles a single one.
if (argv.all) {
    entry.forEach((language) => {
        compile(language);
    });
} else if (argv.lang) {
    // Only accept grammars we actually ship.
    if (entry.includes(argv.lang)) {
        compile(argv.lang);
    } else {
        console.error('Unsupported language: ' + argv.lang);
    }
} else {
    // The example now matches the `--lang=<name>` form yargs-parser expects above.
    console.error('Please specify a language, e.g.: yarn antlr4 --lang=flinksql');
}

View File

@ -1,183 +1,200 @@
// For a detailed explanation regarding each configuration property, visit: /*
// https://jestjs.io/docs/en/configuration.html * For a detailed explanation regarding each configuration property, visit:
* https://jestjs.io/docs/configuration
*/
module.exports = { module.exports = {
// All imported modules in your tests should be mocked automatically // All imported modules in your tests should be mocked automatically
// automock: false, // automock: false,
// Stop running tests after `n` failures // Stop running tests after `n` failures
// bail: 0, // bail: 0,
// Respect "browser" field in package.json when resolving modules // The directory where Jest should store its cached dependency information
// browser: false, // cacheDirectory: "/private/var/folders/70/21p94l8j6cd9vv9t990g8cj00000gn/T/jest_dx",
// The directory where Jest should store its cached dependency information // Automatically clear mock calls, instances, contexts and results before every test
// cacheDirectory: "/private/var/folders/xr/54w2mws93hj3p3_ysc347flc0000gn/T/jest_dx", clearMocks: true,
// Automatically clear mock calls and instances between every test // Indicates whether the coverage information should be collected while executing the test
// clearMocks: false, collectCoverage: true,
// Indicates whether the coverage information should be collected while executing the test // An array of glob patterns indicating a set of files for which coverage information should be collected
// collectCoverage: false, // collectCoverageFrom: undefined,
// An array of glob patterns indicating a set of files for which coverage information should be collected // The directory where Jest should output its coverage files
// collectCoverageFrom: null, coverageDirectory: "coverage",
// The directory where Jest should output its coverage files // An array of regexp pattern strings used to skip coverage collection
// coverageDirectory: null, // coveragePathIgnorePatterns: [
// "/node_modules/"
// ],
// An array of regexp pattern strings used to skip coverage collection // Indicates which provider should be used to instrument code for coverage
coveragePathIgnorePatterns: ['/node_modules/'], // coverageProvider: "babel",
// A list of reporter names that Jest uses when writing coverage reports // A list of reporter names that Jest uses when writing coverage reports
// coverageReporters: [ // coverageReporters: [
// "json", // "json",
// "text", // "text",
// "lcov", // "lcov",
// "clover" // "clover"
// ], // ],
// An object that configures minimum threshold enforcement for coverage results // An object that configures minimum threshold enforcement for coverage results
// coverageThreshold: null, // coverageThreshold: undefined,
// A path to a custom dependency extractor // A path to a custom dependency extractor
// dependencyExtractor: null, // dependencyExtractor: undefined,
// Make calling deprecated APIs throw helpful error messages // Make calling deprecated APIs throw helpful error messages
// errorOnDeprecated: false, // errorOnDeprecated: false,
// Force coverage collection from ignored files using an array of glob patterns // The default configuration for fake timers
// forceCoverageMatch: [], // fakeTimers: {
// "enableGlobally": false
// },
// A path to a module which exports an async function that is triggered once before all test suites // Force coverage collection from ignored files using an array of glob patterns
// globalSetup: null, // forceCoverageMatch: [],
// A path to a module which exports an async function that is triggered once after all test suites // A path to a module which exports an async function that is triggered once before all test suites
// globalTeardown: null, // globalSetup: undefined,
// A set of global variables that need to be available in all test environments // A path to a module which exports an async function that is triggered once after all test suites
globals: { // globalTeardown: undefined,
window: {},
},
// An array of directory names to be searched recursively up from the requiring module's location // A set of global variables that need to be available in all test environments
// moduleDirectories: [ // globals: {},
// "node_modules"
// ],
// An array of file extensions your modules use // The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
// moduleFileExtensions: [ // maxWorkers: "50%",
// "js",
// "json",
// "jsx",
// "ts",
// "tsx",
// "node"
// ],
// A map from regular expressions to module names that allow to stub out resources with a single module // An array of directory names to be searched recursively up from the requiring module's location
// moduleNameMapper: {}, // moduleDirectories: [
// "node_modules"
// ],
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader // An array of file extensions your modules use
// modulePathIgnorePatterns: [], moduleFileExtensions: [
"js",
"mjs",
"cjs",
"jsx",
"ts",
"tsx",
"json",
"node"
],
// Activates notifications for test results // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
// notify: false, // moduleNameMapper: {},
// An enum that specifies notification mode. Requires { notify: true } // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
// notifyMode: "failure-change", // modulePathIgnorePatterns: [],
// A preset that is used as a base for Jest's configuration // Activates notifications for test results
// preset: null, // notify: false,
// Run tests from one or more projects // An enum that specifies notification mode. Requires { notify: true }
// projects: null, // notifyMode: "failure-change",
// Use this configuration option to add custom reporters to Jest // A preset that is used as a base for Jest's configuration
// reporters: undefined, preset: "ts-jest/presets/js-with-ts-esm",
// Automatically reset mock state between every test // Run tests from one or more projects
// resetMocks: false, // projects: undefined,
// Reset the module registry before running each individual test // Use this configuration option to add custom reporters to Jest
// resetModules: false, // reporters: undefined,
// A path to a custom resolver // Automatically reset mock state before every test
// resolver: null, // resetMocks: false,
// Automatically restore mock state between every test // Reset the module registry before running each individual test
// restoreMocks: false, // resetModules: false,
// The root directory that Jest should scan for tests and modules within // A path to a custom resolver
// rootDir: null, // resolver: undefined,
// A list of paths to directories that Jest should use to search for files in // Automatically restore mock state and implementation before every test
// roots: [ // restoreMocks: false,
// "<rootDir>"
// ],
// Allows you to use a custom runner instead of Jest's default test runner // The root directory that Jest should scan for tests and modules within
// runner: "jest-runner", // rootDir: undefined,
// The paths to modules that run some code to configure or set up the testing environment before each test // A list of paths to directories that Jest should use to search for files in
// setupFiles: [], // roots: [
// "<rootDir>"
// ],
// A list of paths to modules that run some code to configure or set up the testing framework before each test // Allows you to use a custom runner instead of Jest's default test runner
// setupFilesAfterEnv: [], // runner: "jest-runner",
// A list of paths to snapshot serializer modules Jest should use for snapshot testing // The paths to modules that run some code to configure or set up the testing environment before each test
// snapshotSerializers: [], // setupFiles: [],
// The test environment that will be used for testing // A list of paths to modules that run some code to configure or set up the testing framework before each test
testEnvironment: 'node', // setupFilesAfterEnv: [],
// Options that will be passed to the testEnvironment // The number of seconds after which a test is considered as slow and reported as such in the results.
// testEnvironmentOptions: {}, // slowTestThreshold: 5,
// Adds a location field to test results // A list of paths to snapshot serializer modules Jest should use for snapshot testing
// testLocationInResults: false, // snapshotSerializers: [],
// The glob patterns Jest uses to detect test files // The test environment that will be used for testing
// testMatch: [ testEnvironment: "node",
// "**/__tests__/**/*.[jt]s?(x)",
// "**/?(*.)+(spec|test).[tj]s?(x)"
// ],
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped // Options that will be passed to the testEnvironment
testPathIgnorePatterns: ['/node_modules/'], // testEnvironmentOptions: {},
// The regexp pattern or array of patterns that Jest uses to detect test files // Adds a location field to test results
// testRegex: [], // testLocationInResults: false,
// This option allows the use of a custom results processor // The glob patterns Jest uses to detect test files
// testResultsProcessor: null, testMatch: [
"**/__tests__/**/*.[jt]s?(x)",
"**/?(*.)+(spec|test).[tj]s?(x)"
],
// This option allows use of a custom test runner // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
// testRunner: "jasmine2", // testPathIgnorePatterns: [
// "/node_modules/"
// ],
// This option sets the URL for the jsdom environment. It is reflected in properties such as location.href // The regexp pattern or array of patterns that Jest uses to detect test files
// testURL: "http://localhost", // testRegex: [],
// Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout" // This option allows the use of a custom results processor
// timers: "real", // testResultsProcessor: undefined,
// A map from regular expressions to paths to transformers // This option allows use of a custom test runner
transform: { // testRunner: "jest-circus/runner",
'^.+\\.(t|j)sx?$': 'ts-jest',
},
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation // A map from regular expressions to paths to transformers
transformIgnorePatterns: ['/node_modules/'], transform: {
'\\.[jt]sx?$': 'ts-jest',
},
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
// unmockedModulePathPatterns: undefined, transformIgnorePatterns: [
'/node_modules/(?!antlr4)'
],
// Indicates whether each individual test should be reported during the run // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// verbose: null, // unmockedModulePathPatterns: undefined,
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode // Indicates whether each individual test should be reported during the run
// watchPathIgnorePatterns: [], // verbose: undefined,
// Whether to use watchman for file crawling // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
// watchman: true, // watchPathIgnorePatterns: [],
// Whether to use watchman for file crawling
// watchman: true,
moduleNameMapper : {
'^antlr4$': '<rootDir>/node_modules/antlr4/src/antlr4/index.web.js',
}
}; };

View File

@ -19,21 +19,23 @@
"build": "rm -rf dist && tsc", "build": "rm -rf dist && tsc",
"eslint": "eslint ./src/**/*.ts", "eslint": "eslint ./src/**/*.ts",
"check-types": "tsc --skipLibCheck", "check-types": "tsc --skipLibCheck",
"test": "jest", "test": "NODE_OPTIONS=--max_old_space_size=4096 && jest",
"release": "npm run build && standard-version --infile CHANGELOG.md" "release": "npm run build && standard-version --infile CHANGELOG.md"
}, },
"author": "dt-insight-front", "author": "dt-insight-front",
"license": "MIT", "license": "MIT",
"devDependencies": { "devDependencies": {
"@types/jest": "^24.0.13", "@types/jest": "^29.5.1",
"@types/node": "^18.15.11",
"@typescript-eslint/eslint-plugin": "^3.10.1", "@typescript-eslint/eslint-plugin": "^3.10.1",
"@typescript-eslint/parser": "^3.10.1", "@typescript-eslint/parser": "^3.10.1",
"eslint": "^7.7.0", "eslint": "^7.32.0",
"eslint-config-google": "^0.14.0", "eslint-config-google": "^0.14.0",
"jest": "^24.8.0", "jest": "^29.5.0",
"ts-jest": "^24.1.0", "standard-version": "^9.5.0",
"typescript": "^4.9.4", "ts-jest": "^29.1.0",
"standard-version": "^9.1.0" "typescript": "^5.0.4",
"yargs-parser": "^21.1.1"
}, },
"git repository": "https://github.com/DTStack/dt-sql-parser", "git repository": "https://github.com/DTStack/dt-sql-parser",
"repository": "https://github.com/DTStack/dt-sql-parser", "repository": "https://github.com/DTStack/dt-sql-parser",
@ -41,7 +43,6 @@
"registry": "https://registry.npmjs.org/" "registry": "https://registry.npmjs.org/"
}, },
"dependencies": { "dependencies": {
"@types/antlr4": "4.7.0", "antlr4": "^4.12.0"
"antlr4": "4.7.2"
} }
} }

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

6618
src/lib/generic/SqlLexer.ts Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

78974
src/lib/generic/SqlParser.ts Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

33635
src/lib/hive/HiveSql.ts Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

2108
src/lib/hive/HiveSqlLexer.ts Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,101 +0,0 @@
// https://github.com/antlr/grammars-v4/blob/master/sql/postgresql/Java/PostgreSQLLexerBase.java
// TypeScript-compiler-style __extends helper for ES5 prototype inheritance.
// eslint-disable-next-line no-invalid-this
const __extends = (this && this.__extends) || (function() {
    let extendStatics = function(d, b) {
        extendStatics = Object.setPrototypeOf ||
            ({ __proto__: [] } instanceof Array && function(d, b) {
                d.__proto__ = b;
            }) ||
            function(d, b) {
                for (const p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p];
            };
        return extendStatics(d, b);
    };
    return function(d, b) {
        if (typeof b !== 'function' && b !== null) {
            throw new TypeError('Class extends value ' + String(b) + ' is not a constructor or null');
        }
        extendStatics(d, b);
        function __() {
            this.constructor = d;
        }
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();
const Lexer = require('antlr4').Lexer;
// True when `str` is a single ASCII letter; callers use the result only for truthiness.
function isLetter(str) {
    return str.length === 1 && str.match(/[a-z]/i);
}
/**
 * Base lexer for the PostgreSQL grammar.
 * `tags` is the stack of dollar-quote tags ($tag$ ... $tag$) currently open.
 */
function PostgreSQLLexerBase(input) {
    const _this = Lexer.call(this, input) || this;
    _this.tags = [];
    return _this;
}
__extends(PostgreSQLLexerBase, Lexer);
PostgreSQLLexerBase.prototype.pushTag = function() {
    // Was unqualified `getText()` — a ReferenceError at runtime.
    this.tags.push(this.getText());
};
PostgreSQLLexerBase.prototype.isTag = function() {
    // Peek at (do not remove) the innermost open tag. The previous
    // `getText().equals(this.tags.peek())` used Java idioms that do not
    // exist on JS strings/arrays and always threw.
    return this.getText() === this.tags[this.tags.length - 1];
};
PostgreSQLLexerBase.prototype.popTag = function() {
    this.tags.pop();
};
PostgreSQLLexerBase.prototype.getInputStream = function() {
    return this._input;
};
// True when the next input symbol is NOT `c`.
PostgreSQLLexerBase.prototype.checkLA = function(c) {
    // eslint-disable-next-line new-cap
    return this.getInputStream().LA(1) !== c;
};
PostgreSQLLexerBase.prototype.charIsLetter = function() {
    // LA() yields a code point (number); convert before the string-based test.
    // eslint-disable-next-line new-cap
    return isLetter(String.fromCharCode(this.getInputStream().LA(-1)));
};
// Rewind two characters and re-classify the token as Integral (535).
PostgreSQLLexerBase.prototype.HandleNumericFail = function() {
    // NOTE(review): `index` is a property getter in some antlr4 JS releases —
    // confirm `index()` is callable in the pinned runtime.
    this.getInputStream().seek(this.getInputStream().index() - 2);
    const Integral = 535;
    this.setType(Integral);
};
// Re-classify '<<' / '>>' tokens after the fact.
PostgreSQLLexerBase.prototype.HandleLessLessGreaterGreater = function() {
    const LESS_LESS = 18;
    const GREATER_GREATER = 19;
    if (this.getText() === '<<') {
        this.setType(LESS_LESS);
    }
    if (this.getText() === '>>') {
        this.setType(GREATER_GREATER);
    }
};
PostgreSQLLexerBase.prototype.UnterminatedBlockCommentDebugAssert = function() {
    // Debug.Assert(InputStream.LA(1) == -1 /*EOF*/);
};
PostgreSQLLexerBase.prototype.CheckIfUtf32Letter = function() {
    // NOTE(review): parenthesized — `a << 8 + b` parses as `a << (8 + b)`;
    // the upstream Java base computes (LA(-2) << 8) + LA(-1).
    // eslint-disable-next-line new-cap
    let codePoint = (this.getInputStream().LA(-2) << 8) + this.getInputStream().LA(-1);
    let c;
    if (codePoint < 0x10000) {
        c = String.fromCharCode(codePoint);
    } else {
        // Encode as a UTF-16 surrogate pair.
        codePoint -= 0x10000;
        c = String.fromCharCode(codePoint / 0x400 + 0xd800, codePoint % 0x400 + 0xdc00);
    }
    return isLetter(c[0]);
};
exports.PostgreSQLLexerBase = PostgreSQLLexerBase;

View File

@ -0,0 +1,72 @@
import { Lexer } from 'antlr4';
/**
 * True when `str` is a single ASCII letter (a-z, case-insensitive).
 *
 * Returns a proper boolean; the previous `match()`-based form returned
 * `false | RegExpMatchArray | null`, which callers only used for truthiness.
 */
function isLetter(str: string): boolean {
    return str.length === 1 && /[a-z]/i.test(str);
}
/**
 * Base lexer for the PostgreSQL grammar (helper predicates/actions invoked
 * from the generated lexer).
 */
export default class PostgreSQLLexerBase extends Lexer {
    /** Stack of dollar-quote tags ($tag$ ... $tag$) currently open. */
    tags: string[] = [];

    CheckIfUtf32Letter() {
        // NOTE(review): parenthesized — `a << 8 + b` parses as `a << (8 + b)`;
        // the upstream Java base computes (LA(-2) << 8) + LA(-1).
        // eslint-disable-next-line new-cap
        let codePoint = (this.getInputStream().LA(-2) << 8) + this.getInputStream().LA(-1);
        let c;
        if (codePoint < 0x10000) {
            c = String.fromCharCode(codePoint);
        } else {
            // Encode as a UTF-16 surrogate pair.
            codePoint -= 0x10000;
            c = String.fromCharCode(codePoint / 0x400 + 0xd800, codePoint % 0x400 + 0xdc00);
        }
        return isLetter(c[0]);
    }

    UnterminatedBlockCommentDebugAssert() {
        // Debug.Assert(InputStream.LA(1) == -1 /*EOF*/);
    }

    // Re-classify '<<' / '>>' tokens after the fact.
    HandleLessLessGreaterGreater() {
        const LESS_LESS = 18;
        const GREATER_GREATER = 19;
        if (this.text === '<<') {
            this._type = LESS_LESS;
        }
        if (this.text === '>>') {
            this._type = GREATER_GREATER;
        }
    }

    // Rewind two characters and re-classify the token as Integral (535).
    HandleNumericFail() {
        this.getInputStream().seek(this.getInputStream().index - 2);
        const Integral = 535;
        this._type = Integral;
    }

    charIsLetter() {
        // LA() yields a code point (number); convert before the string-based test.
        // eslint-disable-next-line new-cap
        return isLetter(String.fromCharCode(this.getInputStream().LA(-1)));
    }

    pushTag() {
        this.tags.push(this.text);
    }

    isTag() {
        // Peek (do not pop): the tag must stay on the stack until popTag().
        // The previous `pop()` consumed the tag as a side effect of checking it.
        return this.text === this.tags[this.tags.length - 1];
    }

    popTag() {
        this.tags.pop();
    }

    getInputStream() {
        return this._input;
    }

    // True when the next input symbol is NOT `c`.
    checkLA(c: number) {
        // eslint-disable-next-line new-cap
        return this.getInputStream().LA(1) !== c;
    }
}

View File

@ -1,149 +0,0 @@
/* eslint-disable new-cap,camelcase */
// https://github.com/antlr/grammars-v4/blob/master/sql/postgresql/Java/PostgreSQLParserBase.java
// TypeScript-compiler-style __extends helper for ES5 prototype inheritance.
// eslint-disable-next-line no-invalid-this
const __extends = (this && this.__extends) || (function() {
    let extendStatics = function(d, b) {
        extendStatics = Object.setPrototypeOf ||
            ({ __proto__: [] } instanceof Array && function(d, b) {
                d.__proto__ = b;
            }) ||
            function(d, b) {
                for (const p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p];
            };
        return extendStatics(d, b);
    };
    return function(d, b) {
        if (typeof b !== 'function' && b !== null) {
            throw new TypeError('Class extends value ' + String(b) + ' is not a constructor or null');
        }
        extendStatics(d, b);
        function __() {
            this.constructor = d;
        }
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();
const PostgreSQLLexer_1 = require('../PostgreSQLLexer');
const PostgreSQLParser_1 = require('../PostgreSQLParser');
const antlr4 = require('antlr4/index');
const CharStreams = antlr4.CharStreams;
const CommonTokenStream = antlr4.CommonTokenStream;
const Parser = antlr4.Parser;
__extends(PostgreSQLParserBase, Parser);
/**
 * Base parser for the PostgreSQL grammar (helper actions invoked from the
 * generated parser).
 */
function PostgreSQLParserBase(input) {
    return Parser.call(this, input) || this;
}
// Build a fresh lexer/parser pipeline over `script`. Defined on the
// prototype so the `this.getPostgreSQLParser(...)` calls below resolve; it
// was previously attached only to the constructor, so those calls threw.
PostgreSQLParserBase.prototype.getPostgreSQLParser = function(script) {
    const charStream = CharStreams.fromString(script);
    const lexer = new PostgreSQLLexer_1.PostgreSQLLexer(charStream);
    const tokens = new CommonTokenStream(lexer);
    const parser = new PostgreSQLParser_1.PostgreSQLParser(tokens);
    lexer.removeErrorListeners();
    parser.removeErrorListeners();
    // LexerDispatchingErrorListener listener_lexer = new LexerDispatchingErrorListener((Lexer)(((CommonTokenStream)(this.getInputStream())).getTokenSource()));
    // ParserDispatchingErrorListener listener_parser = new ParserDispatchingErrorListener(this);
    // lexer.addErrorListener(listener_lexer);
    // parser.addErrorListener(listener_parser);
    return parser;
};
// Keep the old static form as an alias for any external callers.
PostgreSQLParserBase.getPostgreSQLParser = PostgreSQLParserBase.prototype.getPostgreSQLParser;
// Parse `script` into a full program tree (`line` kept for API compatibility).
PostgreSQLParserBase.prototype.GetParsedSqlTree = function(script, line) {
    const ph = this.getPostgreSQLParser(script);
    return ph.program();
};
/**
 * Re-parse the body of a CREATE FUNCTION with the parser for its declared
 * LANGUAGE ('plpgsql' or 'sql') and attach the tree as `Definition`.
 */
PostgreSQLParserBase.prototype.ParseRoutineBody = function(_localctx) {
    // 1. Find the declared LANGUAGE, if any.
    let lang = null;
    for (let _i = 0, _a = _localctx.createfunc_opt_item(); _i < _a.length; _i++) {
        const coi = _a[_i];
        if (!!coi.LANGUAGE()) {
            if (!!coi.nonreservedword_or_sconst()) {
                if (!!coi.nonreservedword_or_sconst().nonreservedword()) {
                    if (!!coi.nonreservedword_or_sconst().nonreservedword().identifier()) {
                        if (!!coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier()) {
                            lang = coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier().getText();
                            break;
                        }
                    }
                }
            }
        }
    }
    if (!lang) {
        return;
    }
    // 2. Find the first option that HAS a func_as clause. (The previous
    // `if (!a.func_as())` inverted the test and selected an item WITHOUT
    // one, so the func_as().sconst(0) calls below would throw.)
    // eslint-disable-next-line camelcase
    let func_as = null;
    for (let _b = 0, _c = _localctx.createfunc_opt_item(); _b < _c.length; _b++) {
        const a = _c[_b];
        if (a.func_as()) {
            // eslint-disable-next-line camelcase
            func_as = a;
            break;
        }
    }
    // eslint-disable-next-line camelcase
    if (!!func_as) {
        const txt = this.GetRoutineBodyString(func_as.func_as().sconst(0));
        const ph = this.getPostgreSQLParser(txt);
        switch (lang) {
            case 'plpgsql':
                func_as.func_as().Definition = ph.plsqlroot();
                break;
            case 'sql':
                func_as.func_as().Definition = ph.program();
                break;
        }
    }
};
// Strip one leading and one trailing character (the quotes).
PostgreSQLParserBase.prototype.TrimQuotes = function(s) {
    // `length` is a property on JS strings, not a method as in Java.
    return (!s) ? s : s.substring(1, s.length - 1);
};
// Collapse doubled single-quotes ('') into a single quote.
PostgreSQLParserBase.prototype.unquote = function(s) {
    const slength = s.length;
    let r = '';
    let i = 0;
    while (i < slength) {
        const c = s.charAt(i);
        // JS strings have no append(); build by concatenation.
        r += c;
        // A doubled quote encodes one literal quote: skip the second.
        if (c === '\'' && i < slength - 1 && (s.charAt(i + 1) === '\'')) {
            i++;
        }
        i++;
    }
    return r;
};
/**
 * Extract the routine body text from an sconst rule, handling plain,
 * unicode-escape, escape and dollar-quoted string constants.
 */
PostgreSQLParserBase.prototype.GetRoutineBodyString = function(rule) {
    const anysconst = rule.anysconst();
    const StringConstant = anysconst.StringConstant();
    if (!!StringConstant) {
        return this.unquote(this.TrimQuotes(StringConstant.getText()));
    }
    const UnicodeEscapeStringConstant = anysconst.UnicodeEscapeStringConstant();
    if (!!UnicodeEscapeStringConstant) {
        return this.TrimQuotes(UnicodeEscapeStringConstant.getText());
    }
    const EscapeStringConstant = anysconst.EscapeStringConstant();
    if (!!EscapeStringConstant) {
        return this.TrimQuotes(EscapeStringConstant.getText());
    }
    // Dollar-quoted body: concatenate all DollarText segments.
    let result = '';
    const dollartext = anysconst.DollarText();
    for (let _i = 0, dollartext_1 = dollartext; _i < dollartext_1.length; _i++) {
        const s = dollartext_1[_i];
        result += s.getText();
    }
    return result;
};
exports.PostgreSQLParserBase = PostgreSQLParserBase;

View File

@ -0,0 +1,110 @@
/* eslint-disable new-cap,camelcase */
import { Parser, CharStreams, CommonTokenStream } from 'antlr4';
import PostgreSQLLexer from '../PostgreSQLLexer';
import PostgreSQLParser from '../PostgreSQLParser';
/**
 * Base parser for the PostgreSQL grammar (helper actions invoked from the
 * generated parser).
 */
export default class PostgreSQLParserBase extends Parser {
    /** Build a fresh lexer/parser pipeline over `script`. */
    getPostgreSQLParser(script: string) {
        const charStream = CharStreams.fromString(script);
        const lexer = new PostgreSQLLexer(charStream);
        const tokens = new CommonTokenStream(lexer);
        const parser = new PostgreSQLParser(tokens);
        return parser;
    }

    /** Parse `script` into a full program tree (`line` kept for API compatibility). */
    GetParsedSqlTree(script: string, line) {
        const ph = this.getPostgreSQLParser(script);
        return ph.program();
    }

    /**
     * Re-parse the body of a CREATE FUNCTION with the parser for its declared
     * LANGUAGE ('plpgsql' or 'sql') and attach the tree as `Definition`.
     */
    ParseRoutineBody(_localctx) {
        // 1. Find the declared LANGUAGE, if any.
        let lang = null;
        for (let _i = 0, _a = _localctx.createfunc_opt_item(); _i < _a.length; _i++) {
            const coi = _a[_i];
            if (!!coi.LANGUAGE()) {
                if (!!coi.nonreservedword_or_sconst()) {
                    if (!!coi.nonreservedword_or_sconst().nonreservedword()) {
                        if (!!coi.nonreservedword_or_sconst().nonreservedword().identifier()) {
                            if (!!coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier()) {
                                lang = coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier().getText();
                                break;
                            }
                        }
                    }
                }
            }
        }
        if (!lang) {
            return;
        }
        // 2. Find the first option that HAS a func_as clause. (The previous
        // `if (!a.func_as())` inverted the test and selected an item WITHOUT
        // one, so the func_as().sconst(0) calls below would throw.)
        // eslint-disable-next-line camelcase
        let func_as = null;
        for (let _b = 0, _c = _localctx.createfunc_opt_item(); _b < _c.length; _b++) {
            const a = _c[_b];
            if (a.func_as()) {
                // eslint-disable-next-line camelcase
                func_as = a;
                break;
            }
        }
        // eslint-disable-next-line camelcase
        if (!!func_as) {
            const txt = this.GetRoutineBodyString(func_as.func_as().sconst(0));
            const ph = this.getPostgreSQLParser(txt);
            switch (lang) {
                case 'plpgsql':
                    func_as.func_as().Definition = ph.plsqlroot();
                    break;
                case 'sql':
                    func_as.func_as().Definition = ph.program();
                    break;
            }
        }
    }

    /** Strip one leading and one trailing character (the quotes). */
    TrimQuotes(s: string) {
        return (!s) ? s : s.substring(1, s.length - 1);
    }

    /** Collapse doubled single-quotes ('') into a single quote. */
    unquote(s: string) {
        const slength = s.length;
        let r = '';
        let i = 0;
        while (i < slength) {
            const c = s.charAt(i);
            r += c;
            // A doubled quote encodes one literal quote: skip the second.
            if (c === '\'' && i < slength - 1 && (s.charAt(i + 1) === '\'')) {
                i++;
            }
            i++;
        }
        return r;
    }

    /**
     * Extract the routine body text from an sconst rule, handling plain,
     * unicode-escape, escape and dollar-quoted string constants.
     */
    GetRoutineBodyString(rule) {
        const anysconst = rule.anysconst();
        const StringConstant = anysconst.StringConstant();
        if (!!StringConstant) {
            return this.unquote(this.TrimQuotes(StringConstant.getText()));
        }
        const UnicodeEscapeStringConstant = anysconst.UnicodeEscapeStringConstant();
        if (!!UnicodeEscapeStringConstant) {
            return this.TrimQuotes(UnicodeEscapeStringConstant.getText());
        }
        const EscapeStringConstant = anysconst.EscapeStringConstant();
        if (!!EscapeStringConstant) {
            return this.TrimQuotes(EscapeStringConstant.getText());
        }
        // Dollar-quoted body: concatenate all DollarText segments.
        let result = '';
        const dollartext = anysconst.DollarText();
        for (let _i = 0, dollartext_1 = dollartext; _i < dollartext_1.length; _i++) {
            const s = dollartext_1[_i];
            result += s.getText();
        }
        return result;
    }
}

View File

@ -1,16 +0,0 @@
const Lexer = require('antlr4').Lexer;
/**
 * Base lexer for the PL/SQL grammar (ES5 prototype inheritance over the
 * antlr4 JavaScript runtime).
 */
function PlSqlBaseLexer(...args) {
    Lexer.call(this, ...args);
    return this;
}
PlSqlBaseLexer.prototype = Object.create(Lexer.prototype);
PlSqlBaseLexer.prototype.constructor = PlSqlBaseLexer;
// True when the input symbol at lookahead offset `pos` is EOF or a newline.
PlSqlBaseLexer.prototype.IsNewlineAtPos = function(pos) {
    const la = this._input.LA(pos);
    // LA() returns a code point (number); the previous loose `la == '\n'`
    // coerced the string to 0 and never matched a newline.
    return la === -1 || la === '\n'.charCodeAt(0);
};
exports.PlSqlBaseLexer = PlSqlBaseLexer;

View File

@ -1,27 +0,0 @@
const Parser = require('antlr4').Parser;
// Legacy ES5-style "subclass" of the antlr4 Parser carrying the Oracle
// version feature flags consulted by semantic predicates in the grammar.
function PlSqlBaseParser(...args) {
    Parser.call(this, ...args);
    // Defaults: target 12c semantics, not 10g.
    this._isVersion10 = false;
    this._isVersion12 = true;
    return this;
}

// Wire up prototypal inheritance from Parser and restore the constructor
// reference clobbered by Object.create.
PlSqlBaseParser.prototype = Object.create(Parser.prototype);
PlSqlBaseParser.prototype.constructor = PlSqlBaseParser;

// Accessors for the version flags.
PlSqlBaseParser.prototype.isVersion10 = function () {
    return this._isVersion10;
};
PlSqlBaseParser.prototype.isVersion12 = function () {
    return this._isVersion12;
};
PlSqlBaseParser.prototype.setVersion10 = function (flag) {
    this._isVersion10 = flag;
};
PlSqlBaseParser.prototype.setVersion12 = function (flag) {
    this._isVersion12 = flag;
};

exports.PlSqlBaseParser = PlSqlBaseParser;

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

16230
src/lib/plsql/PlSqlLexer.ts Normal file

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

193036
src/lib/plsql/PlSqlParser.ts Normal file

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,9 @@
import { Lexer } from "antlr4";
export default class PlSqlBaseLexer extends Lexer {
    /**
     * Returns true when the lookahead character at `pos` is the end of input
     * or a newline.
     *
     * The legacy JS implementation compared the numeric lookahead code
     * against the string '\n' (which can never match), and this port dropped
     * the newline check entirely, leaving an EOF-only test despite the
     * method's name. Compare against the newline code point so mid-file
     * newlines are detected as well.
     *
     * @param pos 1-based lookahead offset into the input stream
     */
    IsNewlineAtPos(pos: number): boolean {
        const la = this._input.LA(pos);
        return la == -1 || la == '\n'.charCodeAt(0);
    }
}

View File

@ -0,0 +1,20 @@
import { Parser } from 'antlr4';
export default class PlSqlBaseParser extends Parser {
    // Oracle version feature flags consulted by semantic predicates in the
    // grammar. Defaults: target 12c semantics, not 10g.
    private _isVersion10 = false;
    private _isVersion12 = true;

    /** Whether 10g-only grammar rules are enabled. */
    public isVersion10(): boolean {
        return this._isVersion10;
    }

    /** Enable or disable 10g-only grammar rules. */
    public setVersion10(value: boolean): void {
        this._isVersion10 = value;
    }

    /** Whether 12c-only grammar rules are enabled. */
    public isVersion12(): boolean {
        return this._isVersion12;
    }

    /** Enable or disable 12c-only grammar rules. */
    public setVersion12(value: boolean): void {
        this._isVersion12 = value;
    }
}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -0,0 +1,25 @@
import { Lexer } from "antlr4";
export default class SparkSqlBaseLexer extends Lexer {
    /**
     * Returns true when a decimal literal may end here: the next character
     * is not an identifier character (A-Z, 0-9 or '_').
     *
     * NOTE(review): lowercase letters are not checked — presumably the input
     * is upper-cased before lexing (the parser facade does so); confirm.
     */
    isValidDecimal(): boolean {
        const la = this._input.LA(1);
        // At end of input nothing can extend the literal. This also guards
        // String.fromCodePoint, which throws RangeError on the EOF marker (-1).
        if (la === -1) {
            return true;
        }
        const nextChar = this.fromCodePoint(la);
        return !(
            (nextChar >= 'A' && nextChar <= 'Z') ||
            (nextChar >= '0' && nextChar <= '9') ||
            nextChar == '_'
        );
    }

    /**
     * This method will be called when we see a slash-star and try to match it
     * as a bracketed comment. If the next character is '+', it should be
     * parsed as a hint later, and we cannot match it as a bracketed comment.
     *
     * Returns true if the next character is '+'.
     */
    isHint(): boolean {
        const la = this._input.LA(1);
        // EOF cannot be a hint marker; also guards String.fromCodePoint.
        if (la === -1) {
            return false;
        }
        return this.fromCodePoint(la) == '+';
    }

    /** Convert a numeric code point to its string character. */
    fromCodePoint(codePoint: number): string {
        return String.fromCodePoint(codePoint);
    }
}

View File

@ -1,21 +1,27 @@
import { Token, Lexer } from 'antlr4'; import { ParseTreeWalker, CommonTokenStream } from 'antlr4';
import { ParseTreeWalker } from 'antlr4/tree'; import type { Parser } from 'antlr4/src/antlr4';
import ParserErrorListener, { import ParserErrorListener, {
ParserError, ParserError,
ErrorHandler, ErrorHandler,
ParserErrorCollector, ParserErrorCollector,
} from './parserErrorListener'; } from './parserErrorListener';
interface IParser {
// Lost in type definition
ruleNames: string[];
// Customized in our parser
program(): any;
}
/** /**
* Custom Parser class, subclass needs extends it. * Custom Parser class, subclass needs extends it.
*/ */
export default abstract class BasicParser<C = any> { export default abstract class BasicParser {
private _parser; private _parser: IParser & Parser;
public parse( public parse(
input: string, input: string,
errorListener?: ErrorHandler, errorListener?: ErrorHandler<any>,
) { ) {
const parser = this.createParser(input); const parser = this.createParser(input);
this._parser = parser; this._parser = parser;
@ -46,33 +52,31 @@ export default abstract class BasicParser<C = any> {
* Create antrl4 Lexer object * Create antrl4 Lexer object
* @param input source string * @param input source string
*/ */
public abstract createLexer(input: string): Lexer; public abstract createLexer(input: string);
/** /**
* Create Parser by lexer * Create Parser by lexer
* @param lexer Lexer * @param lexer Lexer
*/ */
public abstract createParserFromLexer(lexer: Lexer); public abstract createParserFromLexer(lexer);
/** /**
* Visit parser tree * Get all Tokens of input string
* @param parserTree
*/
// public abstract visit(visitor: any, parserTree: any);
/**
* The source string
* @param input string * @param input string
* @returns Token[]
*/ */
public getAllTokens(input: string): Token[] { public getAllTokens(input: string): string[] {
return this.createLexer(input).getAllTokens(); const lexer = this.createLexer(input);
const tokensStream = new CommonTokenStream(lexer);
tokensStream.fill();
return tokensStream.tokens;
}; };
/** /**
* Get Parser instance by input string * Get Parser instance by input string
* @param input * @param input
*/ */
public createParser(input: string) { public createParser(input: string): IParser & Parser {
const lexer = this.createLexer(input); const lexer = this.createLexer(input);
const parser: any = this.createParserFromLexer(lexer); const parser: any = this.createParserFromLexer(lexer);
parser.buildParseTrees = true; parser.buildParseTrees = true;

View File

@ -1,6 +1,4 @@
import { Token, Recognizer } from 'antlr4'; import { Token, Recognizer, ErrorListener, RecognitionException } from 'antlr4';
import { ErrorListener } from 'antlr4/error';
export interface ParserError { export interface ParserError {
startLine: number; startLine: number;
endLine: number; endLine: number;
@ -9,8 +7,8 @@ export interface ParserError {
message: string; message: string;
} }
export interface SyntaxError { export interface SyntaxError<T> {
recognizer: Recognizer; recognizer: Recognizer<T>;
offendingSymbol: Token; offendingSymbol: Token;
line: number; line: number;
charPositionInLine: number; charPositionInLine: number;
@ -18,9 +16,13 @@ export interface SyntaxError {
e: any; e: any;
} }
export type ErrorHandler = (err: ParserError, errOption: SyntaxError) => void; type ErrorOffendingSymbol = {
text: string;
};
export class ParserErrorCollector extends ErrorListener { export type ErrorHandler<T> = (err: ParserError, errOption: SyntaxError<T>) => void;
export class ParserErrorCollector extends ErrorListener<ErrorOffendingSymbol> {
private _errors: ParserError[]; private _errors: ParserError[];
constructor(error: ParserError[]) { constructor(error: ParserError[]) {
@ -29,11 +31,11 @@ export class ParserErrorCollector extends ErrorListener {
} }
syntaxError( syntaxError(
recognizer: Recognizer, offendingSymbol: Token, line: number, recognizer: Recognizer<ErrorOffendingSymbol>, offendingSymbol: ErrorOffendingSymbol, line: number,
charPositionInLine: number, msg: string, e: any, charPositionInLine: number, msg: string, e: RecognitionException,
) { ) {
let endCol = charPositionInLine + 1; let endCol = charPositionInLine + 1;
if (offendingSymbol &&offendingSymbol.text !== null) { if (offendingSymbol && offendingSymbol.text !== null) {
endCol = charPositionInLine + offendingSymbol.text.length; endCol = charPositionInLine + offendingSymbol.text.length;
} }
this._errors.push({ this._errors.push({
@ -47,20 +49,20 @@ export class ParserErrorCollector extends ErrorListener {
} }
export default class ParserErrorListener extends ErrorListener { export default class ParserErrorListener extends ErrorListener<ErrorOffendingSymbol> {
private _errorHandler; private _errorHandler;
constructor(errorListener: ErrorHandler) { constructor(errorListener: ErrorHandler<ErrorOffendingSymbol>) {
super(); super();
this._errorHandler = errorListener; this._errorHandler = errorListener;
} }
syntaxError( syntaxError(
recognizer: Recognizer, offendingSymbol: Token, line: number, recognizer: Recognizer<ErrorOffendingSymbol>, offendingSymbol: ErrorOffendingSymbol, line: number,
charPositionInLine: number, msg: string, e: any, charPositionInLine: number, msg: string, e: any,
) { ) {
let endCol = charPositionInLine + 1; let endCol = charPositionInLine + 1;
if (offendingSymbol &&offendingSymbol.text !== null) { if (offendingSymbol && offendingSymbol.text !== null) {
endCol = charPositionInLine + offendingSymbol.text.length; endCol = charPositionInLine + offendingSymbol.text.length;
} }
if (this._errorHandler) { if (this._errorHandler) {

View File

@ -1,17 +1,17 @@
import { InputStream, CommonTokenStream, Lexer } from 'antlr4'; import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
import { FlinkSqlLexer } from '../lib/flinksql/FlinkSqlLexer'; import FlinkSqlLexer from '../lib/flinksql/FlinkSqlLexer';
import { FlinkSqlParser } from '../lib/flinksql/FlinkSqlParser'; import FlinkSqlParser from '../lib/flinksql/FlinkSqlParser';
import BasicParser from './common/basicParser'; import BasicParser from './common/basicParser';
export default class FlinkSQL extends BasicParser { export default class FlinkSQL extends BasicParser {
public createLexer(input: string): Lexer { public createLexer(input: string): FlinkSqlLexer {
const chars = new InputStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform const chars = new CharStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
const lexer = <unknown> new FlinkSqlLexer(chars) as Lexer; const lexer = new FlinkSqlLexer(chars);
return lexer; return lexer;
} }
public createParserFromLexer(lexer: Lexer): any { public createParserFromLexer(lexer: Lexer): FlinkSqlParser {
const tokenStream = new CommonTokenStream(lexer); const tokens = new CommonTokenStream(lexer);
return new FlinkSqlParser(tokenStream); const parser = new FlinkSqlParser(tokens);
return parser;
} }
} }

View File

@ -1,15 +1,15 @@
import { InputStream, CommonTokenStream, Lexer } from 'antlr4'; import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
import { SqlLexer } from '../lib/generic/SqlLexer'; import SqlLexer from '../lib/generic/SqlLexer';
import { SqlParser } from '../lib/generic/SqlParser'; import SqlParser from '../lib/generic/SqlParser';
import BasicParser from './common/basicParser'; import BasicParser from './common/basicParser';
export default class GenericSQL extends BasicParser { export default class GenericSQL extends BasicParser {
public createLexer(input: string): Lexer { public createLexer(input: string): SqlLexer {
const chars = new InputStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform const chars = new CharStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
const lexer = <unknown> new SqlLexer(chars) as Lexer; const lexer = new SqlLexer(chars);
return lexer; return lexer;
} }
public createParserFromLexer(lexer: Lexer): any { public createParserFromLexer(lexer: Lexer): SqlParser {
const tokenStream = new CommonTokenStream(lexer); const tokenStream = new CommonTokenStream(lexer);
return new SqlParser(tokenStream); return new SqlParser(tokenStream);
} }

View File

@ -1,15 +1,15 @@
import { InputStream, CommonTokenStream, Lexer } from 'antlr4'; import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
import { HiveSqlLexer } from '../lib/hive/HiveSqlLexer'; import HiveSqlLexer from '../lib/hive/HiveSqlLexer';
import { HiveSql } from '../lib/hive/HiveSql'; import HiveSql from '../lib/hive/HiveSql';
import BasicParser from './common/basicParser'; import BasicParser from './common/basicParser';
export default class HiveSQL extends BasicParser { export default class HiveSQL extends BasicParser {
public createLexer(input: string): Lexer { public createLexer(input: string): HiveSqlLexer {
const chars = new InputStream(input); const chars = new CharStream(input);
const lexer = <unknown> new HiveSqlLexer(chars) as Lexer; const lexer = new HiveSqlLexer(chars);
return lexer; return lexer;
} }
public createParserFromLexer(lexer: Lexer): any { public createParserFromLexer(lexer: Lexer): HiveSql {
const tokenStream = new CommonTokenStream(lexer); const tokenStream = new CommonTokenStream(lexer);
return new HiveSql(tokenStream); return new HiveSql(tokenStream);
} }

View File

@ -1,13 +1,13 @@
import { InputStream, CommonTokenStream, Lexer } from 'antlr4'; import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
import { PostgreSQLLexer } from '../lib/pgsql/PostgreSQLLexer';
import { PostgreSQLParser } from '../lib/pgsql/PostgreSQLParser';
import BasicParser from './common/basicParser'; import BasicParser from './common/basicParser';
import PostgreSQLLexer from '../lib/pgsql/PostgreSQLLexer';
import PostgreSQLParser from '../lib/pgsql/PostgreSQLParser';
export default class PostgresSQL extends BasicParser { export default class PostgresSQL extends BasicParser {
public createLexer(input: string): Lexer { public createLexer(input: string): PostgreSQLLexer {
const chars = new InputStream(input.toUpperCase()); const chars = new CharStream(input.toUpperCase());
const lexer = <unknown> new PostgreSQLLexer(chars) as Lexer; const lexer = new PostgreSQLLexer(chars);
return lexer; return lexer;
} }
public createParserFromLexer(lexer: Lexer): any { public createParserFromLexer(lexer: Lexer): any {

View File

@ -1,16 +1,16 @@
import { InputStream, CommonTokenStream, Lexer } from 'antlr4'; import { CharStream, CommonTokenStream } from 'antlr4';
import { PlSqlLexer } from '../lib/plsql/PlSqlLexer';
import { PlSqlParser } from '../lib/plsql/PlSqlParser';
import BasicParser from './common/basicParser'; import BasicParser from './common/basicParser';
import PlSqlLexer from '../lib/plsql/PlSqlLexer';
import PlSqlParser from '../lib/plsql/PlSqlParser';
export default class PLSQLParser extends BasicParser { export default class PLSQLParser extends BasicParser {
public createLexer(input: string): Lexer { public createLexer(input: string): PlSqlLexer {
const chars = new InputStream(input.toUpperCase()); const chars = new CharStream(input.toUpperCase());
const lexer = <unknown> new PlSqlLexer(chars) as Lexer; const lexer = new PlSqlLexer(chars);
return lexer; return lexer;
} }
public createParserFromLexer(lexer: Lexer): any { public createParserFromLexer(lexer: PlSqlLexer): PlSqlParser {
const tokenStream = new CommonTokenStream(lexer); const tokenStream = new CommonTokenStream(lexer);
return new PlSqlParser(tokenStream); return new PlSqlParser(tokenStream);
} }

View File

@ -1,12 +1,12 @@
import { InputStream, CommonTokenStream, Lexer } from 'antlr4'; import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
import { SparkSqlLexer } from '../lib/spark/SparkSqlLexer';
import { SparkSqlParser } from '../lib/spark/SparkSqlParser';
import BasicParser from './common/basicParser'; import BasicParser from './common/basicParser';
import SparkSqlLexer from '../lib/spark/SparkSqlLexer';
import SparkSqlParser from '../lib/spark/SparkSqlParser';
export default class SparkSQL extends BasicParser { export default class SparkSQL extends BasicParser {
public createLexer(input: string): Lexer { public createLexer(input: string): Lexer {
const chars = new InputStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform const chars = new CharStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
const lexer = <unknown> new SparkSqlLexer(chars) as Lexer; const lexer = new SparkSqlLexer(chars);
return lexer; return lexer;
} }
public createParserFromLexer(lexer: Lexer): any { public createParserFromLexer(lexer: Lexer): any {

View File

@ -1,4 +1,4 @@
import { FlinkSQL } from '../../../src'; import FlinkSQL from '../../../src/parser/flinksql';
describe('FlinkSQL Lexer tests', () => { describe('FlinkSQL Lexer tests', () => {
const parser = new FlinkSQL(); const parser = new FlinkSQL();
@ -7,6 +7,6 @@ describe('FlinkSQL Lexer tests', () => {
const tokens = parser.getAllTokens(sql); const tokens = parser.getAllTokens(sql);
test('token counts', () => { test('token counts', () => {
expect(tokens.length).toBe(7); expect(tokens.length - 1).toBe(7);
}); });
}); });

View File

@ -1,4 +1,6 @@
import { FlinkSQL, FlinkSqlParserListener } from '../../../src'; import FlinkSQL from '../../../src/parser/flinksql';
import FlinkSqlParserListener from '../../../src/lib/flinksql/FlinkSqlParserListener';
import { TableExpressionContext } from '../../../src/lib/flinksql/FlinkSqlParser';
describe('Flink SQL Listener Tests', () => { describe('Flink SQL Listener Tests', () => {
const expectTableName = 'user1'; const expectTableName = 'user1';
@ -10,11 +12,16 @@ describe('Flink SQL Listener Tests', () => {
test('Listener enterTableName', async () => { test('Listener enterTableName', async () => {
let result = ''; let result = '';
class MyListener extends FlinkSqlParserListener { class MyListener extends FlinkSqlParserListener {
enterTableExpression(ctx): void {
constructor() {
super()
}
enterTableExpression = (ctx: TableExpressionContext): void => {
result = ctx.getText().toLowerCase(); result = ctx.getText().toLowerCase();
} }
} }
const listenTableName: any = new MyListener(); const listenTableName = new MyListener();
await parser.listen(listenTableName, parserTree); await parser.listen(listenTableName, parserTree);
expect(result).toBe(expectTableName); expect(result).toBe(expectTableName);

View File

@ -1,4 +1,4 @@
import { FlinkSQL } from '../../../src'; import FlinkSQL from '../../../src/parser/flinksql';
describe('FlinkSQL Syntax Tests', () => { describe('FlinkSQL Syntax Tests', () => {
const parser = new FlinkSQL(); const parser = new FlinkSQL();
@ -22,6 +22,11 @@ describe('FlinkSQL Syntax Tests', () => {
const result = parser.validate(sql); const result = parser.validate(sql);
expect(result.length).toBe(0); expect(result.length).toBe(0);
}); });
test('Test simple Error Select Statement', () => {
const sql = `SELECTproduct, amount FROM;`;
const result = parser.validate(sql);
expect(result.length).toBe(1);
});
test('Test Select Statement with where clause', () => { test('Test Select Statement with where clause', () => {
const sql = `SELECT * FROM person WHERE id = 200 OR id = 300;`; const sql = `SELECT * FROM person WHERE id = 200 OR id = 300;`;
const result = parser.validate(sql); const result = parser.validate(sql);
@ -217,7 +222,6 @@ describe('FlinkSQL Syntax Tests', () => {
test('Test valid Double Line Comment statement', () => { test('Test valid Double Line Comment statement', () => {
const sql = `----test comment\n`; const sql = `----test comment\n`;
const result = parser.validate(sql); const result = parser.validate(sql);
console.log('res:', result);
expect(result.length).toBe(0); expect(result.length).toBe(0);
}); });

View File

@ -1,4 +1,4 @@
import { FlinkSQL } from '../../../../src'; import FlinkSQL from "../../../../src/parser/flinksql";
describe('FlinkSQL Create Table Syntax Tests', () => { describe('FlinkSQL Create Table Syntax Tests', () => {
const parser = new FlinkSQL(); const parser = new FlinkSQL();

View File

@ -1,4 +1,4 @@
import { FlinkSQL } from '../../../../src'; import FlinkSQL from "../../../../src/parser/flinksql";
describe('FlinkSQL Create Table Syntax Tests', () => { describe('FlinkSQL Create Table Syntax Tests', () => {
const parser = new FlinkSQL(); const parser = new FlinkSQL();

View File

@ -1,4 +1,4 @@
import { FlinkSQL } from '../../../../src'; import FlinkSQL from '../../../../src/parser/flinksql';
describe('FlinkSQL Create Table Syntax Tests', () => { describe('FlinkSQL Create Table Syntax Tests', () => {
const parser = new FlinkSQL(); const parser = new FlinkSQL();

View File

@ -1,4 +1,4 @@
import { FlinkSQL } from '../../../../src'; import FlinkSQL from "../../../../src/parser/flinksql";
describe('FlinkSQL Create Table Syntax Tests', () => { describe('FlinkSQL Create Table Syntax Tests', () => {
const parser = new FlinkSQL(); const parser = new FlinkSQL();

View File

@ -1,4 +1,4 @@
import { FlinkSQL } from '../../../../src'; import FlinkSQL from "../../../../src/parser/flinksql";
describe('FlinkSQL Create Table Syntax Tests', () => { describe('FlinkSQL Create Table Syntax Tests', () => {
const parser = new FlinkSQL(); const parser = new FlinkSQL();

View File

@ -1,4 +1,5 @@
import { FlinkSQL, FlinkSqlParserVisitor } from '../../../src'; import FlinkSQL from '../../../src/parser/flinksql';
import FlinkSqlParserVisitor from '../../../src/lib/flinksql/FlinkSqlParserVisitor';
describe('Flink SQL Visitor Tests', () => { describe('Flink SQL Visitor Tests', () => {
const expectTableName = 'user1'; const expectTableName = 'user1';
@ -9,14 +10,11 @@ describe('Flink SQL Visitor Tests', () => {
console.log('Parse error:', error); console.log('Parse error:', error);
}); });
// console.log('Parser tree string:', parser.toString(parserTree));
test('Visitor visitTableName', () => { test('Visitor visitTableName', () => {
let result = ''; let result = '';
class MyVisitor extends FlinkSqlParserVisitor { class MyVisitor extends FlinkSqlParserVisitor<any>{
visitTableExpression(ctx): void { visitTableExpression = (ctx): void => {
result = ctx.getText().toLowerCase(); result = ctx.getText().toLowerCase();
super.visitTableExpression(ctx);
} }
} }
const visitor: any = new MyVisitor(); const visitor: any = new MyVisitor();

View File

@ -1,4 +1,4 @@
import { GenericSQL } from '../../../src/'; import GenericSQL from '../../../src/parser/generic';
describe('GenericSQL Lexer tests', () => { describe('GenericSQL Lexer tests', () => {
const mysqlParser = new GenericSQL(); const mysqlParser = new GenericSQL();
@ -7,6 +7,6 @@ describe('GenericSQL Lexer tests', () => {
const tokens = mysqlParser.getAllTokens(sql); const tokens = mysqlParser.getAllTokens(sql);
test('token counts', () => { test('token counts', () => {
expect(tokens.length).toBe(12); expect(tokens.length - 1).toBe(12);
}); });
}); });

View File

@ -1,4 +1,5 @@
import { GenericSQL, SqlParserListener } from '../../../src'; import GenericSQL from '../../../src/parser/generic';
import SqlParserListener from '../../../src/lib/generic/SqlParserListener';
describe('Generic SQL Listener Tests', () => { describe('Generic SQL Listener Tests', () => {
const expectTableName = 'user1'; const expectTableName = 'user1';
@ -10,7 +11,7 @@ describe('Generic SQL Listener Tests', () => {
test('Listener enterTableName', async () => { test('Listener enterTableName', async () => {
let result = ''; let result = '';
class MyListener extends SqlParserListener { class MyListener extends SqlParserListener {
enterTableName(ctx): void { enterTableName = (ctx): void => {
result = ctx.getText().toLowerCase(); result = ctx.getText().toLowerCase();
} }
} }

View File

@ -1,4 +1,4 @@
import { GenericSQL } from '../../../src'; import GenericSQL from '../../../src/parser/generic';
describe('Generic SQL Syntax Tests', () => { describe('Generic SQL Syntax Tests', () => {
const parser = new GenericSQL(); const parser = new GenericSQL();

View File

@ -1,4 +1,5 @@
import { GenericSQL, SqlParserVisitor } from '../../../src'; import GenericSQL from '../../../src/parser/generic';
import SqlParserVisitor from '../../../src/lib/generic/SqlParserVisitor';
describe('Generic SQL Visitor Tests', () => { describe('Generic SQL Visitor Tests', () => {
const expectTableName = 'user1'; const expectTableName = 'user1';
@ -9,17 +10,19 @@ describe('Generic SQL Visitor Tests', () => {
console.log('Parse error:', error); console.log('Parse error:', error);
}); });
console.log('Parser tree string:', parser.toString(parserTree));
test('Visitor visitTableName', () => { test('Visitor visitTableName', () => {
let result = ''; let result = '';
class MyVisitor extends SqlParserVisitor { class MyVisitor extends SqlParserVisitor<any> {
visitTableName(ctx): void { constructor() {
super();
}
visitTableName = (ctx): void => {
result = ctx.getText().toLowerCase(); result = ctx.getText().toLowerCase();
super.visitTableName(ctx); super.visitTableName?.(ctx);
} }
} }
const visitor: any = new MyVisitor(); const visitor = new MyVisitor();
visitor.visit(parserTree); visitor.visit(parserTree);
expect(result).toBe(expectTableName); expect(result).toBe(expectTableName);

View File

@ -1,16 +1,16 @@
import { HiveSQL } from '../../../src'; import HiveSQL from '../../../src/parser/hive';
describe('HiveSQL Lexer tests', () => { describe('HiveSQL Lexer tests', () => {
const parser = new HiveSQL(); const parser = new HiveSQL();
test('select token counts', () => { test('select token counts', () => {
const sql = 'SELECT * FROM t1'; const sql = 'SELECT * FROM t1';
const tokens = parser.getAllTokens(sql); const tokens = parser.getAllTokens(sql);
expect(tokens.length).toBe(4); expect(tokens.length - 1).toBe(4);
}); });
test('select token counts', () => { test('select token counts', () => {
const sql = 'show create table_name;'; const sql = 'show create table_name;';
const tokens = parser.getAllTokens(sql); const tokens = parser.getAllTokens(sql);
expect(tokens.length).toBe(4); expect(tokens.length - 1).toBe(4);
}); });
}); });

View File

@ -1,4 +1,6 @@
import { HiveSQL, HiveSqlListener } from '../../../src'; import HiveSqlListener from '../../../src/lib/hive/HiveSqlListener';
import HiveSQL from '../../../src/parser/hive';
describe('Hive SQL Listener Tests', () => { describe('Hive SQL Listener Tests', () => {
const parser = new HiveSQL(); const parser = new HiveSQL();
@ -9,7 +11,7 @@ describe('Hive SQL Listener Tests', () => {
let result = ''; let result = '';
class MyListener extends HiveSqlListener { class MyListener extends HiveSqlListener {
enterSelect_list(ctx): void { enterSelect_list = (ctx): void => {
result = ctx.getText(); result = ctx.getText();
} }
} }
@ -23,7 +25,7 @@ describe('Hive SQL Listener Tests', () => {
const parserTree = parser.parse(sql); const parserTree = parser.parse(sql);
let result = ''; let result = '';
class MyListener extends HiveSqlListener { class MyListener extends HiveSqlListener {
enterDrop_stmt(ctx): void { enterDrop_stmt = (ctx): void => {
result = ctx.getText(); result = ctx.getText();
} }
} }

View File

@ -1,4 +1,4 @@
import { HiveSQL } from '../../../src'; import HiveSQL from '../../../src/parser/hive';
describe('Hive SQL Syntax Tests', () => { describe('Hive SQL Syntax Tests', () => {
const parser = new HiveSQL(); const parser = new HiveSQL();

Some files were not shown because too many files have changed in this diff Show More