feat: upgrade antlr4 to 4.12.0 (#88)
parent c0842b3e07
commit c1c72def30

.github/workflows/nodejs.yml (vendored, 11 changes)
@@ -9,7 +9,7 @@ jobs:
     strategy:
       matrix:
-        node-version: [12.x, 14.x]
+        node-version: [16.x]

     steps:
       - uses: actions/checkout@v1
@@ -17,10 +17,11 @@ jobs:
         uses: actions/setup-node@v1
         with:
           node-version: ${{ matrix.node-version }}
-      - name: npm install, test, and build
+      - name: install, test, build
         run: |
-          npm install
-          npm test
-          npm run build
+          export NODE_OPTIONS="--max_old_space_size=4096"
+          yarn install
+          yarn test
+          yarn build
         env:
           CI: true
@@ -1,10 +1,33 @@
 # dt-sql-parser

-## Prerequisites
+## Get Started
+
+Install the dependencies after cloning the project:
+
+```bash
+yarn install
+```

 - test

+```bash
+yarn test
+```
+
+## Compile the grammar sources
+
+Compile one language:
+
+```bash
+yarn antlr4 --lang=generic
+```
+
+Compile all languages:
+
+```bash
+yarn antlr4 --all
+```
+
 ## Branch Organization

 ## Source Code Organization
build/antlr-4.12.0-complete.jar (new binary file; not shown)
@@ -1,7 +1,8 @@
 const path = require('path');
 const exec = require('child_process').exec;
+const argv = require('yargs-parser')(process.argv.slice(2))

-const antlr4 = path.resolve(__dirname, 'antlr-4.8-complete.jar');
+const antlr4 = path.resolve(__dirname, './antlr-4.12.0-complete.jar');
 const grammars = path.resolve(__dirname, '../src/grammar');
 const output = path.resolve(__dirname, '../src/lib');

@@ -14,16 +15,16 @@ const entry = [
     'flinksql',
 ];

-entry.forEach((language) => {
+function compile(language) {
     const cmd = `
         java -jar ${antlr4}
-        -Dlanguage=JavaScript
+        -Dlanguage=TypeScript
         -visitor
         -listener
         -o ${output}/${language}
         ${grammars}/${language}/*.g4
     `.replace(/\n/g, '');
-    console.log('cmd:', cmd);
+    console.info('Executing:', cmd);
     exec(cmd, (err) => {
         if (err) {
             console.error('Antlr4 build error: ' + language, err);
@@ -31,5 +32,20 @@ entry.forEach((language) => {
             console.log(`Build ${language} success.`);
         }
     });
-});
+}
+
+if (argv.all) { // build all: yarn antlr4 --all
+    entry.forEach((language) => {
+        compile(language);
+    });
+} else if (argv.lang) { // build one: yarn antlr4 --lang=generic
+    const supportedLanguage = entry.find((language) => language === argv.lang);
+    if (supportedLanguage) {
+        compile(argv.lang);
+    } else {
+        console.error('Unsupported language: ' + argv.lang);
+    }
+} else {
+    console.error('Please specify the language, e.g.: yarn antlr4 --lang=generic');
+}
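For reference, a minimal sketch of how yargs-parser interprets the flags the script branches on (default parser behavior; the sample argv arrays are illustrative):

```ts
import parse from 'yargs-parser';

// `yarn antlr4 --all` reaches the script as ['--all']
parse(['--all']);             // => { _: [], all: true }
// both `--lang=generic` and `--lang generic` populate argv.lang
parse(['--lang=generic']);    // => { _: [], lang: 'generic' }
parse(['--lang', 'generic']); // => { _: [], lang: 'generic' }
```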
jest.config.js (119 changes)
@@ -1,33 +1,38 @@
-// For a detailed explanation regarding each configuration property, visit:
-// https://jestjs.io/docs/en/configuration.html
+/*
+ * For a detailed explanation regarding each configuration property, visit:
+ * https://jestjs.io/docs/configuration
+ */

 module.exports = {
   // All imported modules in your tests should be mocked automatically
   // automock: false,

   // Stop running tests after `n` failures
   // bail: 0,

-  // Respect "browser" field in package.json when resolving modules
-  // browser: false,
-
   // The directory where Jest should store its cached dependency information
-  // cacheDirectory: "/private/var/folders/xr/54w2mws93hj3p3_ysc347flc0000gn/T/jest_dx",
+  // cacheDirectory: "/private/var/folders/70/21p94l8j6cd9vv9t990g8cj00000gn/T/jest_dx",

-  // Automatically clear mock calls and instances between every test
-  // clearMocks: false,
+  // Automatically clear mock calls, instances, contexts and results before every test
+  clearMocks: true,

   // Indicates whether the coverage information should be collected while executing the test
-  // collectCoverage: false,
+  collectCoverage: true,

   // An array of glob patterns indicating a set of files for which coverage information should be collected
-  // collectCoverageFrom: null,
+  // collectCoverageFrom: undefined,

   // The directory where Jest should output its coverage files
-  // coverageDirectory: null,
+  coverageDirectory: "coverage",

   // An array of regexp pattern strings used to skip coverage collection
-  coveragePathIgnorePatterns: ['/node_modules/'],
+  // coveragePathIgnorePatterns: [
+  //   "/node_modules/"
+  // ],
+
+  // Indicates which provider should be used to instrument code for coverage
+  // coverageProvider: "babel",

   // A list of reporter names that Jest uses when writing coverage reports
   // coverageReporters: [
@@ -38,27 +43,33 @@ module.exports = {
   // ],

   // An object that configures minimum threshold enforcement for coverage results
-  // coverageThreshold: null,
+  // coverageThreshold: undefined,

   // A path to a custom dependency extractor
-  // dependencyExtractor: null,
+  // dependencyExtractor: undefined,

   // Make calling deprecated APIs throw helpful error messages
   // errorOnDeprecated: false,

+  // The default configuration for fake timers
+  // fakeTimers: {
+  //   "enableGlobally": false
+  // },
+
   // Force coverage collection from ignored files using an array of glob patterns
   // forceCoverageMatch: [],

   // A path to a module which exports an async function that is triggered once before all test suites
-  // globalSetup: null,
+  // globalSetup: undefined,

   // A path to a module which exports an async function that is triggered once after all test suites
-  // globalTeardown: null,
+  // globalTeardown: undefined,

   // A set of global variables that need to be available in all test environments
-  globals: {
-    window: {},
-  },
+  // globals: {},
+
+  // The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
+  // maxWorkers: "50%",

   // An array of directory names to be searched recursively up from the requiring module's location
   // moduleDirectories: [
@@ -66,16 +77,18 @@ module.exports = {
   // ],

   // An array of file extensions your modules use
-  // moduleFileExtensions: [
-  //   "js",
-  //   "json",
-  //   "jsx",
-  //   "ts",
-  //   "tsx",
-  //   "node"
-  // ],
+  moduleFileExtensions: [
+    "js",
+    "mjs",
+    "cjs",
+    "jsx",
+    "ts",
+    "tsx",
+    "json",
+    "node"
+  ],

-  // A map from regular expressions to module names that allow to stub out resources with a single module
+  // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
   // moduleNameMapper: {},

   // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
@@ -88,28 +101,28 @@ module.exports = {
   // notifyMode: "failure-change",

   // A preset that is used as a base for Jest's configuration
-  // preset: null,
+  preset: "ts-jest/presets/js-with-ts-esm",

   // Run tests from one or more projects
-  // projects: null,
+  // projects: undefined,

   // Use this configuration option to add custom reporters to Jest
   // reporters: undefined,

-  // Automatically reset mock state between every test
+  // Automatically reset mock state before every test
   // resetMocks: false,

   // Reset the module registry before running each individual test
   // resetModules: false,

   // A path to a custom resolver
-  // resolver: null,
+  // resolver: undefined,

-  // Automatically restore mock state between every test
+  // Automatically restore mock state and implementation before every test
   // restoreMocks: false,

   // The root directory that Jest should scan for tests and modules within
-  // rootDir: null,
+  // rootDir: undefined,

   // A list of paths to directories that Jest should use to search for files in
   // roots: [
@@ -125,11 +138,14 @@ module.exports = {
   // A list of paths to modules that run some code to configure or set up the testing framework before each test
   // setupFilesAfterEnv: [],

+  // The number of seconds after which a test is considered as slow and reported as such in the results.
+  // slowTestThreshold: 5,
+
   // A list of paths to snapshot serializer modules Jest should use for snapshot testing
   // snapshotSerializers: [],

   // The test environment that will be used for testing
-  testEnvironment: 'node',
+  testEnvironment: "node",

   // Options that will be passed to the testEnvironment
   // testEnvironmentOptions: {},
@@ -138,46 +154,47 @@ module.exports = {
   // testLocationInResults: false,

   // The glob patterns Jest uses to detect test files
-  // testMatch: [
-  //   "**/__tests__/**/*.[jt]s?(x)",
-  //   "**/?(*.)+(spec|test).[tj]s?(x)"
-  // ],
+  testMatch: [
+    "**/__tests__/**/*.[jt]s?(x)",
+    "**/?(*.)+(spec|test).[tj]s?(x)"
+  ],

   // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
-  testPathIgnorePatterns: ['/node_modules/'],
+  // testPathIgnorePatterns: [
+  //   "/node_modules/"
+  // ],

   // The regexp pattern or array of patterns that Jest uses to detect test files
   // testRegex: [],

   // This option allows the use of a custom results processor
-  // testResultsProcessor: null,
+  // testResultsProcessor: undefined,

   // This option allows use of a custom test runner
-  // testRunner: "jasmine2",
+  // testRunner: "jest-circus/runner",

-  // This option sets the URL for the jsdom environment. It is reflected in properties such as location.href
-  // testURL: "http://localhost",
-
-  // Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout"
-  // timers: "real",
-
   // A map from regular expressions to paths to transformers
   transform: {
-    '^.+\\.(t|j)sx?$': 'ts-jest',
+    '\\.[jt]sx?$': 'ts-jest',
   },

   // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
-  transformIgnorePatterns: ['/node_modules/'],
+  transformIgnorePatterns: [
+    '/node_modules/(?!antlr4)'
+  ],

   // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
   // unmockedModulePathPatterns: undefined,

   // Indicates whether each individual test should be reported during the run
-  // verbose: null,
+  // verbose: undefined,

   // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
   // watchPathIgnorePatterns: [],

   // Whether to use watchman for file crawling
   // watchman: true,
+
+  moduleNameMapper: {
+    '^antlr4$': '<rootDir>/node_modules/antlr4/src/antlr4/index.web.js',
+  }
 };
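As a sanity check of the configuration above, a hypothetical spec (the file path, import path, and assertion are illustrative, not part of this commit) that the new testMatch globs would pick up and ts-jest would transform:

```ts
// src/__tests__/flinksql.test.ts — matches "**/__tests__/**/*.[jt]s?(x)"
import FlinkSQL from '../parser/flinksql';

describe('FlinkSQL', () => {
    test('lexes a simple statement into tokens', () => {
        const parser = new FlinkSQL();
        const tokens = parser.getAllTokens('SELECT 1;');
        expect(tokens.length).toBeGreaterThan(0);
    });
});
```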
package.json (19 changes)
@@ -19,21 +19,23 @@
     "build": "rm -rf dist && tsc",
     "eslint": "eslint ./src/**/*.ts",
     "check-types": "tsc --skipLibCheck",
-    "test": "jest",
+    "test": "NODE_OPTIONS=--max_old_space_size=4096 jest",
     "release": "npm run build && standard-version --infile CHANGELOG.md"
   },
   "author": "dt-insight-front",
   "license": "MIT",
   "devDependencies": {
-    "@types/jest": "^24.0.13",
+    "@types/jest": "^29.5.1",
+    "@types/node": "^18.15.11",
     "@typescript-eslint/eslint-plugin": "^3.10.1",
     "@typescript-eslint/parser": "^3.10.1",
-    "eslint": "^7.7.0",
+    "eslint": "^7.32.0",
     "eslint-config-google": "^0.14.0",
-    "jest": "^24.8.0",
-    "ts-jest": "^24.1.0",
-    "typescript": "^4.9.4",
-    "standard-version": "^9.1.0"
+    "jest": "^29.5.0",
+    "standard-version": "^9.5.0",
+    "ts-jest": "^29.1.0",
+    "typescript": "^5.0.4",
+    "yargs-parser": "^21.1.1"
   },
   "git repository": "https://github.com/DTStack/dt-sql-parser",
   "repository": "https://github.com/DTStack/dt-sql-parser",
@@ -41,7 +43,6 @@
     "registry": "https://registry.npmjs.org/"
   },
   "dependencies": {
-    "@types/antlr4": "4.7.0",
-    "antlr4": "4.7.2"
+    "antlr4": "^4.12.0"
   }
 }
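Dropping @types/antlr4 works because recent antlr4 releases ship their own TypeScript definitions; a minimal sketch of the new import surface (names as used elsewhere in this commit):

```ts
// typings resolve from the antlr4 package itself, no separate @types needed
import { CharStream, CommonTokenStream, Lexer, Parser } from 'antlr4';

const chars = new CharStream('SELECT 1;');
```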
src/lib/flinksql/FlinkSqlLexer.ts (new file, 1785 lines; diff suppressed)
src/lib/flinksql/FlinkSqlParser.ts (new file, 19616 lines; diff suppressed)
src/lib/flinksql/FlinkSqlParserListener.ts (new file, 2043 lines; diff suppressed)
src/lib/flinksql/FlinkSqlParserVisitor.ts (new file, 1300 lines; diff suppressed)
src/lib/generic/SqlLexer.ts (new file, 6618 lines; diff suppressed)
src/lib/generic/SqlParser.ts (new file, 78974 lines; diff suppressed)
src/lib/generic/SqlParserListener.ts (new file, 6564 lines; diff suppressed)
src/lib/generic/SqlParserVisitor.ts (new file, 4111 lines; diff suppressed)
src/lib/hive/HiveSql.js (37857 changes; diff suppressed)
src/lib/hive/HiveSql.ts (new file, 33635 lines; diff suppressed)
src/lib/hive/HiveSqlLexer.ts (new file, 2108 lines; diff suppressed)
src/lib/hive/HiveSqlListener.ts (new file, 2515 lines; diff suppressed)
src/lib/hive/HiveSqlVisitor.ts (new file, 1608 lines; diff suppressed)
src/lib/pgsql/PostgreSQLLexer.ts (new file, 3235 lines; diff suppressed)
src/lib/pgsql/PostgreSQLParser.ts (new file, 115014 lines; diff suppressed)
src/lib/pgsql/PostgreSQLParserListener.ts (new file, 8946 lines; diff suppressed)
src/lib/pgsql/PostgreSQLParserVisitor.ts (new file, 5699 lines; diff suppressed)
@@ -1,101 +0,0 @@
-// https://github.com/antlr/grammars-v4/blob/master/sql/postgresql/Java/PostgreSQLLexerBase.java
-
-// eslint-disable-next-line no-invalid-this
-const __extends = (this && this.__extends) || (function() {
-    let extendStatics = function(d, b) {
-        extendStatics = Object.setPrototypeOf ||
-            ({ __proto__: [] } instanceof Array && function(d, b) {
-                d.__proto__ = b;
-            }) ||
-            function(d, b) {
-                for (const p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p];
-            };
-        return extendStatics(d, b);
-    };
-    return function(d, b) {
-        if (typeof b !== 'function' && b !== null) {
-            throw new TypeError('Class extends value ' + String(b) + ' is not a constructor or null');
-        }
-        extendStatics(d, b);
-        function __() {
-            this.constructor = d;
-        }
-        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
-    };
-})();
-
-const Lexer = require('antlr4').Lexer;
-
-function isLetter(str) {
-    return str.length === 1 && str.match(/[a-z]/i);
-}
-
-function PostgreSQLLexerBase(input) {
-    const _this = Lexer.call(this, input) || this;
-    _this.tags = [];
-    return _this;
-}
-
-__extends(PostgreSQLLexerBase, Lexer);
-
-PostgreSQLLexerBase.prototype.pushTag = function() {
-    this.tags.push(getText());
-};
-
-PostgreSQLLexerBase.prototype.isTag = function() {
-    return this.getText().equals(this.tags.peek());
-};
-
-PostgreSQLLexerBase.prototype.popTag = function() {
-    this.tags.pop();
-};
-
-PostgreSQLLexerBase.prototype.getInputStream = function() {
-    return this._input;
-};
-
-PostgreSQLLexerBase.prototype.checkLA = function(c) {
-    // eslint-disable-next-line new-cap
-    return this.getInputStream().LA(1) !== c;
-};
-
-PostgreSQLLexerBase.prototype.charIsLetter = function() {
-    // eslint-disable-next-line new-cap
-    return isLetter(this.getInputStream().LA(-1));
-};
-
-PostgreSQLLexerBase.prototype.HandleNumericFail = function() {
-    this.getInputStream().seek(this.getInputStream().index() - 2);
-    const Integral = 535;
-    this.setType(Integral);
-};
-
-PostgreSQLLexerBase.prototype.HandleLessLessGreaterGreater = function() {
-    const LESS_LESS = 18;
-    const GREATER_GREATER = 19;
-    if (this.getText() === '<<') {
-        this.setType(LESS_LESS);
-    }
-    if (this.getText() === '>>') {
-        this.setType(GREATER_GREATER);
-    }
-};
-
-PostgreSQLLexerBase.prototype.UnterminatedBlockCommentDebugAssert = function() {
-    // Debug.Assert(InputStream.LA(1) == -1 /*EOF*/);
-};
-
-PostgreSQLLexerBase.prototype.CheckIfUtf32Letter = function() {
-    // eslint-disable-next-line new-cap
-    let codePoint = this.getInputStream().LA(-2) << 8 + this.getInputStream().LA(-1);
-    let c;
-    if (codePoint < 0x10000) {
-        c = String.fromCharCode(codePoint);
-    } else {
-        codePoint -= 0x10000;
-        c = String.fromCharCode(codePoint / 0x400 + 0xd800, codePoint % 0x400 + 0xdc00);
-    }
-    return isLetter(c[0]);
-};
-
-exports.PostgreSQLLexerBase = PostgreSQLLexerBase;
src/lib/pgsql/base/PostgreSQLLexerBase.ts (new file, 72 lines)
@@ -0,0 +1,72 @@
+import { Lexer } from 'antlr4';
+
+function isLetter(str) {
+    return str.length === 1 && str.match(/[a-z]/i);
+}
+
+export default class PostgreSQLLexerBase extends Lexer {
+
+    tags: string[] = [];
+
+    CheckIfUtf32Letter() {
+        // eslint-disable-next-line new-cap
+        let codePoint = this.getInputStream().LA(-2) << 8 + this.getInputStream().LA(-1);
+        let c;
+        if (codePoint < 0x10000) {
+            c = String.fromCharCode(codePoint);
+        } else {
+            codePoint -= 0x10000;
+            c = String.fromCharCode(codePoint / 0x400 + 0xd800, codePoint % 0x400 + 0xdc00);
+        }
+        return isLetter(c[0]);
+    }
+
+    UnterminatedBlockCommentDebugAssert() {
+        // Debug.Assert(InputStream.LA(1) == -1 /*EOF*/);
+    }
+
+    HandleLessLessGreaterGreater() {
+        const LESS_LESS = 18;
+        const GREATER_GREATER = 19;
+        if (this.text === '<<') {
+            this._type = LESS_LESS;
+        }
+        if (this.text === '>>') {
+            this._type = GREATER_GREATER;
+        }
+    }
+
+    HandleNumericFail() {
+        this.getInputStream().seek(this.getInputStream().index - 2);
+        const Integral = 535;
+        this._type = Integral;
+    }
+
+    charIsLetter() {
+        // eslint-disable-next-line new-cap
+        return isLetter(this.getInputStream().LA(-1));
+    }
+
+    pushTag() {
+        this.tags.push(this.text);
+    }
+
+    isTag() {
+        return this.text === this.tags.pop();
+    }
+
+    popTag() {
+        this.tags.pop();
+    }
+
+    getInputStream() {
+        return this._input;
+    }
+
+    checkLA(c) {
+        // eslint-disable-next-line new-cap
+        return this.getInputStream().LA(1) !== c;
+    }
+}
@@ -1,149 +0,0 @@
-/* eslint-disable new-cap,camelcase */
-
-// https://github.com/antlr/grammars-v4/blob/master/sql/postgresql/Java/PostgreSQLParserBase.java
-// eslint-disable-next-line no-invalid-this
-const __extends = (this && this.__extends) || (function() {
-    let extendStatics = function(d, b) {
-        extendStatics = Object.setPrototypeOf ||
-            ({ __proto__: [] } instanceof Array && function(d, b) {
-                d.__proto__ = b;
-            }) ||
-            function(d, b) {
-                for (const p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p];
-            };
-        return extendStatics(d, b);
-    };
-    return function(d, b) {
-        if (typeof b !== 'function' && b !== null) {
-            throw new TypeError('Class extends value ' + String(b) + ' is not a constructor or null');
-        }
-        extendStatics(d, b);
-        function __() {
-            this.constructor = d;
-        }
-        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
-    };
-})();
-
-const PostgreSQLLexer_1 = require('../PostgreSQLLexer');
-const PostgreSQLParser_1 = require('../PostgreSQLParser');
-
-const antlr4 = require('antlr4/index');
-const CharStreams = antlr4.CharStreams;
-const CommonTokenStream = antlr4.CommonTokenStream;
-const Parser = antlr4.Parser;
-
-__extends(PostgreSQLParserBase, Parser);
-
-function PostgreSQLParserBase(input) {
-    return Parser.call(this, input) || this;
-}
-
-PostgreSQLParserBase.prototype.GetParsedSqlTree = function(script, line) {
-    const ph = this.getPostgreSQLParser(script);
-    return ph.program();
-};
-
-PostgreSQLParserBase.prototype.ParseRoutineBody = function(_localctx) {
-    let lang = null;
-    for (let _i = 0, _a = _localctx.createfunc_opt_item(); _i < _a.length; _i++) {
-        const coi = _a[_i];
-        if (!!coi.LANGUAGE()) {
-            if (!!coi.nonreservedword_or_sconst()) {
-                if (!!coi.nonreservedword_or_sconst().nonreservedword()) {
-                    if (!!coi.nonreservedword_or_sconst().nonreservedword().identifier()) {
-                        if (!!coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier()) {
-                            lang = coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier().getText();
-                            break;
-                        }
-                    }
-                }
-            }
-        }
-    }
-    if (!lang) {
-        return;
-    }
-    // eslint-disable-next-line camelcase
-    let func_as = null;
-    for (let _b = 0, _c = _localctx.createfunc_opt_item(); _b < _c.length; _b++) {
-        const a = _c[_b];
-        if (!a.func_as()) {
-            // eslint-disable-next-line camelcase
-            func_as = a;
-            break;
-        }
-    }
-    // eslint-disable-next-line camelcase
-    if (!!func_as) {
-        const txt = this.GetRoutineBodyString(func_as.func_as().sconst(0));
-        const line = func_as.func_as().sconst(0).start.getLine();
-        const ph = this.getPostgreSQLParser(txt);
-        switch (lang) {
-            case 'plpgsql':
-                func_as.func_as().Definition = ph.plsqlroot();
-                break;
-            case 'sql':
-                func_as.func_as().Definition = ph.program();
-                break;
-        }
-    }
-};
-
-PostgreSQLParserBase.prototype.TrimQuotes = function(s) {
-    return (!s) ? s : s.substring(1, s.length() - 1);
-};
-
-PostgreSQLParserBase.prototype.unquote = function(s) {
-    const slength = s.length();
-    const r = '';
-    let i = 0;
-    while (i < slength) {
-        const c = s.charAt(i);
-        r.append(c);
-        if (c === '\'' && i < slength - 1 && (s.charAt(i + 1) === '\'')) {
-            i++;
-        }
-        i++;
-    }
-    return r.toString();
-};
-
-PostgreSQLParserBase.prototype.GetRoutineBodyString = function(rule) {
-    const anysconst = rule.anysconst();
-    const StringConstant = anysconst.StringConstant();
-    if (!!StringConstant) {
-        return this.unquote(this.TrimQuotes(StringConstant.getText()));
-    }
-    const UnicodeEscapeStringConstant = anysconst.UnicodeEscapeStringConstant();
-    if (!!UnicodeEscapeStringConstant) {
-        return this.TrimQuotes(UnicodeEscapeStringConstant.getText());
-    }
-    const EscapeStringConstant = anysconst.EscapeStringConstant();
-    if (!!EscapeStringConstant) {
-        return this.TrimQuotes(EscapeStringConstant.getText());
-    }
-    let result = '';
-    const dollartext = anysconst.DollarText();
-    for (let _i = 0, dollartext_1 = dollartext; _i < dollartext_1.length; _i++) {
-        const s = dollartext_1[_i];
-        result += s.getText();
-    }
-    return result;
-};
-
-PostgreSQLParserBase.getPostgreSQLParser = function(script) {
-    const charStream = CharStreams.fromString(script);
-    const lexer = new PostgreSQLLexer_1.PostgreSQLLexer(charStream);
-    const tokens = new CommonTokenStream(lexer);
-    const parser = new PostgreSQLParser_1.PostgreSQLParser(tokens);
-    lexer.removeErrorListeners();
-    parser.removeErrorListeners();
-    // LexerDispatchingErrorListener listener_lexer = new LexerDispatchingErrorListener((Lexer)(((CommonTokenStream)(this.getInputStream())).getTokenSource()));
-    // ParserDispatchingErrorListener listener_parser = new ParserDispatchingErrorListener(this);
-    // lexer.addErrorListener(listener_lexer);
-    // parser.addErrorListener(listener_parser);
-    return parser;
-};
-
-exports.PostgreSQLParserBase = PostgreSQLParserBase;
src/lib/pgsql/base/PostgreSQLParserBase.ts (new file, 110 lines)
@@ -0,0 +1,110 @@
+/* eslint-disable new-cap,camelcase */
+import { Parser, CharStreams, CommonTokenStream } from 'antlr4';
+import PostgreSQLLexer from '../PostgreSQLLexer';
+import PostgreSQLParser from '../PostgreSQLParser';
+
+export default class PostgreSQLParserBase extends Parser {
+
+    getPostgreSQLParser(script) {
+        const charStream = CharStreams.fromString(script);
+        const lexer = new PostgreSQLLexer(charStream);
+        const tokens = new CommonTokenStream(lexer);
+        const parser = new PostgreSQLParser(tokens);
+        return parser;
+    }
+
+    GetParsedSqlTree(script, line) {
+        const ph = this.getPostgreSQLParser(script);
+        return ph.program();
+    }
+
+    ParseRoutineBody(_localctx) {
+        let lang = null;
+        for (let _i = 0, _a = _localctx.createfunc_opt_item(); _i < _a.length; _i++) {
+            const coi = _a[_i];
+            if (!!coi.LANGUAGE()) {
+                if (!!coi.nonreservedword_or_sconst()) {
+                    if (!!coi.nonreservedword_or_sconst().nonreservedword()) {
+                        if (!!coi.nonreservedword_or_sconst().nonreservedword().identifier()) {
+                            if (!!coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier()) {
+                                lang = coi.nonreservedword_or_sconst().nonreservedword().identifier().Identifier().getText();
+                                break;
+                            }
+                        }
+                    }
+                }
+            }
+        }
+        if (!lang) {
+            return;
+        }
+        // eslint-disable-next-line camelcase
+        let func_as = null;
+        for (let _b = 0, _c = _localctx.createfunc_opt_item(); _b < _c.length; _b++) {
+            const a = _c[_b];
+            if (!a.func_as()) {
+                // eslint-disable-next-line camelcase
+                func_as = a;
+                break;
+            }
+        }
+        // eslint-disable-next-line camelcase
+        if (!!func_as) {
+            const txt = this.GetRoutineBodyString(func_as.func_as().sconst(0));
+            // @ts-ignore
+            const line = func_as.func_as().sconst(0).start.getLine();
+            const ph = this.getPostgreSQLParser(txt);
+            switch (lang) {
+                case 'plpgsql':
+                    func_as.func_as().Definition = ph.plsqlroot();
+                    break;
+                case 'sql':
+                    func_as.func_as().Definition = ph.program();
+                    break;
+            }
+        }
+    }
+
+    TrimQuotes(s: string) {
+        return (!s) ? s : s.substring(1, s.length - 1);
+    }
+
+    unquote(s: string) {
+        const slength = s.length;
+        let r = '';
+        let i = 0;
+        while (i < slength) {
+            const c = s.charAt(i);
+            r = r.concat(c);
+            if (c === '\'' && i < slength - 1 && (s.charAt(i + 1) === '\'')) {
+                i++;
+            }
+            i++;
+        }
+        return r.toString();
+    }
+
+    GetRoutineBodyString(rule) {
+        const anysconst = rule.anysconst();
+        const StringConstant = anysconst.StringConstant();
+        if (!!StringConstant) {
+            return this.unquote(this.TrimQuotes(StringConstant.getText()));
+        }
+        const UnicodeEscapeStringConstant = anysconst.UnicodeEscapeStringConstant();
+        if (!!UnicodeEscapeStringConstant) {
+            return this.TrimQuotes(UnicodeEscapeStringConstant.getText());
+        }
+        const EscapeStringConstant = anysconst.EscapeStringConstant();
+        if (!!EscapeStringConstant) {
+            return this.TrimQuotes(EscapeStringConstant.getText());
+        }
+        let result = '';
+        const dollartext = anysconst.DollarText();
+        for (let _i = 0, dollartext_1 = dollartext; _i < dollartext_1.length; _i++) {
+            const s = dollartext_1[_i];
+            result += s.getText();
+        }
+        return result;
+    }
+}
@@ -1,16 +0,0 @@
-const Lexer = require('antlr4').Lexer;
-
-function PlSqlBaseLexer(...args) {
-    Lexer.call(this, ...args);
-    return this;
-}
-
-PlSqlBaseLexer.prototype = Object.create(Lexer.prototype);
-PlSqlBaseLexer.prototype.constructor = PlSqlBaseLexer;
-
-PlSqlBaseLexer.prototype.IsNewlineAtPos = function(pos) {
-    const la = this._input.LA(pos);
-    return la == -1 || la == '\n';
-};
-
-exports.PlSqlBaseLexer = PlSqlBaseLexer;
@@ -1,27 +0,0 @@
-const Parser = require('antlr4').Parser;
-
-function PlSqlBaseParser(...args) {
-    Parser.call(this, ...args);
-    this._isVersion10 = false;
-    this._isVersion12 = true;
-    return this;
-}
-
-PlSqlBaseParser.prototype = Object.create(Parser.prototype);
-PlSqlBaseParser.prototype.constructor = PlSqlBaseParser;
-
-PlSqlBaseParser.prototype.isVersion10 = function() {
-    return this._isVersion10;
-};
-PlSqlBaseParser.prototype.isVersion12 = function() {
-    return this._isVersion12;
-};
-PlSqlBaseParser.prototype.setVersion10 = function(value) {
-    this._isVersion10 = value;
-};
-PlSqlBaseParser.prototype.setVersion12 = function(value) {
-    this._isVersion12 = value;
-};
-
-exports.PlSqlBaseParser = PlSqlBaseParser;
src/lib/plsql/PlSqlLexer.ts (new file, 16230 lines; diff suppressed)
src/lib/plsql/PlSqlParser.js (208527 changes; diff suppressed)
src/lib/plsql/PlSqlParser.ts (new file, 193036 lines; diff suppressed)
src/lib/plsql/PlSqlParserListener.ts (new file, 8303 lines; diff suppressed)
src/lib/plsql/PlSqlParserVisitor.ts (new file, 5291 lines; diff suppressed)

src/lib/plsql/base/PlSqlBaseLexer.ts (new file, 9 lines)
@@ -0,0 +1,9 @@
+import { Lexer } from "antlr4";
+
+export default class PlSqlBaseLexer extends Lexer {
+
+    IsNewlineAtPos(pos: number): boolean {
+        const la = this._input.LA(pos);
+        return la == -1;
+    }
+}
src/lib/plsql/base/PlSqlBaseParser.ts (new file, 20 lines)
@@ -0,0 +1,20 @@
+import { Parser } from 'antlr4';
+
+export default class PlSqlBaseParser extends Parser {
+
+    private _isVersion10: boolean = false;
+    private _isVersion12: boolean = true;
+
+    public isVersion10(): boolean {
+        return this._isVersion10;
+    }
+    public isVersion12(): boolean {
+        return this._isVersion12;
+    }
+    public setVersion10(value: boolean): void {
+        this._isVersion10 = value;
+    }
+    public setVersion12(value: boolean): void {
+        this._isVersion12 = value;
+    }
+}
src/lib/spark/SparkSqlLexer.ts (new file, 1638 lines; diff suppressed)
src/lib/spark/SparkSqlListener.ts (new file, 3303 lines; diff suppressed)
src/lib/spark/SparkSqlParser.ts (new file, 27201 lines; diff suppressed)
src/lib/spark/SparkSqlVisitor.ts (new file, 2068 lines; diff suppressed)

src/lib/spark/base/SparkSqlBaseLexer.ts (new file, 25 lines)
@@ -0,0 +1,25 @@
+import { Lexer } from "antlr4";
+
+export default class SparkSqlBaseLexer extends Lexer {
+
+    isValidDecimal() {
+        let nextChar = this.fromCodePoint(this._input.LA(1));
+        return !(nextChar >= 'A' && nextChar <= 'Z' || nextChar >= '0' && nextChar <= '9' || nextChar == '_');
+    }
+
+    /**
+     * This method will be called when we see '/*' and try to match it as a bracketed comment.
+     * If the next character is '+', it should be parsed as a hint later, and we cannot match
+     * it as a bracketed comment.
+     *
+     * Returns true if the next character is '+'.
+     */
+    isHint() {
+        let nextChar = this.fromCodePoint(this._input.LA(1));
+        return nextChar == '+';
+    }
+
+    fromCodePoint(codePoint) {
+        return String.fromCodePoint(codePoint);
+    }
+}
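A hedged usage sketch of the hint behavior above (the import path and SQL are illustrative, not from this commit):

```ts
import SparkSQL from '../parser/spark'; // path assumed from this commit's layout

const spark = new SparkSQL();
// the '+' right after '/*' makes SparkSqlBaseLexer.isHint() return true,
// so the run is lexed as a hint rather than a plain bracketed comment
const tokens = spark.getAllTokens('SELECT /*+ BROADCAST(t) */ * FROM t;');
console.log(tokens.length > 0);
```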
@@ -1,21 +1,27 @@
-import { Token, Lexer } from 'antlr4';
-import { ParseTreeWalker } from 'antlr4/tree';
+import { ParseTreeWalker, CommonTokenStream } from 'antlr4';
+import type { Parser } from 'antlr4/src/antlr4';

 import ParserErrorListener, {
     ParserError,
     ErrorHandler,
     ParserErrorCollector,
 } from './parserErrorListener';

+interface IParser {
+    // Lost in type definition
+    ruleNames: string[];
+    // Customized in our parser
+    program(): any;
+}
+
 /**
  * Custom Parser class; subclasses need to extend it.
  */
-export default abstract class BasicParser<C = any> {
-    private _parser;
+export default abstract class BasicParser {
+    private _parser: IParser & Parser;

     public parse(
         input: string,
-        errorListener?: ErrorHandler,
+        errorListener?: ErrorHandler<any>,
     ) {
         const parser = this.createParser(input);
         this._parser = parser;
@@ -46,33 +52,31 @@ export default abstract class BasicParser<C = any> {
      * Create antlr4 Lexer object
      * @param input source string
      */
-    public abstract createLexer(input: string): Lexer;
+    public abstract createLexer(input: string);

     /**
      * Create Parser by lexer
      * @param lexer Lexer
      */
-    public abstract createParserFromLexer(lexer: Lexer);
+    public abstract createParserFromLexer(lexer);

     /**
-     * Visit parser tree
-     * @param parserTree
-     */
-    // public abstract visit(visitor: any, parserTree: any);
-
-    /**
-     * The source string
+     * Get all Tokens of input string
      * @param input string
+     * @returns Token[]
      */
-    public getAllTokens(input: string): Token[] {
-        return this.createLexer(input).getAllTokens();
+    public getAllTokens(input: string): string[] {
+        const lexer = this.createLexer(input);
+        const tokensStream = new CommonTokenStream(lexer);
+        tokensStream.fill();
+        return tokensStream.tokens;
     };

     /**
      * Get Parser instance by input string
      * @param input
      */
-    public createParser(input: string) {
+    public createParser(input: string): IParser & Parser {
         const lexer = this.createLexer(input);
         const parser: any = this.createParserFromLexer(lexer);
         parser.buildParseTrees = true;
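To illustrate the reworked token API, a sketch of consumer code (import path and SQL are illustrative): getAllTokens now fills a CommonTokenStream instead of calling Lexer.getAllTokens().

```ts
import GenericSQL from '../parser/generic'; // path assumed

const parser = new GenericSQL();
// internally: createLexer -> new CommonTokenStream(lexer) -> fill() -> tokens
const tokens = parser.getAllTokens('SELECT id FROM users;');
console.log(tokens.length);
```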
@@ -1,6 +1,4 @@
-import { Token, Recognizer } from 'antlr4';
-import { ErrorListener } from 'antlr4/error';
+import { Token, Recognizer, ErrorListener, RecognitionException } from 'antlr4';

 export interface ParserError {
     startLine: number;
     endLine: number;
@@ -9,8 +7,8 @@ export interface ParserError {
     message: string;
 }

-export interface SyntaxError {
-    recognizer: Recognizer;
+export interface SyntaxError<T> {
+    recognizer: Recognizer<T>;
     offendingSymbol: Token;
     line: number;
     charPositionInLine: number;
@@ -18,9 +16,13 @@ export interface SyntaxError {
     e: any;
 }

-export type ErrorHandler = (err: ParserError, errOption: SyntaxError) => void;
+type ErrorOffendingSymbol = {
+    text: string;
+};
+
+export type ErrorHandler<T> = (err: ParserError, errOption: SyntaxError<T>) => void;

-export class ParserErrorCollector extends ErrorListener {
+export class ParserErrorCollector extends ErrorListener<ErrorOffendingSymbol> {
     private _errors: ParserError[];

     constructor(error: ParserError[]) {
@@ -29,8 +31,8 @@ export class ParserErrorCollector extends ErrorListener {
     }

     syntaxError(
-        recognizer: Recognizer, offendingSymbol: Token, line: number,
-        charPositionInLine: number, msg: string, e: any,
+        recognizer: Recognizer<ErrorOffendingSymbol>, offendingSymbol: ErrorOffendingSymbol, line: number,
+        charPositionInLine: number, msg: string, e: RecognitionException,
     ) {
         let endCol = charPositionInLine + 1;
         if (offendingSymbol && offendingSymbol.text !== null) {
@@ -47,16 +49,16 @@ export class ParserErrorCollector extends ErrorListener {
     }


-export default class ParserErrorListener extends ErrorListener {
+export default class ParserErrorListener extends ErrorListener<ErrorOffendingSymbol> {
     private _errorHandler;

-    constructor(errorListener: ErrorHandler) {
+    constructor(errorListener: ErrorHandler<ErrorOffendingSymbol>) {
         super();
         this._errorHandler = errorListener;
     }

     syntaxError(
-        recognizer: Recognizer, offendingSymbol: Token, line: number,
+        recognizer: Recognizer<ErrorOffendingSymbol>, offendingSymbol: ErrorOffendingSymbol, line: number,
         charPositionInLine: number, msg: string, e: any,
     ) {
         let endCol = charPositionInLine + 1;
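A hedged sketch of wiring the now-generic ErrorHandler into a parse call (consumer code, not part of the commit; import paths and the malformed SQL are illustrative):

```ts
import FlinkSQL from '../parser/flinksql'; // path assumed
import { ParserError } from '../parser/common/parserErrorListener';

const parser = new FlinkSQL();
parser.parse('SELECT id FORM tb1;', (err: ParserError) => {
    // ParserError carries start/end line and column info plus the message
    console.error(`syntax error at line ${err.startLine}: ${err.message}`);
});
```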
@@ -1,17 +1,17 @@
-import { InputStream, CommonTokenStream, Lexer } from 'antlr4';
-import { FlinkSqlLexer } from '../lib/flinksql/FlinkSqlLexer';
-import { FlinkSqlParser } from '../lib/flinksql/FlinkSqlParser';
+import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
+import FlinkSqlLexer from '../lib/flinksql/FlinkSqlLexer';
+import FlinkSqlParser from '../lib/flinksql/FlinkSqlParser';
 import BasicParser from './common/basicParser';

 export default class FlinkSQL extends BasicParser {
-    public createLexer(input: string): Lexer {
-        const chars = new InputStream(input.toUpperCase()); // Some lexers only support uppercase tokens, so transform the input
-        const lexer = <unknown> new FlinkSqlLexer(chars) as Lexer;
+    public createLexer(input: string): FlinkSqlLexer {
+        const chars = new CharStream(input.toUpperCase()); // Some lexers only support uppercase tokens, so transform the input
+        const lexer = new FlinkSqlLexer(chars);
         return lexer;
     }
-    public createParserFromLexer(lexer: Lexer): any {
-        const tokenStream = new CommonTokenStream(lexer);
-        return new FlinkSqlParser(tokenStream);
+    public createParserFromLexer(lexer: Lexer): FlinkSqlParser {
+        const tokens = new CommonTokenStream(lexer);
+        const parser = new FlinkSqlParser(tokens);
+        return parser;
     }
 }
@@ -1,15 +1,15 @@
-import { InputStream, CommonTokenStream, Lexer } from 'antlr4';
-import { SqlLexer } from '../lib/generic/SqlLexer';
-import { SqlParser } from '../lib/generic/SqlParser';
+import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
+import SqlLexer from '../lib/generic/SqlLexer';
+import SqlParser from '../lib/generic/SqlParser';
 import BasicParser from './common/basicParser';

 export default class GenericSQL extends BasicParser {
-    public createLexer(input: string): Lexer {
-        const chars = new InputStream(input.toUpperCase()); // Some lexers only support uppercase tokens, so transform the input
-        const lexer = <unknown> new SqlLexer(chars) as Lexer;
+    public createLexer(input: string): SqlLexer {
+        const chars = new CharStream(input.toUpperCase()); // Some lexers only support uppercase tokens, so transform the input
+        const lexer = new SqlLexer(chars);
         return lexer;
     }
-    public createParserFromLexer(lexer: Lexer): any {
+    public createParserFromLexer(lexer: Lexer): SqlParser {
         const tokenStream = new CommonTokenStream(lexer);
         return new SqlParser(tokenStream);
     }
@@ -1,15 +1,15 @@
-import { InputStream, CommonTokenStream, Lexer } from 'antlr4';
-import { HiveSqlLexer } from '../lib/hive/HiveSqlLexer';
-import { HiveSql } from '../lib/hive/HiveSql';
+import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
+import HiveSqlLexer from '../lib/hive/HiveSqlLexer';
+import HiveSql from '../lib/hive/HiveSql';
 import BasicParser from './common/basicParser';

 export default class HiveSQL extends BasicParser {
-    public createLexer(input: string): Lexer {
-        const chars = new InputStream(input);
-        const lexer = <unknown> new HiveSqlLexer(chars) as Lexer;
+    public createLexer(input: string): HiveSqlLexer {
+        const chars = new CharStream(input);
+        const lexer = new HiveSqlLexer(chars);
         return lexer;
     }
-    public createParserFromLexer(lexer: Lexer): any {
+    public createParserFromLexer(lexer: Lexer): HiveSql {
         const tokenStream = new CommonTokenStream(lexer);
         return new HiveSql(tokenStream);
     }
|
|||||||
import { InputStream, CommonTokenStream, Lexer } from 'antlr4';
|
import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
|
||||||
import { PostgreSQLLexer } from '../lib/pgsql/PostgreSQLLexer';
|
|
||||||
import { PostgreSQLParser } from '../lib/pgsql/PostgreSQLParser';
|
|
||||||
|
|
||||||
import BasicParser from './common/basicParser';
|
import BasicParser from './common/basicParser';
|
||||||
|
import PostgreSQLLexer from '../lib/pgsql/PostgreSQLLexer';
|
||||||
|
import PostgreSQLParser from '../lib/pgsql/PostgreSQLParser';
|
||||||
|
|
||||||
export default class PostgresSQL extends BasicParser {
|
export default class PostgresSQL extends BasicParser {
|
||||||
public createLexer(input: string): Lexer {
|
public createLexer(input: string): PostgreSQLLexer {
|
||||||
const chars = new InputStream(input.toUpperCase());
|
const chars = new CharStream(input.toUpperCase());
|
||||||
const lexer = <unknown> new PostgreSQLLexer(chars) as Lexer;
|
const lexer = new PostgreSQLLexer(chars);
|
||||||
return lexer;
|
return lexer;
|
||||||
}
|
}
|
||||||
public createParserFromLexer(lexer: Lexer): any {
|
public createParserFromLexer(lexer: Lexer): any {
|
||||||
|
@@ -1,16 +1,16 @@
-import { InputStream, CommonTokenStream, Lexer } from 'antlr4';
+import { CharStream, CommonTokenStream } from 'antlr4';
-import { PlSqlLexer } from '../lib/plsql/PlSqlLexer';
-import { PlSqlParser } from '../lib/plsql/PlSqlParser';

 import BasicParser from './common/basicParser';
+import PlSqlLexer from '../lib/plsql/PlSqlLexer';
+import PlSqlParser from '../lib/plsql/PlSqlParser';

 export default class PLSQLParser extends BasicParser {
-    public createLexer(input: string): Lexer {
+    public createLexer(input: string): PlSqlLexer {
-        const chars = new InputStream(input.toUpperCase());
+        const chars = new CharStream(input.toUpperCase());
-        const lexer = <unknown> new PlSqlLexer(chars) as Lexer;
+        const lexer = new PlSqlLexer(chars);
         return lexer;
     }
-    public createParserFromLexer(lexer: Lexer): any {
+    public createParserFromLexer(lexer: PlSqlLexer): PlSqlParser {
         const tokenStream = new CommonTokenStream(lexer);
         return new PlSqlParser(tokenStream);
     }
@@ -1,12 +1,12 @@
-import { InputStream, CommonTokenStream, Lexer } from 'antlr4';
+import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
-import { SparkSqlLexer } from '../lib/spark/SparkSqlLexer';
-import { SparkSqlParser } from '../lib/spark/SparkSqlParser';
 import BasicParser from './common/basicParser';
+import SparkSqlLexer from '../lib/spark/SparkSqlLexer';
+import SparkSqlParser from '../lib/spark/SparkSqlParser';

 export default class SparkSQL extends BasicParser {
     public createLexer(input: string): Lexer {
-        const chars = new InputStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
+        const chars = new CharStream(input.toUpperCase()); // Some Lexer only support uppercase token, So you need transform
-        const lexer = <unknown> new SparkSqlLexer(chars) as Lexer;
+        const lexer = new SparkSqlLexer(chars);
         return lexer;
     }
     public createParserFromLexer(lexer: Lexer): any {
@@ -1,4 +1,4 @@
-import { FlinkSQL } from '../../../src';
+import FlinkSQL from '../../../src/parser/flinksql';

 describe('FlinkSQL Lexer tests', () => {
     const parser = new FlinkSQL();
@@ -7,6 +7,6 @@ describe('FlinkSQL Lexer tests', () => {
     const tokens = parser.getAllTokens(sql);

     test('token counts', () => {
-        expect(tokens.length).toBe(7);
+        expect(tokens.length - 1).toBe(7);
     });
 });
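The `tokens.length - 1` adjustment recurs in every lexer test in this commit: after the upgrade, `getAllTokens` evidently returns one extra trailing token, which matches ANTLR's implicit `<EOF>` token. A hedged sketch of filtering it out explicitly instead of adjusting the count (the example SQL is assumed; the real `sql` fixture is not shown in this hunk):

```typescript
import { Token } from 'antlr4';
import FlinkSQL from '../../../src/parser/flinksql';

const parser = new FlinkSQL();
const tokens = parser.getAllTokens('SELECT * FROM table1'); // assumed input
// Token.EOF is -1 across ANTLR runtimes; dropping the trailing <EOF> token
// is equivalent to the `tokens.length - 1` assertion used in the tests.
const withoutEof = tokens.filter((t) => t.type !== Token.EOF);
console.log(tokens.length - withoutEof.length); // 1 — only EOF was appended
```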
@@ -1,4 +1,6 @@
-import { FlinkSQL, FlinkSqlParserListener } from '../../../src';
+import FlinkSQL from '../../../src/parser/flinksql';
+import FlinkSqlParserListener from '../../../src/lib/flinksql/FlinkSqlParserListener';
+import { TableExpressionContext } from '../../../src/lib/flinksql/FlinkSqlParser';

 describe('Flink SQL Listener Tests', () => {
     const expectTableName = 'user1';
@@ -10,11 +12,16 @@ describe('Flink SQL Listener Tests', () => {
     test('Listener enterTableName', async () => {
         let result = '';
         class MyListener extends FlinkSqlParserListener {
-            enterTableExpression(ctx): void {
+            constructor() {
+                super()
+            }
+
+            enterTableExpression = (ctx: TableExpressionContext): void => {
                 result = ctx.getText().toLowerCase();
             }
         }
-        const listenTableName: any = new MyListener();
+        const listenTableName = new MyListener();

         await parser.listen(listenTableName, parserTree);
         expect(result).toBe(expectTableName);
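The switch from an `enterTableExpression(ctx) { ... }` method to an arrow-function class property is not cosmetic: the listeners generated by the 4.12 TypeScript target declare their callbacks as optional instance properties (roughly `enterTableExpression?: (ctx: TableExpressionContext) => void`), so the tree walker reads them per instance and a prototype method would never fire. A minimal sketch of the pattern, reusing the imports from the hunk above (the class name is hypothetical):

```typescript
import FlinkSqlParserListener from '../../../src/lib/flinksql/FlinkSqlParserListener';
import { TableExpressionContext } from '../../../src/lib/flinksql/FlinkSqlParser';

class TableNameCollector extends FlinkSqlParserListener {
    tableNames: string[] = [];

    // Must be an instance property, not a method: the generated base class
    // declares this member as an optional function-typed property.
    enterTableExpression = (ctx: TableExpressionContext): void => {
        this.tableNames.push(ctx.getText().toLowerCase());
    };
}
```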
@@ -1,4 +1,4 @@
-import { FlinkSQL } from '../../../src';
+import FlinkSQL from '../../../src/parser/flinksql';

 describe('FlinkSQL Syntax Tests', () => {
     const parser = new FlinkSQL();
@@ -22,6 +22,11 @@ describe('FlinkSQL Syntax Tests', () => {
         const result = parser.validate(sql);
         expect(result.length).toBe(0);
     });
+    test('Test simple Error Select Statement', () => {
+        const sql = `SELECTproduct, amount FROM;`;
+        const result = parser.validate(sql);
+        expect(result.length).toBe(1);
+    });
     test('Test Select Statement with where clause', () => {
         const sql = `SELECT * FROM person WHERE id = 200 OR id = 300;`;
         const result = parser.validate(sql);
@@ -217,7 +222,6 @@ describe('FlinkSQL Syntax Tests', () => {
     test('Test valid Double Line Comment statement', () => {
         const sql = `----test comment\n`;
         const result = parser.validate(sql);
-        console.log('res:', result);
         expect(result.length).toBe(0);
     });
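The new negative test exercises the other side of `validate`: for malformed input it returns a non-empty error list rather than throwing. A short hedged sketch (the shape of each error item is not shown in this diff, so only the count is inspected):

```typescript
import FlinkSQL from '../../../src/parser/flinksql';

const parser = new FlinkSQL();
// Deliberately malformed: `SELECTproduct` fuses keyword and column,
// and FROM names no table.
const errors = parser.validate(`SELECTproduct, amount FROM;`);
console.log(errors.length); // 1, per the test above
```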
@@ -1,4 +1,4 @@
-import { FlinkSQL } from '../../../../src';
+import FlinkSQL from "../../../../src/parser/flinksql";

 describe('FlinkSQL Create Table Syntax Tests', () => {
     const parser = new FlinkSQL();
@@ -1,4 +1,4 @@
-import { FlinkSQL } from '../../../../src';
+import FlinkSQL from "../../../../src/parser/flinksql";

 describe('FlinkSQL Create Table Syntax Tests', () => {
     const parser = new FlinkSQL();
@@ -1,4 +1,4 @@
-import { FlinkSQL } from '../../../../src';
+import FlinkSQL from '../../../../src/parser/flinksql';

 describe('FlinkSQL Create Table Syntax Tests', () => {
     const parser = new FlinkSQL();
@@ -1,4 +1,4 @@
-import { FlinkSQL } from '../../../../src';
+import FlinkSQL from "../../../../src/parser/flinksql";

 describe('FlinkSQL Create Table Syntax Tests', () => {
     const parser = new FlinkSQL();
@@ -1,4 +1,4 @@
-import { FlinkSQL } from '../../../../src';
+import FlinkSQL from "../../../../src/parser/flinksql";

 describe('FlinkSQL Create Table Syntax Tests', () => {
     const parser = new FlinkSQL();
@@ -1,4 +1,5 @@
-import { FlinkSQL, FlinkSqlParserVisitor } from '../../../src';
+import FlinkSQL from '../../../src/parser/flinksql';
+import FlinkSqlParserVisitor from '../../../src/lib/flinksql/FlinkSqlParserVisitor';

 describe('Flink SQL Visitor Tests', () => {
     const expectTableName = 'user1';
@@ -9,14 +10,11 @@ describe('Flink SQL Visitor Tests', () => {
         console.log('Parse error:', error);
     });

-    // console.log('Parser tree string:', parser.toString(parserTree));
-
     test('Visitor visitTableName', () => {
         let result = '';
-        class MyVisitor extends FlinkSqlParserVisitor {
+        class MyVisitor extends FlinkSqlParserVisitor<any>{
-            visitTableExpression(ctx): void {
+            visitTableExpression = (ctx): void => {
                 result = ctx.getText().toLowerCase();
-                super.visitTableExpression(ctx);
             }
         }
         const visitor: any = new MyVisitor();
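Two more TypeScript-target details surface here: generated visitors now take a result-type parameter (hence `FlinkSqlParserVisitor<any>`), and their `visitX` members are optional properties — which is why the generic-SQL visitor test further down calls `super.visitTableName?.(ctx)` with an optional call. A sketch combining both, with the result type narrowed instead of `any` (class name hypothetical):

```typescript
import FlinkSqlParserVisitor from '../../../src/lib/flinksql/FlinkSqlParserVisitor';

// `void` as the visitor's result type: this visitor only collects state
// as a side effect rather than computing a value per node.
class TableNameVisitor extends FlinkSqlParserVisitor<void> {
    result = '';

    visitTableExpression = (ctx): void => {
        this.result = ctx.getText().toLowerCase();
        // The base member is an optional property, hence the optional call.
        super.visitTableExpression?.(ctx);
    };
}
```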
@@ -1,4 +1,4 @@
-import { GenericSQL } from '../../../src/';
+import GenericSQL from '../../../src/parser/generic';

 describe('GenericSQL Lexer tests', () => {
     const mysqlParser = new GenericSQL();
@@ -7,6 +7,6 @@ describe('GenericSQL Lexer tests', () => {
     const tokens = mysqlParser.getAllTokens(sql);

     test('token counts', () => {
-        expect(tokens.length).toBe(12);
+        expect(tokens.length - 1).toBe(12);
     });
 });
@@ -1,4 +1,5 @@
-import { GenericSQL, SqlParserListener } from '../../../src';
+import GenericSQL from '../../../src/parser/generic';
+import SqlParserListener from '../../../src/lib/generic/SqlParserListener';

 describe('Generic SQL Listener Tests', () => {
     const expectTableName = 'user1';
@@ -10,7 +11,7 @@ describe('Generic SQL Listener Tests', () => {
     test('Listener enterTableName', async () => {
         let result = '';
         class MyListener extends SqlParserListener {
-            enterTableName(ctx): void {
+            enterTableName = (ctx): void => {
                 result = ctx.getText().toLowerCase();
             }
         }
@@ -1,4 +1,4 @@
-import { GenericSQL } from '../../../src';
+import GenericSQL from '../../../src/parser/generic';

 describe('Generic SQL Syntax Tests', () => {
     const parser = new GenericSQL();
@@ -1,4 +1,5 @@
-import { GenericSQL, SqlParserVisitor } from '../../../src';
+import GenericSQL from '../../../src/parser/generic';
+import SqlParserVisitor from '../../../src/lib/generic/SqlParserVisitor';

 describe('Generic SQL Visitor Tests', () => {
     const expectTableName = 'user1';
@@ -9,17 +10,19 @@ describe('Generic SQL Visitor Tests', () => {
         console.log('Parse error:', error);
     });

-    console.log('Parser tree string:', parser.toString(parserTree));
-
     test('Visitor visitTableName', () => {
         let result = '';
-        class MyVisitor extends SqlParserVisitor {
+        class MyVisitor extends SqlParserVisitor<any> {
-            visitTableName(ctx): void {
+            constructor() {
+                super();
+            }
+
+            visitTableName = (ctx): void => {
                 result = ctx.getText().toLowerCase();
-                super.visitTableName(ctx);
+                super.visitTableName?.(ctx);
             }
         }
-        const visitor: any = new MyVisitor();
+        const visitor = new MyVisitor();
         visitor.visit(parserTree);

         expect(result).toBe(expectTableName);
@@ -1,16 +1,16 @@
-import { HiveSQL } from '../../../src';
+import HiveSQL from '../../../src/parser/hive';

 describe('HiveSQL Lexer tests', () => {
     const parser = new HiveSQL();
     test('select token counts', () => {
         const sql = 'SELECT * FROM t1';
         const tokens = parser.getAllTokens(sql);
-        expect(tokens.length).toBe(4);
+        expect(tokens.length - 1).toBe(4);
     });

     test('select token counts', () => {
         const sql = 'show create table_name;';
         const tokens = parser.getAllTokens(sql);
-        expect(tokens.length).toBe(4);
+        expect(tokens.length - 1).toBe(4);
     });
 });
@@ -1,4 +1,6 @@
-import { HiveSQL, HiveSqlListener } from '../../../src';
+import HiveSqlListener from '../../../src/lib/hive/HiveSqlListener';
+import HiveSQL from '../../../src/parser/hive';
+

 describe('Hive SQL Listener Tests', () => {
     const parser = new HiveSQL();
@@ -9,7 +11,7 @@ describe('Hive SQL Listener Tests', () => {

     let result = '';
     class MyListener extends HiveSqlListener {
-        enterSelect_list(ctx): void {
+        enterSelect_list = (ctx): void => {
             result = ctx.getText();
         }
     }
@@ -23,7 +25,7 @@ describe('Hive SQL Listener Tests', () => {
     const parserTree = parser.parse(sql);
     let result = '';
     class MyListener extends HiveSqlListener {
-        enterDrop_stmt(ctx): void {
+        enterDrop_stmt = (ctx): void => {
             result = ctx.getText();
         }
     }
@@ -1,4 +1,4 @@
-import { HiveSQL } from '../../../src';
+import HiveSQL from '../../../src/parser/hive';

 describe('Hive SQL Syntax Tests', () => {
     const parser = new HiveSQL();
Some files were not shown because too many files have changed in this diff.