build: optimize cli and add eslint
This commit is contained in:
parent
c069f9f714
commit
d86a6e7638
31
.eslintrc.js
Normal file
31
.eslintrc.js
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
module.exports = {
|
||||||
|
'env': {
|
||||||
|
'browser': true,
|
||||||
|
'es6': true,
|
||||||
|
},
|
||||||
|
'extends': [
|
||||||
|
'google',
|
||||||
|
],
|
||||||
|
'globals': {
|
||||||
|
'Atomics': 'readonly',
|
||||||
|
'SharedArrayBuffer': 'readonly',
|
||||||
|
},
|
||||||
|
'parser': '@typescript-eslint/parser',
|
||||||
|
'parserOptions': {
|
||||||
|
'ecmaFeatures': {
|
||||||
|
},
|
||||||
|
'ecmaVersion': 11,
|
||||||
|
'sourceType': 'module',
|
||||||
|
},
|
||||||
|
'plugins': [
|
||||||
|
'@typescript-eslint',
|
||||||
|
],
|
||||||
|
'rules': {
|
||||||
|
'indent': ['error', 4],
|
||||||
|
'object-curly-spacing': ['error', 'always'],
|
||||||
|
'max-len': ['error', { 'ignoreComments': true }],
|
||||||
|
'require-jsdoc': 0,
|
||||||
|
'valid-jsdoc': 0,
|
||||||
|
'no-unused-vars': 0,
|
||||||
|
},
|
||||||
|
};
|
4
.gitignore
vendored
4
.gitignore
vendored
@ -1,4 +1,6 @@
|
|||||||
node_modules
|
node_modules
|
||||||
package-lock.json
|
package-lock.json
|
||||||
.DS_Store
|
.DS_Store
|
||||||
.vscode
|
.vscode
|
||||||
|
.history
|
||||||
|
lib/
|
@ -3,4 +3,8 @@ node_modules
|
|||||||
package-lock.json
|
package-lock.json
|
||||||
.DS_Store
|
.DS_Store
|
||||||
.git
|
.git
|
||||||
|
.github
|
||||||
|
.history
|
||||||
|
site
|
||||||
src/
|
src/
|
||||||
|
docs
|
@ -13,7 +13,6 @@
|
|||||||
2. SQL分割,根据`;`将sql分割为数组
|
2. SQL分割,根据`;`将sql分割为数组
|
||||||
3. 去除SQL中的的注释(目前支持`--`,`/**/`类型注释)
|
3. 去除SQL中的的注释(目前支持`--`,`/**/`类型注释)
|
||||||
|
|
||||||
|
|
||||||
## 用法
|
## 用法
|
||||||
|
|
||||||
### 过滤注释 / SQL分割
|
### 过滤注释 / SQL分割
|
||||||
@ -31,6 +30,7 @@ console.log(dtFilter.splitSql(sql));//分割sql
|
|||||||
```
|
```
|
||||||
|
|
||||||
### 校验hive sql语法
|
### 校验hive sql语法
|
||||||
|
|
||||||
``` javascript
|
``` javascript
|
||||||
const dtSqlParser=require("dt-sql-parser").parser;
|
const dtSqlParser=require("dt-sql-parser").parser;
|
||||||
|
|
||||||
@ -69,19 +69,23 @@ console.log(dtSqlParser.parseSyntax("selet * form",'hive'));
|
|||||||
### filter
|
### filter
|
||||||
|
|
||||||
#### function filterComments(sql:string):string
|
#### function filterComments(sql:string):string
|
||||||
|
|
||||||
过滤 `sql` 注释(支持`/*`和`--`)
|
过滤 `sql` 注释(支持`/*`和`--`)
|
||||||
|
|
||||||
#### function splitSql(sql:string):Array<string>
|
#### function splitSql(sql:string):Array<string>
|
||||||
|
|
||||||
自动去除注释,并且提取出各个 `sql`
|
自动去除注释,并且提取出各个 `sql`
|
||||||
|
|
||||||
### parser
|
### parser
|
||||||
|
|
||||||
#### function parseSyntax(sql:string|Array<string>, type?:string):Object|boolean
|
#### function parseSyntax(sql:string|Array<string>, type?:string):Object|boolean
|
||||||
|
|
||||||
校验 `sql` 语法,如果没错误,则返回 `false`,否则返回错误详细信息
|
校验 `sql` 语法,如果没错误,则返回 `false`,否则返回错误详细信息
|
||||||
|
|
||||||
可以提供一个含有两个字符串的数组,代表被光标分割的两个 `sql片段`
|
可以提供一个含有两个字符串的数组,代表被光标分割的两个 `sql片段`
|
||||||
|
|
||||||
#### function parserSql(sql:string|Array<string>, type?:string):Object
|
#### function parserSql(sql:string|Array<string>, type?:string):Object
|
||||||
|
|
||||||
解析 `sql` 语法,根据上下文提示补全字段与其它辅助信息
|
解析 `sql` 语法,根据上下文提示补全字段与其它辅助信息
|
||||||
|
|
||||||
可以提供一个含有两个字符串的数组,代表被光标分割的两个sql片段
|
可以提供一个含有两个字符串的数组,代表被光标分割的两个sql片段
|
||||||
@ -89,6 +93,7 @@ console.log(dtSqlParser.parseSyntax("selet * form",'hive'));
|
|||||||
### flinksqlParser
|
### flinksqlParser
|
||||||
|
|
||||||
#### function flinksqlParser (sql: sql): SyntaxError
|
#### function flinksqlParser (sql: sql): SyntaxError
|
||||||
|
|
||||||
校验 `flinksql` 语法。
|
校验 `flinksql` 语法。
|
||||||
|
|
||||||
>本项目文档不是很详细,也不准确(暂时没精力写),项目功能可以满足 hivesql,sql,impala,flinksql 的语法检查和提示功能。
|
>本项目文档不是很详细,也不准确(暂时没精力写),项目功能可以满足 hivesql,sql,impala,flinksql 的语法检查和提示功能。
|
||||||
@ -102,7 +107,7 @@ hive,impala语法解析文件来自[Hue](https://github.com/cloudera/hue)
|
|||||||
### ChangeLog
|
### ChangeLog
|
||||||
|
|
||||||
- 1.1.8 添加转义字符支持
|
- 1.1.8 添加转义字符支持
|
||||||
- 1.1.9 添加函数的中括号语法支持( split(nameList)[0] )
|
- 1.1.9 添加函数的中括号语法支持[ split(nameList](0) )
|
||||||
- 1.2.0 添加 ts,添加测试
|
- 1.2.0 添加 ts,添加测试
|
||||||
- 2.0.0 添加flinksql语法检查
|
- 2.0.0 添加flinksql语法检查
|
||||||
- 3.0.0 拆分hive,impala,集成最新 `HUE` 方案
|
- 3.0.0 拆分hive,impala,集成最新 `HUE` 方案
|
||||||
|
31
build/antlr4.js
Normal file
31
build/antlr4.js
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
const path = require('path');
|
||||||
|
const exec = require('child_process').exec;
|
||||||
|
|
||||||
|
const grammars = path.resolve(__dirname, '../src/grammar');
|
||||||
|
|
||||||
|
const languages = [
|
||||||
|
'generic',
|
||||||
|
'mysql',
|
||||||
|
// 'oracle',
|
||||||
|
// 'flink',
|
||||||
|
// 'hive',
|
||||||
|
// 'impala',
|
||||||
|
// 'libra',
|
||||||
|
// 'spark',
|
||||||
|
// 'tidb'
|
||||||
|
];
|
||||||
|
|
||||||
|
languages.forEach(language => {
|
||||||
|
|
||||||
|
exec(`npx antlr4-tool -l ts -o ${grammars}/${language}/parser ${grammars}/${language}/*.g4`, (error) => {
|
||||||
|
console.log('error:', error)
|
||||||
|
})
|
||||||
|
|
||||||
|
// const compiledResults = antlr4Tool.compile({
|
||||||
|
// language: 'ts', // Only support for JavaScript & TypeScript
|
||||||
|
// grammarFiles: [`${grammars}/${language}/*.g4`],
|
||||||
|
// outputDirectory: `${grammars}/${language}/parser`
|
||||||
|
// });
|
||||||
|
|
||||||
|
})
|
||||||
|
|
0
build/release.js
Normal file
0
build/release.js
Normal file
22
docs/Roadmap.md
Normal file
22
docs/Roadmap.md
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
# RoadMap
|
||||||
|
|
||||||
|
## Supported SQL Language
|
||||||
|
|
||||||
|
- Generic SQL
|
||||||
|
<https://github.com/mysql/mysql-workbench/tree/8.0/library/parsers/grammars>
|
||||||
|
- Hive SQL
|
||||||
|
- Impala SQL
|
||||||
|
- Spark SQL
|
||||||
|
- MySQL
|
||||||
|
- TSQL
|
||||||
|
- PLSQL
|
||||||
|
|
||||||
|
## TODO
|
||||||
|
|
||||||
|
- Unify parser generate to Antlr4
|
||||||
|
- Generic SQL
|
||||||
|
- Flink SQL
|
||||||
|
- Libra SQL
|
||||||
|
- Oracle SQL
|
||||||
|
- TiDB
|
||||||
|
MySQL Compatible Syntax
|
9
docs/Tutorials.md
Normal file
9
docs/Tutorials.md
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
# Tutorials
|
||||||
|
|
||||||
|
## Reference
|
||||||
|
|
||||||
|
- <https://tomassetti.me/writing-a-browser-based-editor-using-monaco-and-antlr/>
|
||||||
|
- [SQL](https://en.wikipedia.org/wiki/SQL)
|
||||||
|
- [FlinkSQL](https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/test/java/org/apache/flink/sql/parser/CreateTableLikeTest.java)
|
||||||
|
- [antlr4 grammar](https://github.com/antlr/grammars-v4/tree/master/sql)
|
||||||
|
- <https://github.com/apache/spark/blob/master/sql/catalyst/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBase.g4>
|
260
jest.config.js
260
jest.config.js
@ -2,186 +2,182 @@
|
|||||||
// https://jestjs.io/docs/en/configuration.html
|
// https://jestjs.io/docs/en/configuration.html
|
||||||
|
|
||||||
module.exports = {
|
module.exports = {
|
||||||
// All imported modules in your tests should be mocked automatically
|
// All imported modules in your tests should be mocked automatically
|
||||||
// automock: false,
|
// automock: false,
|
||||||
|
|
||||||
// Stop running tests after `n` failures
|
// Stop running tests after `n` failures
|
||||||
// bail: 0,
|
// bail: 0,
|
||||||
|
|
||||||
// Respect "browser" field in package.json when resolving modules
|
// Respect "browser" field in package.json when resolving modules
|
||||||
// browser: false,
|
// browser: false,
|
||||||
|
|
||||||
// The directory where Jest should store its cached dependency information
|
// The directory where Jest should store its cached dependency information
|
||||||
// cacheDirectory: "/private/var/folders/xr/54w2mws93hj3p3_ysc347flc0000gn/T/jest_dx",
|
// cacheDirectory: "/private/var/folders/xr/54w2mws93hj3p3_ysc347flc0000gn/T/jest_dx",
|
||||||
|
|
||||||
// Automatically clear mock calls and instances between every test
|
// Automatically clear mock calls and instances between every test
|
||||||
// clearMocks: false,
|
// clearMocks: false,
|
||||||
|
|
||||||
// Indicates whether the coverage information should be collected while executing the test
|
// Indicates whether the coverage information should be collected while executing the test
|
||||||
// collectCoverage: false,
|
// collectCoverage: false,
|
||||||
|
|
||||||
// An array of glob patterns indicating a set of files for which coverage information should be collected
|
// An array of glob patterns indicating a set of files for which coverage information should be collected
|
||||||
// collectCoverageFrom: null,
|
// collectCoverageFrom: null,
|
||||||
|
|
||||||
// The directory where Jest should output its coverage files
|
// The directory where Jest should output its coverage files
|
||||||
// coverageDirectory: null,
|
// coverageDirectory: null,
|
||||||
|
|
||||||
// An array of regexp pattern strings used to skip coverage collection
|
// An array of regexp pattern strings used to skip coverage collection
|
||||||
coveragePathIgnorePatterns: [
|
coveragePathIgnorePatterns: ["/node_modules/"],
|
||||||
"/node_modules/"
|
|
||||||
],
|
|
||||||
|
|
||||||
// A list of reporter names that Jest uses when writing coverage reports
|
// A list of reporter names that Jest uses when writing coverage reports
|
||||||
// coverageReporters: [
|
// coverageReporters: [
|
||||||
// "json",
|
// "json",
|
||||||
// "text",
|
// "text",
|
||||||
// "lcov",
|
// "lcov",
|
||||||
// "clover"
|
// "clover"
|
||||||
// ],
|
// ],
|
||||||
|
|
||||||
// An object that configures minimum threshold enforcement for coverage results
|
// An object that configures minimum threshold enforcement for coverage results
|
||||||
// coverageThreshold: null,
|
// coverageThreshold: null,
|
||||||
|
|
||||||
// A path to a custom dependency extractor
|
// A path to a custom dependency extractor
|
||||||
// dependencyExtractor: null,
|
// dependencyExtractor: null,
|
||||||
|
|
||||||
// Make calling deprecated APIs throw helpful error messages
|
// Make calling deprecated APIs throw helpful error messages
|
||||||
// errorOnDeprecated: false,
|
// errorOnDeprecated: false,
|
||||||
|
|
||||||
// Force coverage collection from ignored files using an array of glob patterns
|
// Force coverage collection from ignored files using an array of glob patterns
|
||||||
// forceCoverageMatch: [],
|
// forceCoverageMatch: [],
|
||||||
|
|
||||||
// A path to a module which exports an async function that is triggered once before all test suites
|
// A path to a module which exports an async function that is triggered once before all test suites
|
||||||
// globalSetup: null,
|
// globalSetup: null,
|
||||||
|
|
||||||
// A path to a module which exports an async function that is triggered once after all test suites
|
// A path to a module which exports an async function that is triggered once after all test suites
|
||||||
// globalTeardown: null,
|
// globalTeardown: null,
|
||||||
|
|
||||||
// A set of global variables that need to be available in all test environments
|
// A set of global variables that need to be available in all test environments
|
||||||
// globals: {},
|
globals: {
|
||||||
|
window: {},
|
||||||
|
},
|
||||||
|
|
||||||
// An array of directory names to be searched recursively up from the requiring module's location
|
// An array of directory names to be searched recursively up from the requiring module's location
|
||||||
// moduleDirectories: [
|
// moduleDirectories: [
|
||||||
// "node_modules"
|
// "node_modules"
|
||||||
// ],
|
// ],
|
||||||
|
|
||||||
// An array of file extensions your modules use
|
// An array of file extensions your modules use
|
||||||
// moduleFileExtensions: [
|
// moduleFileExtensions: [
|
||||||
// "js",
|
// "js",
|
||||||
// "json",
|
// "json",
|
||||||
// "jsx",
|
// "jsx",
|
||||||
// "ts",
|
// "ts",
|
||||||
// "tsx",
|
// "tsx",
|
||||||
// "node"
|
// "node"
|
||||||
// ],
|
// ],
|
||||||
|
|
||||||
// A map from regular expressions to module names that allow to stub out resources with a single module
|
// A map from regular expressions to module names that allow to stub out resources with a single module
|
||||||
// moduleNameMapper: {},
|
// moduleNameMapper: {},
|
||||||
|
|
||||||
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
|
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
|
||||||
// modulePathIgnorePatterns: [],
|
// modulePathIgnorePatterns: [],
|
||||||
|
|
||||||
// Activates notifications for test results
|
// Activates notifications for test results
|
||||||
// notify: false,
|
// notify: false,
|
||||||
|
|
||||||
// An enum that specifies notification mode. Requires { notify: true }
|
// An enum that specifies notification mode. Requires { notify: true }
|
||||||
// notifyMode: "failure-change",
|
// notifyMode: "failure-change",
|
||||||
|
|
||||||
// A preset that is used as a base for Jest's configuration
|
// A preset that is used as a base for Jest's configuration
|
||||||
// preset: null,
|
// preset: null,
|
||||||
|
|
||||||
// Run tests from one or more projects
|
// Run tests from one or more projects
|
||||||
// projects: null,
|
// projects: null,
|
||||||
|
|
||||||
// Use this configuration option to add custom reporters to Jest
|
// Use this configuration option to add custom reporters to Jest
|
||||||
// reporters: undefined,
|
// reporters: undefined,
|
||||||
|
|
||||||
// Automatically reset mock state between every test
|
// Automatically reset mock state between every test
|
||||||
// resetMocks: false,
|
// resetMocks: false,
|
||||||
|
|
||||||
// Reset the module registry before running each individual test
|
// Reset the module registry before running each individual test
|
||||||
// resetModules: false,
|
// resetModules: false,
|
||||||
|
|
||||||
// A path to a custom resolver
|
// A path to a custom resolver
|
||||||
// resolver: null,
|
// resolver: null,
|
||||||
|
|
||||||
// Automatically restore mock state between every test
|
// Automatically restore mock state between every test
|
||||||
// restoreMocks: false,
|
// restoreMocks: false,
|
||||||
|
|
||||||
// The root directory that Jest should scan for tests and modules within
|
// The root directory that Jest should scan for tests and modules within
|
||||||
// rootDir: null,
|
// rootDir: null,
|
||||||
|
|
||||||
// A list of paths to directories that Jest should use to search for files in
|
// A list of paths to directories that Jest should use to search for files in
|
||||||
// roots: [
|
// roots: [
|
||||||
// "<rootDir>"
|
// "<rootDir>"
|
||||||
// ],
|
// ],
|
||||||
|
|
||||||
// Allows you to use a custom runner instead of Jest's default test runner
|
// Allows you to use a custom runner instead of Jest's default test runner
|
||||||
// runner: "jest-runner",
|
// runner: "jest-runner",
|
||||||
|
|
||||||
// The paths to modules that run some code to configure or set up the testing environment before each test
|
// The paths to modules that run some code to configure or set up the testing environment before each test
|
||||||
// setupFiles: [],
|
// setupFiles: [],
|
||||||
|
|
||||||
// A list of paths to modules that run some code to configure or set up the testing framework before each test
|
// A list of paths to modules that run some code to configure or set up the testing framework before each test
|
||||||
// setupFilesAfterEnv: [],
|
// setupFilesAfterEnv: [],
|
||||||
|
|
||||||
// A list of paths to snapshot serializer modules Jest should use for snapshot testing
|
// A list of paths to snapshot serializer modules Jest should use for snapshot testing
|
||||||
// snapshotSerializers: [],
|
// snapshotSerializers: [],
|
||||||
|
|
||||||
// The test environment that will be used for testing
|
// The test environment that will be used for testing
|
||||||
testEnvironment: "node",
|
testEnvironment: "node",
|
||||||
|
|
||||||
// Options that will be passed to the testEnvironment
|
// Options that will be passed to the testEnvironment
|
||||||
// testEnvironmentOptions: {},
|
// testEnvironmentOptions: {},
|
||||||
|
|
||||||
// Adds a location field to test results
|
// Adds a location field to test results
|
||||||
// testLocationInResults: false,
|
// testLocationInResults: false,
|
||||||
|
|
||||||
// The glob patterns Jest uses to detect test files
|
// The glob patterns Jest uses to detect test files
|
||||||
// testMatch: [
|
// testMatch: [
|
||||||
// "**/__tests__/**/*.[jt]s?(x)",
|
// "**/__tests__/**/*.[jt]s?(x)",
|
||||||
// "**/?(*.)+(spec|test).[tj]s?(x)"
|
// "**/?(*.)+(spec|test).[tj]s?(x)"
|
||||||
// ],
|
// ],
|
||||||
|
|
||||||
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
|
// An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
|
||||||
testPathIgnorePatterns: [
|
testPathIgnorePatterns: ["/node_modules/"],
|
||||||
"/node_modules/"
|
|
||||||
],
|
|
||||||
|
|
||||||
// The regexp pattern or array of patterns that Jest uses to detect test files
|
// The regexp pattern or array of patterns that Jest uses to detect test files
|
||||||
// testRegex: [],
|
// testRegex: [],
|
||||||
|
|
||||||
// This option allows the use of a custom results processor
|
// This option allows the use of a custom results processor
|
||||||
// testResultsProcessor: null,
|
// testResultsProcessor: null,
|
||||||
|
|
||||||
// This option allows use of a custom test runner
|
// This option allows use of a custom test runner
|
||||||
// testRunner: "jasmine2",
|
// testRunner: "jasmine2",
|
||||||
|
|
||||||
// This option sets the URL for the jsdom environment. It is reflected in properties such as location.href
|
// This option sets the URL for the jsdom environment. It is reflected in properties such as location.href
|
||||||
// testURL: "http://localhost",
|
// testURL: "http://localhost",
|
||||||
|
|
||||||
// Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout"
|
// Setting this value to "fake" allows the use of fake timers for functions such as "setTimeout"
|
||||||
// timers: "real",
|
// timers: "real",
|
||||||
|
|
||||||
// A map from regular expressions to paths to transformers
|
// A map from regular expressions to paths to transformers
|
||||||
transform: {
|
transform: {
|
||||||
'^.+\\.(t|j)sx?$': 'ts-jest'
|
"^.+\\.(t|j)sx?$": "ts-jest",
|
||||||
},
|
},
|
||||||
|
|
||||||
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
|
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
|
||||||
transformIgnorePatterns: [
|
transformIgnorePatterns: ["/node_modules/"],
|
||||||
"/node_modules/"
|
|
||||||
],
|
|
||||||
|
|
||||||
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
|
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
|
||||||
// unmockedModulePathPatterns: undefined,
|
// unmockedModulePathPatterns: undefined,
|
||||||
|
|
||||||
// Indicates whether each individual test should be reported during the run
|
// Indicates whether each individual test should be reported during the run
|
||||||
// verbose: null,
|
// verbose: null,
|
||||||
|
|
||||||
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
|
// An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
|
||||||
// watchPathIgnorePatterns: [],
|
// watchPathIgnorePatterns: [],
|
||||||
|
|
||||||
// Whether to use watchman for file crawling
|
// Whether to use watchman for file crawling
|
||||||
// watchman: true,
|
// watchman: true,
|
||||||
};
|
};
|
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
File diff suppressed because one or more lines are too long
@ -1,887 +0,0 @@
|
|||||||
// Generated from ./grammar/sql.g4 by ANTLR 4.7.1
|
|
||||||
// jshint ignore: start
|
|
||||||
var antlr4 = require('antlr4/index');
|
|
||||||
// This class defines a complete generic visitor for a parse tree produced by sqlParser.
|
|
||||||
function sqlVisitor() {
|
|
||||||
antlr4.tree.ParseTreeVisitor.call(this);
|
|
||||||
return this;
|
|
||||||
}
|
|
||||||
sqlVisitor.prototype = Object.create(antlr4.tree.ParseTreeVisitor.prototype);
|
|
||||||
sqlVisitor.prototype.constructor = sqlVisitor;
|
|
||||||
// Visit a parse tree produced by sqlParser#singleStatement.
|
|
||||||
sqlVisitor.prototype.visitSingleStatement = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#singleExpression.
|
|
||||||
sqlVisitor.prototype.visitSingleExpression = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#singleTableIdentifier.
|
|
||||||
sqlVisitor.prototype.visitSingleTableIdentifier = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#singleFunctionIdentifier.
|
|
||||||
sqlVisitor.prototype.visitSingleFunctionIdentifier = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#singleDataType.
|
|
||||||
sqlVisitor.prototype.visitSingleDataType = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#singleTableSchema.
|
|
||||||
sqlVisitor.prototype.visitSingleTableSchema = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#statementDefault.
|
|
||||||
sqlVisitor.prototype.visitStatementDefault = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#use.
|
|
||||||
sqlVisitor.prototype.visitUse = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#createDatabase.
|
|
||||||
sqlVisitor.prototype.visitCreateDatabase = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#setDatabaseProperties.
|
|
||||||
sqlVisitor.prototype.visitSetDatabaseProperties = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#dropDatabase.
|
|
||||||
sqlVisitor.prototype.visitDropDatabase = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#createTable.
|
|
||||||
sqlVisitor.prototype.visitCreateTable = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#createHiveTable.
|
|
||||||
sqlVisitor.prototype.visitCreateHiveTable = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#createFlinkTable.
|
|
||||||
sqlVisitor.prototype.visitCreateFlinkTable = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#createTableLike.
|
|
||||||
sqlVisitor.prototype.visitCreateTableLike = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#analyze.
|
|
||||||
sqlVisitor.prototype.visitAnalyze = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#addTableColumns.
|
|
||||||
sqlVisitor.prototype.visitAddTableColumns = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#renameTable.
|
|
||||||
sqlVisitor.prototype.visitRenameTable = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#setTableProperties.
|
|
||||||
sqlVisitor.prototype.visitSetTableProperties = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#unsetTableProperties.
|
|
||||||
sqlVisitor.prototype.visitUnsetTableProperties = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#changeColumn.
|
|
||||||
sqlVisitor.prototype.visitChangeColumn = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#setTableSerDe.
|
|
||||||
sqlVisitor.prototype.visitSetTableSerDe = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#addTablePartition.
|
|
||||||
sqlVisitor.prototype.visitAddTablePartition = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#renameTablePartition.
|
|
||||||
sqlVisitor.prototype.visitRenameTablePartition = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#dropTablePartitions.
|
|
||||||
sqlVisitor.prototype.visitDropTablePartitions = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#setTableLocation.
|
|
||||||
sqlVisitor.prototype.visitSetTableLocation = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#recoverPartitions.
|
|
||||||
sqlVisitor.prototype.visitRecoverPartitions = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#dropTable.
|
|
||||||
sqlVisitor.prototype.visitDropTable = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#createView.
|
|
||||||
sqlVisitor.prototype.visitCreateView = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#createTempViewUsing.
|
|
||||||
sqlVisitor.prototype.visitCreateTempViewUsing = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#alterViewQuery.
|
|
||||||
sqlVisitor.prototype.visitAlterViewQuery = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#createFunction.
|
|
||||||
sqlVisitor.prototype.visitCreateFunction = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#dropFunction.
|
|
||||||
sqlVisitor.prototype.visitDropFunction = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#explain.
|
|
||||||
sqlVisitor.prototype.visitExplain = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#showTables.
|
|
||||||
sqlVisitor.prototype.visitShowTables = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#showTable.
|
|
||||||
sqlVisitor.prototype.visitShowTable = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#showDatabases.
|
|
||||||
sqlVisitor.prototype.visitShowDatabases = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#showTblProperties.
|
|
||||||
sqlVisitor.prototype.visitShowTblProperties = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#showColumns.
|
|
||||||
sqlVisitor.prototype.visitShowColumns = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#showPartitions.
|
|
||||||
sqlVisitor.prototype.visitShowPartitions = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#showFunctions.
|
|
||||||
sqlVisitor.prototype.visitShowFunctions = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#showCreateTable.
|
|
||||||
sqlVisitor.prototype.visitShowCreateTable = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#describeFunction.
|
|
||||||
sqlVisitor.prototype.visitDescribeFunction = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#describeDatabase.
|
|
||||||
sqlVisitor.prototype.visitDescribeDatabase = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#describeTable.
|
|
||||||
sqlVisitor.prototype.visitDescribeTable = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#refreshTable.
|
|
||||||
sqlVisitor.prototype.visitRefreshTable = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#refreshResource.
|
|
||||||
sqlVisitor.prototype.visitRefreshResource = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#cacheTable.
|
|
||||||
sqlVisitor.prototype.visitCacheTable = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#uncacheTable.
|
|
||||||
sqlVisitor.prototype.visitUncacheTable = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#clearCache.
|
|
||||||
sqlVisitor.prototype.visitClearCache = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#loadData.
|
|
||||||
sqlVisitor.prototype.visitLoadData = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#truncateTable.
|
|
||||||
sqlVisitor.prototype.visitTruncateTable = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#repairTable.
|
|
||||||
sqlVisitor.prototype.visitRepairTable = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#manageResource.
|
|
||||||
sqlVisitor.prototype.visitManageResource = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#failNativeCommand.
|
|
||||||
sqlVisitor.prototype.visitFailNativeCommand = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#setConfiguration.
|
|
||||||
sqlVisitor.prototype.visitSetConfiguration = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#resetConfiguration.
|
|
||||||
sqlVisitor.prototype.visitResetConfiguration = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#unsupportedHiveNativeCommands.
|
|
||||||
sqlVisitor.prototype.visitUnsupportedHiveNativeCommands = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#createTableHeader.
|
|
||||||
sqlVisitor.prototype.visitCreateTableHeader = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#bucketSpec.
|
|
||||||
sqlVisitor.prototype.visitBucketSpec = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#skewSpec.
|
|
||||||
sqlVisitor.prototype.visitSkewSpec = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#locationSpec.
|
|
||||||
sqlVisitor.prototype.visitLocationSpec = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#query.
|
|
||||||
sqlVisitor.prototype.visitQuery = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#insertOverwriteTable.
|
|
||||||
sqlVisitor.prototype.visitInsertOverwriteTable = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#insertIntoTable.
|
|
||||||
sqlVisitor.prototype.visitInsertIntoTable = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#insertOverwriteHiveDir.
|
|
||||||
sqlVisitor.prototype.visitInsertOverwriteHiveDir = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#insertOverwriteDir.
|
|
||||||
sqlVisitor.prototype.visitInsertOverwriteDir = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#partitionSpecLocation.
|
|
||||||
sqlVisitor.prototype.visitPartitionSpecLocation = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#partitionSpec.
|
|
||||||
sqlVisitor.prototype.visitPartitionSpec = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#partitionVal.
|
|
||||||
sqlVisitor.prototype.visitPartitionVal = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#describeFuncName.
|
|
||||||
sqlVisitor.prototype.visitDescribeFuncName = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#describeColName.
|
|
||||||
sqlVisitor.prototype.visitDescribeColName = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#ctes.
|
|
||||||
sqlVisitor.prototype.visitCtes = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#namedQuery.
|
|
||||||
sqlVisitor.prototype.visitNamedQuery = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#tableProvider.
|
|
||||||
sqlVisitor.prototype.visitTableProvider = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#tablePropertyList.
|
|
||||||
sqlVisitor.prototype.visitTablePropertyList = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#tableProperty.
|
|
||||||
sqlVisitor.prototype.visitTableProperty = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#tablePropertyKey.
|
|
||||||
sqlVisitor.prototype.visitTablePropertyKey = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#tablePropertyValue.
|
|
||||||
sqlVisitor.prototype.visitTablePropertyValue = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#constantList.
|
|
||||||
sqlVisitor.prototype.visitConstantList = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#nestedConstantList.
|
|
||||||
sqlVisitor.prototype.visitNestedConstantList = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#createFileFormat.
|
|
||||||
sqlVisitor.prototype.visitCreateFileFormat = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#tableFileFormat.
|
|
||||||
sqlVisitor.prototype.visitTableFileFormat = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#genericFileFormat.
|
|
||||||
sqlVisitor.prototype.visitGenericFileFormat = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#storageHandler.
|
|
||||||
sqlVisitor.prototype.visitStorageHandler = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#resource.
|
|
||||||
sqlVisitor.prototype.visitResource = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#singleInsertQuery.
|
|
||||||
sqlVisitor.prototype.visitSingleInsertQuery = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#multiInsertQuery.
|
|
||||||
sqlVisitor.prototype.visitMultiInsertQuery = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#queryOrganization.
|
|
||||||
sqlVisitor.prototype.visitQueryOrganization = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#multiInsertQueryBody.
|
|
||||||
sqlVisitor.prototype.visitMultiInsertQueryBody = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#queryTermDefault.
|
|
||||||
sqlVisitor.prototype.visitQueryTermDefault = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#setOperation.
|
|
||||||
sqlVisitor.prototype.visitSetOperation = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#queryPrimaryDefault.
|
|
||||||
sqlVisitor.prototype.visitQueryPrimaryDefault = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#table.
|
|
||||||
sqlVisitor.prototype.visitTable = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#inlineTableDefault1.
|
|
||||||
sqlVisitor.prototype.visitInlineTableDefault1 = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#subquery.
|
|
||||||
sqlVisitor.prototype.visitSubquery = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#sortItem.
|
|
||||||
sqlVisitor.prototype.visitSortItem = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#querySpecification.
|
|
||||||
sqlVisitor.prototype.visitQuerySpecification = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#hint.
|
|
||||||
sqlVisitor.prototype.visitHint = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#hintStatement.
|
|
||||||
sqlVisitor.prototype.visitHintStatement = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#fromClause.
|
|
||||||
sqlVisitor.prototype.visitFromClause = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#aggregation.
|
|
||||||
sqlVisitor.prototype.visitAggregation = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#groupingSet.
|
|
||||||
sqlVisitor.prototype.visitGroupingSet = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#pivotClause.
|
|
||||||
sqlVisitor.prototype.visitPivotClause = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#pivotColumn.
|
|
||||||
sqlVisitor.prototype.visitPivotColumn = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#pivotValue.
|
|
||||||
sqlVisitor.prototype.visitPivotValue = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#lateralView.
|
|
||||||
sqlVisitor.prototype.visitLateralView = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#setQuantifier.
|
|
||||||
sqlVisitor.prototype.visitSetQuantifier = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#relation.
|
|
||||||
sqlVisitor.prototype.visitRelation = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#joinRelation.
|
|
||||||
sqlVisitor.prototype.visitJoinRelation = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#joinType.
|
|
||||||
sqlVisitor.prototype.visitJoinType = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#joinCriteria.
|
|
||||||
sqlVisitor.prototype.visitJoinCriteria = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#sample.
|
|
||||||
sqlVisitor.prototype.visitSample = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#sampleByPercentile.
|
|
||||||
sqlVisitor.prototype.visitSampleByPercentile = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#sampleByRows.
|
|
||||||
sqlVisitor.prototype.visitSampleByRows = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#sampleByBucket.
|
|
||||||
sqlVisitor.prototype.visitSampleByBucket = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#sampleByBytes.
|
|
||||||
sqlVisitor.prototype.visitSampleByBytes = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#identifierList.
|
|
||||||
sqlVisitor.prototype.visitIdentifierList = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#identifierSeq.
|
|
||||||
sqlVisitor.prototype.visitIdentifierSeq = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#orderedIdentifierList.
|
|
||||||
sqlVisitor.prototype.visitOrderedIdentifierList = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#orderedIdentifier.
|
|
||||||
sqlVisitor.prototype.visitOrderedIdentifier = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#identifierCommentList.
|
|
||||||
sqlVisitor.prototype.visitIdentifierCommentList = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#identifierComment.
|
|
||||||
sqlVisitor.prototype.visitIdentifierComment = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#tableName.
|
|
||||||
sqlVisitor.prototype.visitTableName = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#aliasedQuery.
|
|
||||||
sqlVisitor.prototype.visitAliasedQuery = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#aliasedRelation.
|
|
||||||
sqlVisitor.prototype.visitAliasedRelation = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#inlineTableDefault2.
|
|
||||||
sqlVisitor.prototype.visitInlineTableDefault2 = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#tableValuedFunction.
|
|
||||||
sqlVisitor.prototype.visitTableValuedFunction = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#matchRecognize.
|
|
||||||
sqlVisitor.prototype.visitMatchRecognize = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#measureColumn.
|
|
||||||
sqlVisitor.prototype.visitMeasureColumn = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#condition1.
|
|
||||||
sqlVisitor.prototype.visitCondition1 = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#variable.
|
|
||||||
sqlVisitor.prototype.visitVariable = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#pattern1.
|
|
||||||
sqlVisitor.prototype.visitPattern1 = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#patternTerm.
|
|
||||||
sqlVisitor.prototype.visitPatternTerm = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#patternFactor.
|
|
||||||
sqlVisitor.prototype.visitPatternFactor = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#patternQuantifier.
|
|
||||||
sqlVisitor.prototype.visitPatternQuantifier = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#minRepeat.
|
|
||||||
sqlVisitor.prototype.visitMinRepeat = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#maxRepeat.
|
|
||||||
sqlVisitor.prototype.visitMaxRepeat = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#repeat.
|
|
||||||
sqlVisitor.prototype.visitRepeat = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#inlineTable.
|
|
||||||
sqlVisitor.prototype.visitInlineTable = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#functionTable.
|
|
||||||
sqlVisitor.prototype.visitFunctionTable = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#tableAlias.
|
|
||||||
sqlVisitor.prototype.visitTableAlias = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#rowFormatSerde.
|
|
||||||
sqlVisitor.prototype.visitRowFormatSerde = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#rowFormatDelimited.
|
|
||||||
sqlVisitor.prototype.visitRowFormatDelimited = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#tableIdentifier.
|
|
||||||
sqlVisitor.prototype.visitTableIdentifier = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#functionIdentifier.
|
|
||||||
sqlVisitor.prototype.visitFunctionIdentifier = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#namedExpression.
|
|
||||||
sqlVisitor.prototype.visitNamedExpression = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#namedExpressionSeq.
|
|
||||||
sqlVisitor.prototype.visitNamedExpressionSeq = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#expression.
|
|
||||||
sqlVisitor.prototype.visitExpression = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#logicalNot.
|
|
||||||
sqlVisitor.prototype.visitLogicalNot = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#predicated.
|
|
||||||
sqlVisitor.prototype.visitPredicated = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#exists.
|
|
||||||
sqlVisitor.prototype.visitExists = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#logicalBinary.
|
|
||||||
sqlVisitor.prototype.visitLogicalBinary = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#predicate.
|
|
||||||
sqlVisitor.prototype.visitPredicate = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#valueExpressionDefault.
|
|
||||||
sqlVisitor.prototype.visitValueExpressionDefault = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#comparison.
|
|
||||||
sqlVisitor.prototype.visitComparison = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#arithmeticBinary.
|
|
||||||
sqlVisitor.prototype.visitArithmeticBinary = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#arithmeticUnary.
|
|
||||||
sqlVisitor.prototype.visitArithmeticUnary = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#struct.
|
|
||||||
sqlVisitor.prototype.visitStruct = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#dereference.
|
|
||||||
sqlVisitor.prototype.visitDereference = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#simpleCase.
|
|
||||||
sqlVisitor.prototype.visitSimpleCase = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#columnReference.
|
|
||||||
sqlVisitor.prototype.visitColumnReference = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#rowConstructor.
|
|
||||||
sqlVisitor.prototype.visitRowConstructor = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
||||||
// Visit a parse tree produced by sqlParser#last.
|
|
||||||
sqlVisitor.prototype.visitLast = function (ctx) {
|
|
||||||
return this.visitChildren(ctx);
|
|
||||||
};
|
|
// Visit a parse tree produced by sqlParser#star.
sqlVisitor.prototype.visitStar = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#subscript.
sqlVisitor.prototype.visitSubscript = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#subqueryExpression.
sqlVisitor.prototype.visitSubqueryExpression = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#cast.
sqlVisitor.prototype.visitCast = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#constantDefault.
sqlVisitor.prototype.visitConstantDefault = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#lambda.
sqlVisitor.prototype.visitLambda = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#parenthesizedExpression.
sqlVisitor.prototype.visitParenthesizedExpression = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#extract.
sqlVisitor.prototype.visitExtract = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#functionCall.
sqlVisitor.prototype.visitFunctionCall = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#searchedCase.
sqlVisitor.prototype.visitSearchedCase = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#position.
sqlVisitor.prototype.visitPosition = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#first.
sqlVisitor.prototype.visitFirst = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#nullLiteral.
sqlVisitor.prototype.visitNullLiteral = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#intervalLiteral.
sqlVisitor.prototype.visitIntervalLiteral = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#typeConstructor.
sqlVisitor.prototype.visitTypeConstructor = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#numericLiteral.
sqlVisitor.prototype.visitNumericLiteral = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#booleanLiteral.
sqlVisitor.prototype.visitBooleanLiteral = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#stringLiteral.
sqlVisitor.prototype.visitStringLiteral = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#comparisonOperator.
sqlVisitor.prototype.visitComparisonOperator = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#arithmeticOperator.
sqlVisitor.prototype.visitArithmeticOperator = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#predicateOperator.
sqlVisitor.prototype.visitPredicateOperator = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#booleanValue.
sqlVisitor.prototype.visitBooleanValue = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#interval.
sqlVisitor.prototype.visitInterval = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#intervalField.
sqlVisitor.prototype.visitIntervalField = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#intervalValue.
sqlVisitor.prototype.visitIntervalValue = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#colPosition.
sqlVisitor.prototype.visitColPosition = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#complexDataType.
sqlVisitor.prototype.visitComplexDataType = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#primitiveDataType.
sqlVisitor.prototype.visitPrimitiveDataType = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#colTypeList.
sqlVisitor.prototype.visitColTypeList = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#colType.
sqlVisitor.prototype.visitColType = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#dtColTypeList.
sqlVisitor.prototype.visitDtColTypeList = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#dtColType.
sqlVisitor.prototype.visitDtColType = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#dtColIdentifier.
sqlVisitor.prototype.visitDtColIdentifier = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#complexColTypeList.
sqlVisitor.prototype.visitComplexColTypeList = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#complexColType.
sqlVisitor.prototype.visitComplexColType = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#whenClause.
sqlVisitor.prototype.visitWhenClause = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#windows.
sqlVisitor.prototype.visitWindows = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#namedWindow.
sqlVisitor.prototype.visitNamedWindow = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#windowRef.
sqlVisitor.prototype.visitWindowRef = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#windowDef.
sqlVisitor.prototype.visitWindowDef = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#windowFrame.
sqlVisitor.prototype.visitWindowFrame = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#frameBound.
sqlVisitor.prototype.visitFrameBound = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#qualifiedName.
sqlVisitor.prototype.visitQualifiedName = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#identifier.
sqlVisitor.prototype.visitIdentifier = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#unquotedIdentifier.
sqlVisitor.prototype.visitUnquotedIdentifier = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#quotedIdentifierAlternative.
sqlVisitor.prototype.visitQuotedIdentifierAlternative = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#quotedIdentifier.
sqlVisitor.prototype.visitQuotedIdentifier = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#decimalLiteral.
sqlVisitor.prototype.visitDecimalLiteral = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#integerLiteral.
sqlVisitor.prototype.visitIntegerLiteral = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#bigIntLiteral.
sqlVisitor.prototype.visitBigIntLiteral = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#smallIntLiteral.
sqlVisitor.prototype.visitSmallIntLiteral = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#tinyIntLiteral.
sqlVisitor.prototype.visitTinyIntLiteral = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#doubleLiteral.
sqlVisitor.prototype.visitDoubleLiteral = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#bigDecimalLiteral.
sqlVisitor.prototype.visitBigDecimalLiteral = function (ctx) {
    return this.visitChildren(ctx);
};

// Visit a parse tree produced by sqlParser#nonReserved.
sqlVisitor.prototype.visitNonReserved = function (ctx) {
    return this.visitChildren(ctx);
};

exports.sqlVisitor = sqlVisitor;
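The generated visitor only walks its children by default, so a concrete visitor is expected to subclass it and override the rules it cares about. A minimal sketch, assuming the antlr4 runtime and the generated flinksql lexer, parser and visitor are importable from the paths that lib/flinkParser.js in this commit uses; the `QualifiedNameCollector` name and the sample statement are hypothetical:

``` javascript
const antlr4 = require('antlr4');
const { sqlLexer } = require('../antlr4/flinksql/sqlLexer');
const { sqlParser } = require('../antlr4/flinksql/sqlParser');
const { sqlVisitor } = require('../antlr4/flinksql/sqlVisitor');

// Hypothetical visitor: collect every qualifiedName reached while walking the tree.
class QualifiedNameCollector extends sqlVisitor {
    constructor() {
        super();
        this.names = [];
    }
    visitQualifiedName(ctx) {
        this.names.push(ctx.getText());
        return this.visitChildren(ctx);
    }
}

const chars = new antlr4.InputStream('SELECT A FROM B');
const tokens = new antlr4.CommonTokenStream(new sqlLexer(chars));
const tree = new sqlParser(tokens).singleStatement();
const collector = new QualifiedNameCollector();
tree.accept(collector);
console.log(collector.names); // qualified names (tables, columns) found in the statement
```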
@ -1,521 +0,0 @@
|
|||||||
/*
|
|
||||||
* Generated by PEG.js 0.10.0.
|
|
||||||
*
|
|
||||||
* http://pegjs.org/
|
|
||||||
*/
|
|
||||||
(function (root, factory) {
|
|
||||||
if (typeof define === "function" && define.amd) {
|
|
||||||
define([], factory);
|
|
||||||
}
|
|
||||||
else if (typeof module === "object" && module.exports) {
|
|
||||||
module.exports = factory();
|
|
||||||
}
|
|
||||||
})(this, function () {
|
|
||||||
"use strict";
|
|
||||||
function peg$subclass(child, parent) {
|
|
||||||
function ctor() { this.constructor = child; }
|
|
||||||
ctor.prototype = parent.prototype;
|
|
||||||
child.prototype = new ctor();
|
|
||||||
}
|
|
||||||
function peg$SyntaxError(message, expected, found, location) {
|
|
||||||
this.message = message;
|
|
||||||
this.expected = expected;
|
|
||||||
this.found = found;
|
|
||||||
this.location = location;
|
|
||||||
this.name = "SyntaxError";
|
|
||||||
if (typeof Error.captureStackTrace === "function") {
|
|
||||||
Error.captureStackTrace(this, peg$SyntaxError);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
peg$subclass(peg$SyntaxError, Error);
|
|
||||||
peg$SyntaxError.buildMessage = function (expected, found) {
|
|
||||||
var DESCRIBE_EXPECTATION_FNS = {
|
|
||||||
literal: function (expectation) {
|
|
||||||
return "\"" + literalEscape(expectation.text) + "\"";
|
|
||||||
},
|
|
||||||
"class": function (expectation) {
|
|
||||||
var escapedParts = "", i;
|
|
||||||
for (i = 0; i < expectation.parts.length; i++) {
|
|
||||||
escapedParts += expectation.parts[i] instanceof Array
|
|
||||||
? classEscape(expectation.parts[i][0]) + "-" + classEscape(expectation.parts[i][1])
|
|
||||||
: classEscape(expectation.parts[i]);
|
|
||||||
}
|
|
||||||
return "[" + (expectation.inverted ? "^" : "") + escapedParts + "]";
|
|
||||||
},
|
|
||||||
any: function (expectation) {
|
|
||||||
return "any character";
|
|
||||||
},
|
|
||||||
end: function (expectation) {
|
|
||||||
return "end of input";
|
|
||||||
},
|
|
||||||
other: function (expectation) {
|
|
||||||
return expectation.description;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
function hex(ch) {
|
|
||||||
return ch.charCodeAt(0).toString(16).toUpperCase();
|
|
||||||
}
|
|
||||||
function literalEscape(s) {
|
|
||||||
return s
|
|
||||||
.replace(/\\/g, '\\\\')
|
|
||||||
.replace(/"/g, '\\"')
|
|
||||||
.replace(/\0/g, '\\0')
|
|
||||||
.replace(/\t/g, '\\t')
|
|
||||||
.replace(/\n/g, '\\n')
|
|
||||||
.replace(/\r/g, '\\r')
|
|
||||||
.replace(/[\x00-\x0F]/g, function (ch) { return '\\x0' + hex(ch); })
|
|
||||||
.replace(/[\x10-\x1F\x7F-\x9F]/g, function (ch) { return '\\x' + hex(ch); });
|
|
||||||
}
|
|
||||||
function classEscape(s) {
|
|
||||||
return s
|
|
||||||
.replace(/\\/g, '\\\\')
|
|
||||||
.replace(/\]/g, '\\]')
|
|
||||||
.replace(/\^/g, '\\^')
|
|
||||||
.replace(/-/g, '\\-')
|
|
||||||
.replace(/\0/g, '\\0')
|
|
||||||
.replace(/\t/g, '\\t')
|
|
||||||
.replace(/\n/g, '\\n')
|
|
||||||
.replace(/\r/g, '\\r')
|
|
||||||
.replace(/[\x00-\x0F]/g, function (ch) { return '\\x0' + hex(ch); })
|
|
||||||
.replace(/[\x10-\x1F\x7F-\x9F]/g, function (ch) { return '\\x' + hex(ch); });
|
|
||||||
}
|
|
||||||
function describeExpectation(expectation) {
|
|
||||||
return DESCRIBE_EXPECTATION_FNS[expectation.type](expectation);
|
|
||||||
}
|
|
||||||
function describeExpected(expected) {
|
|
||||||
var descriptions = new Array(expected.length), i, j;
|
|
||||||
for (i = 0; i < expected.length; i++) {
|
|
||||||
descriptions[i] = describeExpectation(expected[i]);
|
|
||||||
}
|
|
||||||
descriptions.sort();
|
|
||||||
if (descriptions.length > 0) {
|
|
||||||
for (i = 1, j = 1; i < descriptions.length; i++) {
|
|
||||||
if (descriptions[i - 1] !== descriptions[i]) {
|
|
||||||
descriptions[j] = descriptions[i];
|
|
||||||
j++;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
descriptions.length = j;
|
|
||||||
}
|
|
||||||
switch (descriptions.length) {
|
|
||||||
case 1:
|
|
||||||
return descriptions[0];
|
|
||||||
case 2:
|
|
||||||
return descriptions[0] + " or " + descriptions[1];
|
|
||||||
default:
|
|
||||||
return descriptions.slice(0, -1).join(", ")
|
|
||||||
+ ", or "
|
|
||||||
+ descriptions[descriptions.length - 1];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function describeFound(found) {
|
|
||||||
return found ? "\"" + literalEscape(found) + "\"" : "end of input";
|
|
||||||
}
|
|
||||||
return "Expected " + describeExpected(expected) + " but " + describeFound(found) + " found.";
|
|
||||||
};
|
|
||||||
function peg$parse(input, options) {
|
|
||||||
options = options !== void 0 ? options : {};
|
|
||||||
var peg$FAILED = {}, peg$startRuleIndices = { start: 0 }, peg$startRuleIndex = 0, peg$consts = [
|
|
||||||
function (union_stmt) {
|
|
||||||
return { lines, text: union_stmt };
|
|
||||||
},
|
|
||||||
peg$anyExpectation(),
|
|
||||||
function (word) { return word; },
|
|
||||||
function (words, comment) { return ''; },
|
|
||||||
function (words, quote) { return quote; },
|
|
||||||
";",
|
|
||||||
peg$literalExpectation(";", false),
|
|
||||||
function (words) { isSplit = true; return ";"; },
|
|
||||||
function (words, stmt) {
|
|
||||||
const text = words.join("") + stmt;
|
|
||||||
let index = Math.max(lines.length - 1, 0);
|
|
||||||
lines[index] = (lines[index] || '') + text;
|
|
||||||
if (isSplit) {
|
|
||||||
isSplit = false;
|
|
||||||
lines.push('');
|
|
||||||
}
|
|
||||||
return text;
|
|
||||||
},
|
|
||||||
function (stmt, other) {
|
|
||||||
const text = stmt.join("") + other.join("");
|
|
||||||
let index = Math.max(lines.length - 1, 0);
|
|
||||||
lines[index] = lines[index] + other.join("");
|
|
||||||
return text;
|
|
||||||
},
|
|
||||||
function (comment) {
|
|
||||||
return comment;
|
|
||||||
},
|
|
||||||
/^[^\r\n]/,
|
|
||||||
peg$classExpectation(["\r", "\n"], true, false),
|
|
||||||
function (start, words) {
|
|
||||||
return start + words.join("");
|
|
||||||
},
|
|
||||||
"*/",
|
|
||||||
peg$literalExpectation("*/", false),
|
|
||||||
function (start, word) { return word; },
|
|
||||||
function (start, words, end) { return start + words.join("") + end; },
|
|
||||||
"\"",
|
|
||||||
peg$literalExpectation("\"", false),
|
|
||||||
/^[^"]/,
|
|
||||||
peg$classExpectation(["\""], true, false),
|
|
||||||
function (start, words, end) { return start + words.join("") + end; },
|
|
||||||
"'",
|
|
||||||
peg$literalExpectation("'", false),
|
|
||||||
/^[^']/,
|
|
||||||
peg$classExpectation(["'"], true, false),
|
|
||||||
"--",
|
|
||||||
peg$literalExpectation("--", false),
|
|
||||||
/^[\r\n]/,
|
|
||||||
peg$classExpectation(["\r", "\n"], false, false),
|
|
||||||
"/*",
|
|
||||||
peg$literalExpectation("/*", false),
|
|
||||||
/^[ \t\r\n]/,
|
|
||||||
peg$classExpectation([" ", "\t", "\r", "\n"], false, false)
|
|
||||||
], peg$bytecode = [
|
|
||||||
peg$decode("%;!/' 8!: !! )"),
|
|
||||||
peg$decode("%$%$%%<;&=.##&&!&'#/6#1\"\"5!7!/($8\":\"\"! )(\"'#&'#0L*%%<;&=.##&&!&'#/6#1\"\"5!7!/($8\":\"\"! )(\"'#&'#&/j#%;\"/( 8!:#!\"\" ).H &%;%/( 8!:$!\"\" ).5 &%2%\"\"6%7&/' 8!:'!!\")/)$8\":(\"\"! )(\"'#&'#0\xCD*%$%%<;&=.##&&!&'#/6#1\"\"5!7!/($8\":\"\"! )(\"'#&'#0L*%%<;&=.##&&!&'#/6#1\"\"5!7!/($8\":\"\"! )(\"'#&'#&/j#%;\"/( 8!:#!\"\" ).H &%;%/( 8!:$!\"\" ).5 &%2%\"\"6%7&/' 8!:'!!\")/)$8\":(\"\"! )(\"'#&'#&/C#$1\"\"5!7!0(*1\"\"5!7!&/)$8\":)\"\"! )(\"'#&'#"),
|
|
||||||
peg$decode("%;$.# &;#/' 8!:*!! )"),
|
|
||||||
peg$decode("%;'/E#$4+\"\"5!7,0)*4+\"\"5!7,&/)$8\":-\"\"! )(\"'#&'#"),
|
|
||||||
peg$decode("%;)/\xA3#$%%<2.\"\"6.7/=.##&&!&'#/7#1\"\"5!7!/)$8\":0\"\"$ )(\"'#&'#0S*%%<2.\"\"6.7/=.##&&!&'#/7#1\"\"5!7!/)$8\":0\"\"$ )(\"'#&'#&/3$;*/*$8#:1##\"! )(#'#(\"'#&'#"),
|
|
||||||
peg$decode("%22\"\"6273/U#$44\"\"5!750)*44\"\"5!75&/9$22\"\"6273/*$8#:6##\"! )(#'#(\"'#&'#.e &%27\"\"6778/U#$49\"\"5!7:0)*49\"\"5!7:&/9$27\"\"6778/*$8#:6##\"! )(#'#(\"'#&'#"),
|
|
||||||
peg$decode(";'.G &;).A &22\"\"6273.5 &27\"\"6778.) &2%\"\"6%7&"),
|
|
||||||
peg$decode("2;\"\"6;7<"),
|
|
||||||
peg$decode("4=\"\"5!7>"),
|
|
||||||
peg$decode("2?\"\"6?7@"),
|
|
||||||
peg$decode("2.\"\"6.7/"),
|
|
||||||
peg$decode("$;,0#*;,&"),
|
|
||||||
peg$decode("4A\"\"5!7B")
|
|
||||||
], peg$currPos = 0, peg$savedPos = 0, peg$posDetailsCache = [{ line: 1, column: 1 }], peg$maxFailPos = 0, peg$maxFailExpected = [], peg$silentFails = 0, peg$result;
|
|
||||||
if ("startRule" in options) {
|
|
||||||
if (!(options.startRule in peg$startRuleIndices)) {
|
|
||||||
throw new Error("Can't start parsing from rule \"" + options.startRule + "\".");
|
|
||||||
}
|
|
||||||
peg$startRuleIndex = peg$startRuleIndices[options.startRule];
|
|
||||||
}
|
|
||||||
function text() {
|
|
||||||
return input.substring(peg$savedPos, peg$currPos);
|
|
||||||
}
|
|
||||||
function location() {
|
|
||||||
return peg$computeLocation(peg$savedPos, peg$currPos);
|
|
||||||
}
|
|
||||||
function expected(description, location) {
|
|
||||||
location = location !== void 0 ? location : peg$computeLocation(peg$savedPos, peg$currPos);
|
|
||||||
throw peg$buildStructuredError([peg$otherExpectation(description)], input.substring(peg$savedPos, peg$currPos), location);
|
|
||||||
}
|
|
||||||
function error(message, location) {
|
|
||||||
location = location !== void 0 ? location : peg$computeLocation(peg$savedPos, peg$currPos);
|
|
||||||
throw peg$buildSimpleError(message, location);
|
|
||||||
}
|
|
||||||
function peg$literalExpectation(text, ignoreCase) {
|
|
||||||
return { type: "literal", text: text, ignoreCase: ignoreCase };
|
|
||||||
}
|
|
||||||
function peg$classExpectation(parts, inverted, ignoreCase) {
|
|
||||||
return { type: "class", parts: parts, inverted: inverted, ignoreCase: ignoreCase };
|
|
||||||
}
|
|
||||||
function peg$anyExpectation() {
|
|
||||||
return { type: "any" };
|
|
||||||
}
|
|
||||||
function peg$endExpectation() {
|
|
||||||
return { type: "end" };
|
|
||||||
}
|
|
||||||
function peg$otherExpectation(description) {
|
|
||||||
return { type: "other", description: description };
|
|
||||||
}
|
|
||||||
function peg$computePosDetails(pos) {
|
|
||||||
var details = peg$posDetailsCache[pos], p;
|
|
||||||
if (details) {
|
|
||||||
return details;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
p = pos - 1;
|
|
||||||
while (!peg$posDetailsCache[p]) {
|
|
||||||
p--;
|
|
||||||
}
|
|
||||||
details = peg$posDetailsCache[p];
|
|
||||||
details = {
|
|
||||||
line: details.line,
|
|
||||||
column: details.column
|
|
||||||
};
|
|
||||||
while (p < pos) {
|
|
||||||
if (input.charCodeAt(p) === 10) {
|
|
||||||
details.line++;
|
|
||||||
details.column = 1;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
details.column++;
|
|
||||||
}
|
|
||||||
p++;
|
|
||||||
}
|
|
||||||
peg$posDetailsCache[pos] = details;
|
|
||||||
return details;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
function peg$computeLocation(startPos, endPos) {
|
|
||||||
var startPosDetails = peg$computePosDetails(startPos), endPosDetails = peg$computePosDetails(endPos);
|
|
||||||
return {
|
|
||||||
start: {
|
|
||||||
offset: startPos,
|
|
||||||
line: startPosDetails.line,
|
|
||||||
column: startPosDetails.column
|
|
||||||
},
|
|
||||||
end: {
|
|
||||||
offset: endPos,
|
|
||||||
line: endPosDetails.line,
|
|
||||||
column: endPosDetails.column
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
function peg$fail(expected) {
|
|
||||||
if (peg$currPos < peg$maxFailPos) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (peg$currPos > peg$maxFailPos) {
|
|
||||||
peg$maxFailPos = peg$currPos;
|
|
||||||
peg$maxFailExpected = [];
|
|
||||||
}
|
|
||||||
peg$maxFailExpected.push(expected);
|
|
||||||
}
|
|
||||||
function peg$buildSimpleError(message, location) {
|
|
||||||
return new peg$SyntaxError(message, null, null, location);
|
|
||||||
}
|
|
||||||
function peg$buildStructuredError(expected, found, location) {
|
|
||||||
return new peg$SyntaxError(peg$SyntaxError.buildMessage(expected, found), expected, found, location);
|
|
||||||
}
|
|
||||||
function peg$decode(s) {
|
|
||||||
var bc = new Array(s.length), i;
|
|
||||||
for (i = 0; i < s.length; i++) {
|
|
||||||
bc[i] = s.charCodeAt(i) - 32;
|
|
||||||
}
|
|
||||||
return bc;
|
|
||||||
}
|
|
||||||
function peg$parseRule(index) {
|
|
||||||
var bc = peg$bytecode[index], ip = 0, ips = [], end = bc.length, ends = [], stack = [], params, i;
|
|
||||||
while (true) {
|
|
||||||
while (ip < end) {
|
|
||||||
switch (bc[ip]) {
|
|
||||||
case 0:
|
|
||||||
stack.push(peg$consts[bc[ip + 1]]);
|
|
||||||
ip += 2;
|
|
||||||
break;
|
|
||||||
case 1:
|
|
||||||
stack.push(void 0);
|
|
||||||
ip++;
|
|
||||||
break;
|
|
||||||
case 2:
|
|
||||||
stack.push(null);
|
|
||||||
ip++;
|
|
||||||
break;
|
|
||||||
case 3:
|
|
||||||
stack.push(peg$FAILED);
|
|
||||||
ip++;
|
|
||||||
break;
|
|
||||||
case 4:
|
|
||||||
stack.push([]);
|
|
||||||
ip++;
|
|
||||||
break;
|
|
||||||
case 5:
|
|
||||||
stack.push(peg$currPos);
|
|
||||||
ip++;
|
|
||||||
break;
|
|
||||||
case 6:
|
|
||||||
stack.pop();
|
|
||||||
ip++;
|
|
||||||
break;
|
|
||||||
case 7:
|
|
||||||
peg$currPos = stack.pop();
|
|
||||||
ip++;
|
|
||||||
break;
|
|
||||||
case 8:
|
|
||||||
stack.length -= bc[ip + 1];
|
|
||||||
ip += 2;
|
|
||||||
break;
|
|
||||||
case 9:
|
|
||||||
stack.splice(-2, 1);
|
|
||||||
ip++;
|
|
||||||
break;
|
|
||||||
case 10:
|
|
||||||
stack[stack.length - 2].push(stack.pop());
|
|
||||||
ip++;
|
|
||||||
break;
|
|
||||||
case 11:
|
|
||||||
stack.push(stack.splice(stack.length - bc[ip + 1], bc[ip + 1]));
|
|
||||||
ip += 2;
|
|
||||||
break;
|
|
||||||
case 12:
|
|
||||||
stack.push(input.substring(stack.pop(), peg$currPos));
|
|
||||||
ip++;
|
|
||||||
break;
|
|
||||||
case 13:
|
|
||||||
ends.push(end);
|
|
||||||
ips.push(ip + 3 + bc[ip + 1] + bc[ip + 2]);
|
|
||||||
if (stack[stack.length - 1]) {
|
|
||||||
end = ip + 3 + bc[ip + 1];
|
|
||||||
ip += 3;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
end = ip + 3 + bc[ip + 1] + bc[ip + 2];
|
|
||||||
ip += 3 + bc[ip + 1];
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case 14:
|
|
||||||
ends.push(end);
|
|
||||||
ips.push(ip + 3 + bc[ip + 1] + bc[ip + 2]);
|
|
||||||
if (stack[stack.length - 1] === peg$FAILED) {
|
|
||||||
end = ip + 3 + bc[ip + 1];
|
|
||||||
ip += 3;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
end = ip + 3 + bc[ip + 1] + bc[ip + 2];
|
|
||||||
ip += 3 + bc[ip + 1];
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case 15:
|
|
||||||
ends.push(end);
|
|
||||||
ips.push(ip + 3 + bc[ip + 1] + bc[ip + 2]);
|
|
||||||
if (stack[stack.length - 1] !== peg$FAILED) {
|
|
||||||
end = ip + 3 + bc[ip + 1];
|
|
||||||
ip += 3;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
end = ip + 3 + bc[ip + 1] + bc[ip + 2];
|
|
||||||
ip += 3 + bc[ip + 1];
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case 16:
|
|
||||||
if (stack[stack.length - 1] !== peg$FAILED) {
|
|
||||||
ends.push(end);
|
|
||||||
ips.push(ip);
|
|
||||||
end = ip + 2 + bc[ip + 1];
|
|
||||||
ip += 2;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
ip += 2 + bc[ip + 1];
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case 17:
|
|
||||||
ends.push(end);
|
|
||||||
ips.push(ip + 3 + bc[ip + 1] + bc[ip + 2]);
|
|
||||||
if (input.length > peg$currPos) {
|
|
||||||
end = ip + 3 + bc[ip + 1];
|
|
||||||
ip += 3;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
end = ip + 3 + bc[ip + 1] + bc[ip + 2];
|
|
||||||
ip += 3 + bc[ip + 1];
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case 18:
|
|
||||||
ends.push(end);
|
|
||||||
ips.push(ip + 4 + bc[ip + 2] + bc[ip + 3]);
|
|
||||||
if (input.substr(peg$currPos, peg$consts[bc[ip + 1]].length) === peg$consts[bc[ip + 1]]) {
|
|
||||||
end = ip + 4 + bc[ip + 2];
|
|
||||||
ip += 4;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
end = ip + 4 + bc[ip + 2] + bc[ip + 3];
|
|
||||||
ip += 4 + bc[ip + 2];
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case 19:
|
|
||||||
ends.push(end);
|
|
||||||
ips.push(ip + 4 + bc[ip + 2] + bc[ip + 3]);
|
|
||||||
if (input.substr(peg$currPos, peg$consts[bc[ip + 1]].length).toLowerCase() === peg$consts[bc[ip + 1]]) {
|
|
||||||
end = ip + 4 + bc[ip + 2];
|
|
||||||
ip += 4;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
end = ip + 4 + bc[ip + 2] + bc[ip + 3];
|
|
||||||
ip += 4 + bc[ip + 2];
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case 20:
|
|
||||||
ends.push(end);
|
|
||||||
ips.push(ip + 4 + bc[ip + 2] + bc[ip + 3]);
|
|
||||||
if (peg$consts[bc[ip + 1]].test(input.charAt(peg$currPos))) {
|
|
||||||
end = ip + 4 + bc[ip + 2];
|
|
||||||
ip += 4;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
end = ip + 4 + bc[ip + 2] + bc[ip + 3];
|
|
||||||
ip += 4 + bc[ip + 2];
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case 21:
|
|
||||||
stack.push(input.substr(peg$currPos, bc[ip + 1]));
|
|
||||||
peg$currPos += bc[ip + 1];
|
|
||||||
ip += 2;
|
|
||||||
break;
|
|
||||||
case 22:
|
|
||||||
stack.push(peg$consts[bc[ip + 1]]);
|
|
||||||
peg$currPos += peg$consts[bc[ip + 1]].length;
|
|
||||||
ip += 2;
|
|
||||||
break;
|
|
||||||
case 23:
|
|
||||||
stack.push(peg$FAILED);
|
|
||||||
if (peg$silentFails === 0) {
|
|
||||||
peg$fail(peg$consts[bc[ip + 1]]);
|
|
||||||
}
|
|
||||||
ip += 2;
|
|
||||||
break;
|
|
||||||
case 24:
|
|
||||||
peg$savedPos = stack[stack.length - 1 - bc[ip + 1]];
|
|
||||||
ip += 2;
|
|
||||||
break;
|
|
||||||
case 25:
|
|
||||||
peg$savedPos = peg$currPos;
|
|
||||||
ip++;
|
|
||||||
break;
|
|
||||||
case 26:
|
|
||||||
params = bc.slice(ip + 4, ip + 4 + bc[ip + 3]);
|
|
||||||
for (i = 0; i < bc[ip + 3]; i++) {
|
|
||||||
params[i] = stack[stack.length - 1 - params[i]];
|
|
||||||
}
|
|
||||||
stack.splice(stack.length - bc[ip + 2], bc[ip + 2], peg$consts[bc[ip + 1]].apply(null, params));
|
|
||||||
ip += 4 + bc[ip + 3];
|
|
||||||
break;
|
|
||||||
case 27:
|
|
||||||
stack.push(peg$parseRule(bc[ip + 1]));
|
|
||||||
ip += 2;
|
|
||||||
break;
|
|
||||||
case 28:
|
|
||||||
peg$silentFails++;
|
|
||||||
ip++;
|
|
||||||
break;
|
|
||||||
case 29:
|
|
||||||
peg$silentFails--;
|
|
||||||
ip++;
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
throw new Error("Invalid opcode: " + bc[ip] + ".");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
if (ends.length > 0) {
|
|
||||||
end = ends.pop();
|
|
||||||
ip = ips.pop();
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return stack[0];
|
|
||||||
}
|
|
||||||
let lines = [];
|
|
||||||
let isSplit = false;
|
|
||||||
peg$result = peg$parseRule(peg$startRuleIndex);
|
|
||||||
if (peg$result !== peg$FAILED && peg$currPos === input.length) {
|
|
||||||
return peg$result;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
if (peg$result !== peg$FAILED && peg$currPos < input.length) {
|
|
||||||
peg$fail(peg$endExpectation());
|
|
||||||
}
|
|
||||||
throw peg$buildStructuredError(peg$maxFailExpected, peg$maxFailPos < input.length ? input.charAt(peg$maxFailPos) : null, peg$maxFailPos < input.length
|
|
||||||
? peg$computeLocation(peg$maxFailPos, peg$maxFailPos + 1)
|
|
||||||
: peg$computeLocation(peg$maxFailPos, peg$maxFailPos));
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return {
|
|
||||||
SyntaxError: peg$SyntaxError,
|
|
||||||
parse: peg$parse
|
|
||||||
};
|
|
||||||
});
|
|
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
@@ -1,84 +0,0 @@
"use strict";
// Licensed to Cloudera, Inc. under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. Cloudera, Inc. licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", { value: true });
/* eslint-disable */
/**
 * AUTOCOMPLETE_MODULES and SYNTAX_MODULES are generated, do not edit manually, see tools/jison/generateParsers.js
 */
const AUTOCOMPLETE_MODULES = {
    calcite: require("calcite/calciteAutocompleteParser"),
    druid: require("druid/druidAutocompleteParser"),
    elasticsearch: require("elasticsearch/elasticsearchAutocompleteParser"),
    flink: require("flink/flinkAutocompleteParser"),
    generic: require("generic/genericAutocompleteParser"),
    hive: require("hive/hiveAutocompleteParser"),
    impala: require("impala/impalaAutocompleteParser"),
    ksql: require("ksql/ksqlAutocompleteParser"),
    phoenix: require("phoenix/phoenixAutocompleteParser"),
    presto: require("presto/prestoAutocompleteParser")
};
const SYNTAX_MODULES = {
    calcite: require("calcite/calciteSyntaxParser"),
    druid: require("druid/druidSyntaxParser"),
    elasticsearch: require("elasticsearch/elasticsearchSyntaxParser"),
    flink: require("flink/flinkSyntaxParser"),
    generic: require("generic/genericSyntaxParser"),
    hive: require("hive/hiveSyntaxParser"),
    impala: require("impala/impalaSyntaxParser"),
    ksql: require("ksql/ksqlSyntaxParser"),
    phoenix: require("phoenix/phoenixSyntaxParser"),
    presto: require("presto/prestoSyntaxParser")
};
/* eslint-enable */
class SqlParserRepository {
    constructor() {
        this.modulePromises = {};
    }
    getParser(sourceType, parserType) {
        return __awaiter(this, void 0, void 0, function* () {
            if (!this.modulePromises[sourceType + parserType]) {
                const modules = parserType === 'Autocomplete' ? AUTOCOMPLETE_MODULES : SYNTAX_MODULES;
                this.modulePromises[sourceType + parserType] = new Promise((resolve, reject) => {
                    const targetModule = modules[sourceType] || modules.generic;
                    resolve(targetModule);
                });
            }
            return this.modulePromises[sourceType + parserType];
        });
    }
    getAutocompleter(sourceType) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.getParser(sourceType, 'Autocomplete');
        });
    }
    getSyntaxParser(sourceType) {
        return __awaiter(this, void 0, void 0, function* () {
            return this.getParser(sourceType, 'Syntax');
        });
    }
}
const sqlParserRepository = new SqlParserRepository();
exports.default = sqlParserRepository;
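A minimal consumption sketch for the repository above. The dialect string, the require path and the misspelled sample SQL are hypothetical, the parseSyntax call mirrors the `parseSyntax(preSql, sufSql, type, debug)` shape used in lib/parser.js further down, and whether the resolved module needs a `.default` unwrap depends on how it was bundled:

``` javascript
const sqlParserRepository = require('./sqlParserRepository').default;

async function checkSyntax(preSql, sufSql) {
    // Falls back to the generic parser when the dialect key is unknown.
    const syntaxParser = await sqlParserRepository.getSyntaxParser('hive');
    return syntaxParser.parseSyntax(preSql, sufSql, 'hive', false);
}

checkSyntax('slect id from tb1', '').then(err => console.log(err));
```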
@@ -1,75 +0,0 @@
"use strict";
// Licensed to Cloudera, Inc. under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. Cloudera, Inc. licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
Object.defineProperty(exports, "__esModule", { value: true });
/**
 * Calculates the Optimal String Alignment distance between two strings. Returns 0 when the strings are equal and the
 * distance when not; the distance is less than or equal to the length of the longest string.
 *
 * @param strA
 * @param strB
 * @param [ignoreCase]
 * @returns {number} The distance
 */
const stringDistance = function (strA, strB, ignoreCase) {
    if (ignoreCase) {
        strA = strA.toLowerCase();
        strB = strB.toLowerCase();
    }
    // TODO: Consider other algorithms for performance
    const strALength = strA.length;
    const strBLength = strB.length;
    if (strALength === 0) {
        return strBLength;
    }
    if (strBLength === 0) {
        return strALength;
    }
    const distances = new Array(strALength);
    let cost, deletion, insertion, substitution, transposition;
    for (let i = 0; i <= strALength; i++) {
        distances[i] = new Array(strBLength);
        distances[i][0] = i;
        for (let j = 1; j <= strBLength; j++) {
            if (!i) {
                distances[0][j] = j;
            }
            else {
                cost = strA[i - 1] === strB[j - 1] ? 0 : 1;
                deletion = distances[i - 1][j] + 1;
                insertion = distances[i][j - 1] + 1;
                substitution = distances[i - 1][j - 1] + cost;
                if (deletion <= insertion && deletion <= substitution) {
                    distances[i][j] = deletion;
                }
                else if (insertion <= deletion && insertion <= substitution) {
                    distances[i][j] = insertion;
                }
                else {
                    distances[i][j] = substitution;
                }
                if (i > 1 && j > 1 && strA[i] === strB[j - 1] && strA[i - 1] === strB[j]) {
                    transposition = distances[i - 2][j - 2] + cost;
                    if (transposition < distances[i][j]) {
                        distances[i][j] = transposition;
                    }
                }
            }
        }
    }
    return distances[strALength][strBLength];
};
exports.default = stringDistance;
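A quick sketch of the values this helper yields, assuming it is required from the old lib path; the helper is what backs "did you mean" style keyword suggestions:

``` javascript
const stringDistance = require('./stringDistance').default;

console.log(stringDistance('select', 'select'));       // 0, identical strings
console.log(stringDistance('usr', 'user'));            // 1, one insertion away
console.log(stringDistance('SELECT', 'select', true)); // 0, case ignored
```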
18 lib/index.js
@@ -1,8 +1,14 @@
 "use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+    if (k2 === undefined) k2 = k;
+    o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+    for (var p in m) if (p !== "default" && !exports.hasOwnProperty(p)) __createBinding(exports, m, p);
+};
 Object.defineProperty(exports, "__esModule", { value: true });
-const parser = require("./lib/parser");
-exports.parser = parser;
-const filter = require("./lib/filter");
-exports.filter = filter;
-const flinkParser_1 = require("./lib/flinkParser");
-exports.flinksqlParser = flinkParser_1.default;
+__exportStar(require("./core"), exports);
+__exportStar(require("./utils"), exports);
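After this change the package root flat re-exports whatever ./core and ./utils expose, instead of the parser, filter and flinksqlParser namespaces removed above. A hedged sketch of the new import style; the exact member names depend on what src/core and src/utils export in this commit, so the example only inspects the keys:

``` javascript
// Assumption: the package is installed as dt-sql-parser and built from this commit.
const dtSqlParser = require('dt-sql-parser');

console.log(Object.keys(dtSqlParser)); // every symbol re-exported from ./core and ./utils
```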
@@ -1,27 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const commentFilter = require("../core/comment");
/**
 * Filter out `--` style comments
 * @param {String} sql
 */
function filterComments(sql) {
    return commentFilter.parse(sql).text;
}
exports.filterComments = filterComments;
/**
 * Remove comments and surrounding whitespace
 * @param {String} sql
 */
function cleanSql(sql) {
    return filterComments(sql);
}
exports.cleanSql = cleanSql;
/**
 * Split SQL into individual statements
 * @param {String} sql
 */
function splitSql(sql) {
    return commentFilter.parse(sql).lines;
}
exports.splitSql = splitSql;
@@ -1,53 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const antlr4 = require("antlr4");
const error_1 = require("antlr4/error");
const sqlLexer_1 = require("../antlr4/flinksql/sqlLexer");
const sqlParser_1 = require("../antlr4/flinksql/sqlParser");
const utils_1 = require("../utils");
class SqlErrorListener extends error_1.ErrorListener {
    constructor() {
        super(...arguments);
        this.error = null;
    }
    syntaxError(recognizer, offendingSymbol, line, charPositionInLine, msg, e) {
        this.error = {
            line,
            column: charPositionInLine,
            token: offendingSymbol,
            errorMsg: msg
        };
    }
}
function parserSingle(sql) {
    if (!sql || !sql.trim()) {
        return null;
    }
    const inputStream = new antlr4.InputStream(sql.toUpperCase());
    const lexer = new sqlLexer_1.sqlLexer(inputStream);
    const tokenStream = new antlr4.CommonTokenStream(lexer);
    const parser = new sqlParser_1.sqlParser(tokenStream);
    parser.buildParseTrees = true;
    let listener = new SqlErrorListener();
    parser.addErrorListener(listener);
    parser.singleStatement();
    return listener.error;
}
function parserSyntax(sql) {
    let runSql = typeof sql == 'string' ? sql : sql.join('');
    const sqls = utils_1.splitSql(runSql);
    for (let i = 0, index = 0; i < sqls.length; i++) {
        let end = runSql[sqls[i]] == ';' ? sqls[i] : sqls[i] + 1;
        /**
         * The trailing semicolon is not included here
         */
        let sql = new Array(index).fill(' ').join('') + runSql.substring(index, end);
        let err = parserSingle(sql);
        if (err) {
            return err;
        }
        index = sqls[i] + 1;
    }
    return null;
}
exports.default = parserSyntax;
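For reference, a sketch of calling the removed module directly from its old lib path; the error object shape comes from SqlErrorListener above, and `null` means every statement parsed cleanly. The sample statement is hypothetical:

``` javascript
const flinksqlParser = require('./flinkParser').default;

const err = flinksqlParser('CREAT TABLE tb1 (id INT);');
if (err) {
    // { line, column, token, errorMsg } as recorded by SqlErrorListener
    console.log(`line ${err.line}, column ${err.column}: ${err.errorMsg}`);
} else {
    console.log('statement parsed without syntax errors');
}
```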
@@ -1,76 +0,0 @@
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
// import * as sqlSyntaxParser from '../core/sqlSyntaxParser';
const hiveSyntaxParser_1 = require("../core/parse/hive/hiveSyntaxParser");
const hiveAutocompleteParser_1 = require("../core/parse/hive/hiveAutocompleteParser");
const impalaSyntaxParser_1 = require("../core/parse/impala/impalaSyntaxParser");
const impalaAutocompleteParser_1 = require("../core/parse/impala/impalaAutocompleteParser");
const genericSyntaxParser_1 = require("../core/parse/generic/genericSyntaxParser");
const genericAutocompleteParser_1 = require("../core/parse/generic/genericAutocompleteParser");
function getSyntaxParser(type) {
    switch (type) {
        case sqlType.Hive: {
            return hiveSyntaxParser_1.default;
        }
        case sqlType.Impala: {
            return impalaSyntaxParser_1.default;
        }
        case sqlType.None: {
            return genericSyntaxParser_1.default;
        }
        default: {
            return hiveSyntaxParser_1.default;
        }
    }
}
function getAutoCompleteParser(type) {
    switch (type) {
        case sqlType.Hive: {
            return hiveAutocompleteParser_1.default;
        }
        case sqlType.Impala: {
            return impalaAutocompleteParser_1.default;
        }
        case sqlType.None: {
            return genericAutocompleteParser_1.default;
        }
        default: {
            return hiveAutocompleteParser_1.default;
        }
    }
}
var sqlType;
(function (sqlType) {
    sqlType["Hive"] = "hive";
    sqlType["None"] = "sql";
    sqlType["Impala"] = "impala";
})(sqlType || (sqlType = {}));
exports.sqlType = sqlType;
function sqlToParserArgs(sql) {
    let preSql = '', sufSql = '';
    if (Object.prototype.toString.call(sql) == '[object Array]') {
        preSql = sql[0];
        sufSql = sql[1];
    }
    else {
        preSql = sql;
    }
    return [preSql, sufSql];
}
/**
 * Validate syntax
 */
function parseSyntax(sql, type = sqlType.Hive) {
    const parserArgs = sqlToParserArgs(sql);
    console.log(getSyntaxParser(type));
    return getSyntaxParser(type).parseSyntax(parserArgs[0], parserArgs[1], type, false);
}
exports.parseSyntax = parseSyntax;
/**
 * Autocomplete suggestions
 */
function parserSql(sql, type = sqlType.Hive) {
    const parserArgs = sqlToParserArgs(sql);
    return getAutoCompleteParser(type).parseSql(parserArgs[0], parserArgs[1], type, false);
}
exports.parserSql = parserSql;
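The sqlToParserArgs helper above is what allows the array form documented earlier: a [beforeCursor, afterCursor] pair instead of a single string. A small sketch of that call, with the old lib path and sample SQL as assumptions; the shape of the returned suggestion object depends on the underlying autocomplete parser:

``` javascript
const { parserSql } = require('./parser');

// Completion is computed at the point where the two fragments meet.
const result = parserSql(['select id, ', ' from tb1'], 'hive');
console.log(result);
```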
@ -1,467 +0,0 @@
|
|||||||
const fs = require('fs');
|
|
||||||
const path = require('path');
|
|
||||||
const exec = require('child_process').exec;
|
|
||||||
const LICENSE = '// Licensed to Cloudera, Inc. under one\n' +
|
|
||||||
'// or more contributor license agreements. See the NOTICE file\n' +
|
|
||||||
'// distributed with this work for additional information\n' +
|
|
||||||
'// regarding copyright ownership. Cloudera, Inc. licenses this file\n' +
|
|
||||||
'// to you under the Apache License, Version 2.0 (the\n' +
|
|
||||||
'// "License"); you may not use this file except in compliance\n' +
|
|
||||||
'// with the License. You may obtain a copy of the License at\n' +
|
|
||||||
'//\n' +
|
|
||||||
'// http://www.apache.org/licenses/LICENSE-2.0\n' +
|
|
||||||
'//\n' +
|
|
||||||
'// Unless required by applicable law or agreed to in writing, software\n' +
|
|
||||||
'// distributed under the License is distributed on an "AS IS" BASIS,\n' +
|
|
||||||
'// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n' +
|
|
||||||
'// See the License for the specific language governing permissions and\n' +
|
|
||||||
'// limitations under the License.\n';
|
|
||||||
const SQL_STATEMENTS_PARSER_JSDOC = '/**\n' +
|
|
||||||
' * @param {string} input\n' +
|
|
||||||
' *\n' +
|
|
||||||
' * @return {SqlStatementsParserResult}\n' +
|
|
||||||
' */\n';
|
|
||||||
const PARSER_FOLDER = path.join(process.cwd(), 'src/core/parse/');
|
|
||||||
const JISON_FOLDER = path.join(process.cwd(), 'src/jison/');
|
|
||||||
const SQL_PARSER_REPOSITORY_PATH = path.join(PARSER_FOLDER, 'sqlParserRepository.js');
|
|
||||||
const SYNTAX_PARSER_IMPORT_TEMPLATE = ' KEY: require("KEY/KEYSyntaxParser")';
|
|
||||||
const AUTOCOMPLETE_PARSER_IMPORT_TEMPLATE = ' KEY: require("KEY/KEYAutocompleteParser")';
|
|
||||||
const parserDefinitions = {
|
|
||||||
globalSearchParser: {
|
|
||||||
sources: [path.join(JISON_FOLDER, 'globalSearchParser.jison')],
|
|
||||||
target: path.join(JISON_FOLDER, 'globalSearchParser.jison'),
|
|
||||||
outputFolder: PARSER_FOLDER,
|
|
||||||
afterParse: contents => new Promise(resolve => {
|
|
||||||
resolve(LICENSE +
|
|
||||||
contents.replace('var globalSearchParser = ', "import SqlParseSupport from './sqlParseSupport';\n\nvar globalSearchParser = ") +
|
|
||||||
'\nexport default globalSearchParser;\n');
|
|
||||||
})
|
|
||||||
},
|
|
||||||
solrFormulaParser: {
|
|
||||||
sources: [path.join(JISON_FOLDER, 'solrFormulaParser.jison')],
|
|
||||||
target: path.join(JISON_FOLDER, 'solrFormulaParser.jison'),
|
|
||||||
outputFolder: PARSER_FOLDER,
|
|
||||||
afterParse: contents => new Promise(resolve => {
|
|
||||||
resolve(LICENSE + contents + 'export default solrFormulaParser;\n');
|
|
||||||
})
|
|
||||||
},
|
|
||||||
solrQueryParser: {
|
|
||||||
sources: [path.join(JISON_FOLDER, 'solrQueryParser.jison')],
|
|
||||||
target: path.join(JISON_FOLDER, 'solrQueryParser.jison'),
|
|
||||||
outputFolder: PARSER_FOLDER,
|
|
||||||
afterParse: contents => new Promise(resolve => {
|
|
||||||
resolve(LICENSE + contents + 'export default solrQueryParser;\n');
|
|
||||||
})
|
|
||||||
},
|
|
||||||
sqlStatementsParser: {
|
|
||||||
sources: [path.join(JISON_FOLDER, 'sqlStatementsParser.jison')],
|
|
||||||
target: path.join(JISON_FOLDER, 'sqlStatementsParser.jison'),
|
|
||||||
outputFolder: PARSER_FOLDER,
|
|
||||||
afterParse: contents => new Promise(resolve => {
|
|
||||||
resolve(LICENSE +
|
|
||||||
contents.replace('parse: function parse', SQL_STATEMENTS_PARSER_JSDOC + 'parse: function parse') +
|
|
||||||
'export default sqlStatementsParser;\n');
|
|
||||||
})
|
|
||||||
}
|
|
||||||
};
|
|
||||||
const mkdir = path => new Promise((resolve, reject) => {
|
|
||||||
if (fs.existsSync(path)) {
|
|
||||||
resolve();
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
fs.mkdir(path, err => {
|
|
||||||
if (err) {
|
|
||||||
reject(err);
|
|
||||||
}
|
|
||||||
resolve();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
const readFile = path => new Promise((resolve, reject) => {
|
|
||||||
fs.readFile(path, (err, buf) => {
|
|
||||||
if (err) {
|
|
||||||
reject(err);
|
|
||||||
}
|
|
||||||
resolve(buf ? buf.toString() : '');
|
|
||||||
});
|
|
||||||
});
|
|
||||||
const writeFile = (path, contents) => new Promise((resolve, reject) => {
|
|
||||||
fs.writeFile(path, contents, err => {
|
|
||||||
if (err) {
|
|
||||||
reject();
|
|
||||||
}
|
|
||||||
resolve();
|
|
||||||
});
|
|
||||||
});
|
|
||||||
const copyFile = (source, destination, contentsCallback) => new Promise((resolve, reject) => {
|
|
||||||
readFile(source)
|
|
||||||
.then(contents => {
|
|
||||||
writeFile(destination, contentsCallback ? contentsCallback(contents) : contents)
|
|
||||||
.then(resolve)
|
|
||||||
.catch(reject);
|
|
||||||
})
|
|
||||||
.catch(reject);
|
|
||||||
});
|
|
||||||
const deleteFile = path => {
|
|
||||||
fs.unlinkSync(path);
|
|
||||||
};
|
|
||||||
const execCmd = cmd => new Promise((resolve, reject) => {
|
|
||||||
exec(cmd, (err, stdout, stderr) => {
|
|
||||||
if (err) {
|
|
||||||
reject('stderr:\n' + stderr + '\n\nstdout:\n' + stdout);
|
|
||||||
}
|
|
||||||
resolve(stdout);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
const generateParser = parserName => new Promise((resolve, reject) => {
|
|
||||||
const parserConfig = parserDefinitions[parserName];
|
|
||||||
/**
|
|
||||||
* 合并jison文件,生成待编译文件
|
|
||||||
*/
|
|
||||||
const concatPromise = new Promise((resolve, reject) => {
|
|
||||||
if (parserConfig.sources.length > 1 && parserConfig.target) {
|
|
||||||
console.log('Concatenating files...');
|
|
||||||
const promises = parserConfig.sources.map(fileName => readFile(fileName));
|
|
||||||
Promise.all(promises)
|
|
||||||
.then(contents => {
|
|
||||||
writeFile(parserConfig.target, contents.join('')).then(() => {
|
|
||||||
resolve(parserConfig.target);
|
|
||||||
});
|
|
||||||
})
|
|
||||||
.catch(reject);
|
|
||||||
}
|
|
||||||
else if (parserConfig.sources.length === 1) {
|
|
||||||
resolve(parserConfig.sources[0]);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
reject('No jison source specified');
|
|
||||||
}
|
|
||||||
});
|
|
||||||
concatPromise
|
|
||||||
.then(targetPath => {
|
|
||||||
console.log(`Generate precomplier jison success(${targetPath})...`);
|
|
||||||
let jisonCommand = 'jison ' + targetPath;
|
|
||||||
if (parserConfig.lexer) {
|
|
||||||
jisonCommand += ' ' + parserConfig.lexer;
|
|
||||||
}
|
|
||||||
jisonCommand += ' -m js';
|
|
||||||
console.log('Generating parser...');
|
|
||||||
execCmd(jisonCommand)
|
|
||||||
.then(stdout => {
|
|
||||||
if (/\S/.test(stdout)) {
|
|
||||||
console.log('got output for: ' + jisonCommand);
|
|
||||||
console.log(stdout);
|
|
||||||
}
|
|
||||||
if (parserConfig.sources.length > 1) {
|
|
||||||
deleteFile(targetPath); // Remove concatenated file
|
|
||||||
}
|
|
||||||
console.log('Adjusting JS...');
|
|
||||||
/**
|
|
||||||
* 删除生成文件,复制到配置的文件夹中
|
|
||||||
*/
|
|
||||||
const generatedJsFileName = parserConfig.target
|
|
||||||
.replace('.jison', '.js')
|
|
||||||
.replace(/^.*\/([^/]+)$/, '$1');
|
|
||||||
readFile(generatedJsFileName)
|
|
||||||
.then(contents => {
|
|
||||||
parserConfig
|
|
||||||
.afterParse(contents)
|
|
||||||
.then(finalContents => {
|
|
||||||
writeFile(path.join(parserConfig.outputFolder, generatedJsFileName), finalContents)
|
|
||||||
.then(() => {
|
|
||||||
deleteFile(generatedJsFileName);
|
|
||||||
resolve();
|
|
||||||
})
|
|
||||||
.catch(reject);
|
|
||||||
})
|
|
||||||
.catch(reject);
|
|
||||||
})
|
|
||||||
.catch(reject);
|
|
||||||
})
|
|
||||||
.catch(reject);
|
|
||||||
})
|
|
||||||
.catch(reject);
|
|
||||||
});
|
|
||||||
let parsersToGenerate = [];
|
|
||||||
const invalid = [];
|
|
||||||
let all = false;
|
|
||||||
const listDir = folder => new Promise(resolve => {
|
|
||||||
fs.readdir(folder, (err, files) => {
|
|
||||||
resolve(files);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
/**
|
|
||||||
* 构造,添加子语言模块编译配置
|
|
||||||
* @param {*} fileIndex 文件的存在表
|
|
||||||
* @param {*} folder 对应的子语言文件夹
|
|
||||||
* @param {*} sharedFiles 子语言核心jison文件
|
|
||||||
* @param {*} autocomplete 是否为补全文件
|
|
||||||
*/
|
|
||||||
const findParser = (fileIndex, folder, sharedFiles, autocomplete) => {
|
|
||||||
const prefix = autocomplete ? 'autocomplete' : 'syntax';
|
|
||||||
if (fileIndex[prefix + '_header.jison'] && fileIndex[prefix + '_footer.jison']) {
|
|
||||||
const parserName = folder + (autocomplete ? 'AutocompleteParser' : 'SyntaxParser');
|
|
||||||
const parserDefinition = {
|
|
||||||
sources: [path.join(JISON_FOLDER, 'sql', folder, prefix + '_header.jison')].concat(sharedFiles),
|
|
||||||
lexer: path.join(JISON_FOLDER, 'sql', folder, '/sql.jisonlex'),
|
|
||||||
target: path.join(JISON_FOLDER, 'sql', folder, parserName + '.jison'),
|
|
||||||
sqlParser: autocomplete ? 'AUTOCOMPLETE' : 'SYNTAX',
|
|
||||||
outputFolder: path.join(PARSER_FOLDER, folder),
|
|
||||||
afterParse: contents => new Promise(resolve => {
|
|
||||||
resolve(LICENSE +
|
|
||||||
contents
|
|
||||||
.replace('var ' + parserName + ' = ', "import SqlParseSupport from " +
|
|
||||||
"'./sqlParseSupport';\n\nvar " +
|
|
||||||
parserName +
|
|
||||||
' = ')
|
|
||||||
.replace('loc: yyloc,', "loc: lexer.yylloc, ruleId: stack.slice(stack.length - 2, stack.length).join(''),") +
|
|
||||||
'\nexport default ' +
|
|
||||||
parserName +
|
|
||||||
';\n');
|
|
||||||
})
|
|
||||||
};
|
|
||||||
parserDefinition.sources.push(path.join(JISON_FOLDER, 'sql', folder, prefix + '_footer.jison'));
|
|
||||||
parserDefinitions[parserName] = parserDefinition;
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
console.log("Warn: Could not find '" +
|
|
||||||
prefix +
|
|
||||||
"_header.jison' or '" +
|
|
||||||
prefix +
|
|
||||||
"_footer.jison' in " +
|
|
||||||
JISON_FOLDER +
|
|
||||||
'sql/' +
|
|
||||||
folder +
|
|
||||||
'/');
|
|
||||||
}
|
|
||||||
};
|
|
||||||
/**
|
|
||||||
* 添加所有子语言编译配置
|
|
||||||
*/
|
|
||||||
const identifySqlParsers = () => new Promise(resolve => {
|
|
||||||
listDir(JISON_FOLDER + 'sql').then(files => {
|
|
||||||
const promises = [];
|
|
||||||
files.forEach(folder => {
|
|
||||||
const subLanguageJisonFolder = path.join(JISON_FOLDER, 'sql', folder);
|
|
||||||
promises.push(
|
|
||||||
/**
|
|
||||||
* 遍历具体的语言目录
|
|
||||||
*/
|
|
||||||
listDir(subLanguageJisonFolder).then(jisonFiles => {
|
|
||||||
/**
|
|
||||||
* 文件目录记录表
|
|
||||||
*/
|
|
||||||
const fileIndex = {};
|
|
||||||
jisonFiles.forEach(jisonFile => {
|
|
||||||
fileIndex[jisonFile] = true;
|
|
||||||
});
|
|
||||||
/**
|
|
||||||
* 挑选核心的jison文件(剥除autocomplate,syntax的功能文件)
|
|
||||||
*/
|
|
||||||
const sharedFiles = jisonFiles
|
|
||||||
.filter(jisonFile => jisonFile.indexOf('sql_') !== -1)
|
|
||||||
.map(jisonFile => path.join(subLanguageJisonFolder, jisonFile));
|
|
||||||
if (fileIndex['sql.jisonlex']) {
|
|
||||||
/**
|
|
||||||
* 添加子语言自动补全编译配置
|
|
||||||
* 加入了error.jison,为了在校验失败的情况下也能够提示?
|
|
||||||
*/
|
|
||||||
findParser(fileIndex, folder, sharedFiles, true);
|
|
||||||
/**
|
|
||||||
* 添加子语言语法检查配置
|
|
||||||
*/
|
|
||||||
findParser(fileIndex, folder, sharedFiles.filter(path => path.indexOf('_error.jison') === -1), false);
|
|
||||||
}
|
|
||||||
else {
|
|
||||||
console.log("Warn: Could not find 'sql.jisonlex' in " + JISON_FOLDER + 'sql/' + folder + '/');
|
|
||||||
}
|
|
||||||
}));
|
|
||||||
});
|
|
||||||
Promise.all(promises).then(resolve);
|
|
||||||
});
|
|
||||||
});

const copyTests = (source, target) => new Promise((resolve, reject) => {
  // Copy the test folder of an existing parser to the new target, rewriting
  // the parser names inside each copied test file along the way.
  const replaceRegexp = new RegExp(source + '(Autocomplete|Syntax)Parser', 'g');
  mkdir(PARSER_FOLDER + target)
    .then(() => {
      mkdir(PARSER_FOLDER + target + '/test')
        .then(() => {
          listDir(PARSER_FOLDER + source + '/test')
            .then(testFiles => {
              const copyPromises = [];
              testFiles.forEach(testFile => {
                copyPromises.push(copyFile(PARSER_FOLDER + source + '/test/' + testFile, PARSER_FOLDER + target + '/test/' + testFile.replace(source, target), contents => contents.replace(replaceRegexp, target + '$1Parser')));
              });
              Promise.all(copyPromises)
                .then(resolve)
                .catch(reject);
            })
            .catch(reject);
        })
        .catch(reject);
    })
    .catch(reject);
});

/**
 * Check for and configure a custom language.
 */
const prepareForNewParser = () => new Promise((resolve, reject) => {
  /**
   * Generate grammar files for a specifically named SQL dialect from an
   * existing sub-language folder, e.g.
   *   -new generic postgresql
   * generates postgresql grammar files from the generic folder.
   */
  if (process.argv.length === 3 && process.argv[0] === '-new') {
    process.argv.shift();
    const source = process.argv.shift();
    const target = process.argv.shift();
    console.log("Generating new parser '" + target + "' based on '" + source + "'...");
    process.argv.push(target);
    if (!Object.keys(parserDefinitions).some(key => {
      if (key.indexOf(source) === 0) {
        copyTests(source, target)
          .then(() => {
            mkdir(JISON_FOLDER + 'sql/' + target)
              .then(() => {
                listDir(JISON_FOLDER + 'sql/' + source).then(files => {
                  const copyPromises = [];
                  files.forEach(file => {
                    copyPromises.push(copyFile(JISON_FOLDER + 'sql/' + source + '/' + file, JISON_FOLDER + 'sql/' + target + '/' + file));
                  });
                  Promise.all(copyPromises).then(() => {
                    const autocompleteSources = [
                      'sql/' + target + '/autocomplete_header.jison'
                    ];
                    const syntaxSources = ['sql/' + target + '/syntax_header.jison'];
                    files.forEach(file => {
                      if (file.indexOf('sql_') === 0) {
                        autocompleteSources.push('sql/' + target + '/' + file);
                        syntaxSources.push('sql/' + target + '/' + file);
                      }
                    });
                    autocompleteSources.push('sql/' + target + '/autocomplete_footer.jison');
                    syntaxSources.push('sql/' + target + '/syntax_footer.jison');
                    mkdir('desktop/core/src/desktop/js/parse/sql/' + target).then(() => {
                      copyFile('desktop/core/src/desktop/js/parse/sql/' +
                        source +
                        '/sqlParseSupport.js', 'desktop/core/src/desktop/js/parse/sql/' +
                        target +
                        '/sqlParseSupport.js', contents => contents.replace(/parser\.yy\.activeDialect = '[^']+';/g, "parser.yy.activeDialect = '" + target + "';")).then(() => {
                        identifySqlParsers()
                          .then(resolve)
                          .catch(reject);
                      });
                    });
                  });
                });
              })
              .catch(err => {
                console.log(err);
              });
          })
          .catch(reject);
        return true;
      }
    })) {
      reject("No existing parser found for '" + source + "'");
    }
  }
  else {
    resolve();
  }
});

identifySqlParsers().then(() => {
  // The first two argv entries are the node binary and the script path.
  process.argv.shift();
  process.argv.shift();
  console.log('Generate sub language success...');
  prepareForNewParser().then(() => {
    console.log('Generate custom language success...');
    process.argv.forEach(arg => {
      if (arg === 'all') {
        /**
         * Build everything.
         */
        all = true;
      }
      else if (parserDefinitions[arg]) {
        /**
         * A specific build target.
         */
        parsersToGenerate.push(arg);
      }
      else {
        /**
         * Match build targets by keyword prefix.
         */
        let prefixFound = false;
        Object.keys(parserDefinitions).forEach(key => {
          if (key.indexOf(arg) === 0) {
            prefixFound = true;
            parsersToGenerate.push(key);
          }
        });
        if (!prefixFound) {
          invalid.push(arg);
        }
      }
    });
    if (all) {
      parsersToGenerate = Object.keys(parserDefinitions);
    }
    if (invalid.length) {
      console.log("No parser config found for: '" + invalid.join("', '") + "'");
      console.log('\nPossible options are:\n ' +
        ['all'].concat(Object.keys(parserDefinitions)).join('\n ') +
        '\n');
      return;
    }
    const parserCount = parsersToGenerate.length;
    let idx = 0;
    /**
     * Run the build, one parser at a time.
     */
    const generateRecursive = () => {
      idx++;
      if (parsersToGenerate.length) {
        const parserName = parsersToGenerate.pop();
        if (parserCount > 1) {
          console.log("Generating '" + parserName + "' (" + idx + '/' + parserCount + ')...');
        }
        else {
          console.log("Generating '" + parserName + "'...");
        }
        generateParser(parserName)
          .then(generateRecursive)
          .catch(error => {
            console.log(error);
            console.log('FAIL!');
          });
      }
      else {
        const autocompParsers = [];
        const syntaxParsers = [];
        console.log('Updating sqlParserRepository.js...');
        Object.keys(parserDefinitions).forEach(key => {
          if (parserDefinitions[key].sqlParser === 'AUTOCOMPLETE') {
            autocompParsers.push(AUTOCOMPLETE_PARSER_IMPORT_TEMPLATE.replace(/KEY/g, key.replace('AutocompleteParser', '')));
          }
          else if (parserDefinitions[key].sqlParser === 'SYNTAX') {
            syntaxParsers.push(SYNTAX_PARSER_IMPORT_TEMPLATE.replace(/KEY/g, key.replace('SyntaxParser', '')));
          }
        });
        readFile(SQL_PARSER_REPOSITORY_PATH).then(contents => {
          contents = contents.replace(/const SYNTAX_MODULES = [^}]+}/, 'const SYNTAX_MODULES = {\n' + syntaxParsers.sort().join(',\n') + '\n}');
          contents = contents.replace(/const AUTOCOMPLETE_MODULES = [^}]+}/, 'const AUTOCOMPLETE_MODULES = {\n' + autocompParsers.sort().join(',\n') + '\n}');
          writeFile(SQL_PARSER_REPOSITORY_PATH, contents).then(() => {
            console.log('Done!\n');
          });
        });
      }
    };
    /**
     * Concentrate on the big job: kick off the build.
     */
    generateRecursive();
  });
});

/* eslint-enable no-restricted-syntax */
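Taken together, the argument handling above lets the script be driven by `all`, by an exact definition name, or by a prefix. A minimal standalone sketch of the prefix branch (the keys below are hypothetical; the real `parserDefinitions` map is assembled elsewhere in the script):

```typescript
// Hypothetical definition keys, for illustration only.
const parserDefinitions: Record<string, object> = {
    impalaAutocompleteParser: {},
    impalaSyntaxParser: {},
    hiveAutocompleteParser: {},
};

const parsersToGenerate: string[] = [];
const arg = 'impala';

// Same prefix match as the `else` branch above: every definition whose key
// starts with the CLI argument is queued for generation.
Object.keys(parserDefinitions).forEach(key => {
    if (key.indexOf(arg) === 0) {
        parsersToGenerate.push(key);
    }
});

console.log(parsersToGenerate); // ['impalaAutocompleteParser', 'impalaSyntaxParser']
```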
@@ -1,15 +1,15 @@
 "use strict";
 Object.defineProperty(exports, "__esModule", { value: true });
+exports.splitSql = exports.replaceStrFormIndexArr = void 0;
 function replaceStrFormIndexArr(str, replaceStr, indexArr) {
-    let arr = [];
-    let result = "";
+    let result = '';
     let index = 0;
     if (!indexArr || indexArr.length < 1) {
         return str;
     }
     for (let i = 0; i < indexArr.length; i++) {
-        let indexItem = indexArr[i];
-        let begin = indexItem.begin;
+        const indexItem = indexArr[i];
+        const begin = indexItem.begin;
         result = result + str.substring(index, begin) + replaceStr;
         index = indexItem.end + 1;
         if (i == indexArr.length - 1) {
@@ -37,10 +37,10 @@ function splitSql(sql) {
     }
     // Handle quoted strings
     function quoteToken(parser, sql) {
-        let queue = parser.queue;
-        let endsWith = queue[queue.length - 1];
+        const queue = parser.queue;
+        const endsWith = queue[queue.length - 1];
         if (endsWith == '\'' || endsWith == '"') {
-            let nextToken = sql.indexOf(endsWith, parser.index + 1);
+            const nextToken = sql.indexOf(endsWith, parser.index + 1);
             if (nextToken != -1) {
                 parser.index = nextToken;
                 parser.queue = '';
@@ -57,7 +57,7 @@ function splitSql(sql) {
     function singleLineCommentToken(parser, sql) {
         let queue = parser.queue;
         if (queue.endsWith('--')) {
-            let nextToken = sql.indexOf('\n', parser.index + 1);
+            const nextToken = sql.indexOf('\n', parser.index + 1);
             if (nextToken != -1) {
                 parser.index = nextToken;
                 queue = '';
@@ -72,9 +72,9 @@ function splitSql(sql) {
     }
     // Handle multi-line comments
     function multipleLineCommentToken(parser, sql) {
-        let queue = parser.queue;
+        const queue = parser.queue;
         if (queue.endsWith('/*')) {
-            let nextToken = sql.indexOf('*/', parser.index + 1);
+            const nextToken = sql.indexOf('*/', parser.index + 1);
             if (nextToken != -1) {
                 parser.index = nextToken + 1;
                 parser.queue = '';
@@ -89,7 +89,7 @@ function splitSql(sql) {
         }
     }
     function splitToken(parser, sql) {
-        let queue = parser.queue;
+        const queue = parser.queue;
         if (queue.endsWith(';')) {
             pushSql(parser, sql);
         }
@@ -97,15 +97,20 @@ function splitSql(sql) {
             return null;
         }
     }
-    let parser = {
+    const parser = {
         index: 0,
         queue: '',
-        sqls: []
+        sqls: [],
     };
     for (parser.index = 0; parser.index < sql.length; parser.index++) {
-        let char = sql[parser.index];
+        const char = sql[parser.index];
         parser.queue += char;
-        let tokenFuncs = [quoteToken, singleLineCommentToken, multipleLineCommentToken, splitToken];
+        const tokenFuncs = [
+            quoteToken,
+            singleLineCommentToken,
+            multipleLineCommentToken,
+            splitToken,
+        ];
         for (let i = 0; i < tokenFuncs.length; i++) {
            tokenFuncs[i](parser, sql);
        }
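The hunks above only switch the compiled `splitSql` helpers from `let` to `const` and reformat the token-function list; behaviour is unchanged. For orientation, a small usage sketch (the import path is an assumption, as the diff does not show where the compiled module ends up under `lib/`):

```typescript
// Assumed path; adjust to wherever the compiled splitSql module actually lives.
import { splitSql } from './lib/utils';

const sql = 'select * from a;\nselect * from b';

// Per the updated utils tests later in this commit, splitSql reports where each
// statement ends: the index of every terminating ';' plus the index of the last
// character when the final statement has no terminator.
console.log(splitSql(sql));
```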
29 package.json
@@ -1,33 +1,40 @@
 {
   "name": "dt-sql-parser",
   "version": "3.0.4",
-  "description": "sql,hive,parser ",
+  "description": "There are some sql parsers in javascript and generated by antlr4",
   "keywords": [
-    "hive",
-    "hql",
     "sql",
-    "parser"
+    "parser",
+    "monaco-editor",
+    "hive",
+    "spark",
+    "flink",
+    "impala",
+    "bigdata"
   ],
   "main": "lib/index.js",
   "scripts": {
-    "build:parse": "pegjs -o core/astParser.js peg/nquery.pegjs ",
-    "build:filter": "pegjs -o core/comment.js peg/comment.pegjs ",
-    "build:syntax": "node ./src/scripts/generateParsers.js impala",
+    "build:parser": "node build/antlr4.js",
     "build": "npm test && rm -rf lib && tsc",
+    "eslint": "eslint ./src/**/*.ts",
+    "check-types": "tsc --skipLibCheck",
     "test": "jest"
   },
-  "author": "xiaokang",
+  "author": "dt-insight-front",
   "license": "ISC",
   "devDependencies": {
     "@types/jest": "^24.0.13",
+    "@typescript-eslint/eslint-plugin": "^3.10.1",
+    "@typescript-eslint/parser": "^3.10.1",
+    "eslint": "^7.7.0",
+    "eslint-config-google": "^0.14.0",
     "jest": "^24.8.0",
-    "pegjs": "^0.10.0",
     "ts-jest": "^24.1.0",
     "typescript": "^3.6.3"
   },
-  "git repository": "https://github.com/HSunboy/dt-sql-parser",
+  "git repository": "https://github.com/DTStack/dt-sql-parser",
   "dependencies": {
-    "@types/antlr4": "^4.7.0",
+    "@types/antlr4": "^4.7.2",
     "antlr4": "^4.7.2"
   }
 }
@@ -1,223 +0,0 @@
import * as dtSqlParser from '../src';
import { SyntaxResult } from '../src/core/sqlSyntaxParser';
const parser = dtSqlParser.parser;
const filter = dtSqlParser.filter;
const flinksqlParser = dtSqlParser.flinksqlParser;


describe('complete test', () => {
    describe('hive', () => {
        test('complete result', () => {
            const sql = 'select id,name from `user` ;';
            const result = parser.parserSql([sql, ''], dtSqlParser.parser.sqlType.Hive);
            expect(result.locations).toBeInstanceOf(Array);
            expect(result.suggestKeywords).toBeInstanceOf(Array);
        });
        test('empty result', () => {
            const sql = 'i';
            const result = parser.parserSql([sql, ''], dtSqlParser.parser.sqlType.Hive);
            expect(result.locations).toBeInstanceOf(Array);
            expect(result.locations).toHaveLength(0);
            expect(result.suggestKeywords).toBeInstanceOf(Array);
        })
    })
})

describe('syntax test', () => {
    describe('impala', () => {
        test('no error', () => {
            const sql = 'select id,name from user1 ';
            const result = parser.parseSyntax([sql, ''], dtSqlParser.parser.sqlType.Impala);
            expect(result).toBe(false);
        });
        test('insert', () => {
            const sql = `insert into user1 (id, name) values (1 ,'a')`;
            const result = parser.parseSyntax([sql, ''], dtSqlParser.parser.sqlType.Impala);
            expect(result).toBe(false);
        });
        test('WITH SERDEPROPERTIES', () => {
            const sql =`CREATE TABLE ih.h_b_py_detail (
                contract_no STRING,
                region_code STRING,
                credit_code STRING
            )
            PARTITIONED BY (
                cdate STRING
            )
            ROW FORMAT DELIMITED FIELDS TERMINATED BY ',' LINES TERMINATED BY '\n'
            WITH SERDEPROPERTIES ('field.delim'=',', 'line.delim'='\n', 'serialization.format'=',')
            STORED AS TEXTFILE
            LOCATION 'hdfs://kudu1'
            TBLPROPERTIES ('last_modified_by'='anonymous', 'last_modified_time'='1577082098', 'skip.header.line.count'='1')
            lifecycle 888`;
            const result = parser.parseSyntax([sql, ''], dtSqlParser.parser.sqlType.Impala);
            expect(result).toBe(false);
        })
        test('left function', () => {
            const sql = `select left(a) from sa;`
            const result = parser.parseSyntax([sql, ''], dtSqlParser.parser.sqlType.Impala);
            expect(result).toBe(false);
        })
        test('create as select', () => {
            const sql = `create table partitions_yes partitioned by (year, month)
            as select s, year, month from partitions_no;`
            const result = parser.parseSyntax([sql, ''], dtSqlParser.parser.sqlType.Impala);
            expect(result).toBe(false);
        })
        test('show grant ', () => {
            const sql = `show grant role 18_112_a;`
            const result = parser.parseSyntax([sql, ''], dtSqlParser.parser.sqlType.Impala);
            expect(result).toBe(false);
        })
    });
    describe('hive', () => {
        test('no error', () => {
            const sql = 'select id,name from user1 ';
            const result = parser.parseSyntax([sql, ''], dtSqlParser.parser.sqlType.Hive);
            expect(result).toBe(false);
        });
        test('insert', () => {
            const sql = `insert into table user1 values (1, 'a'), (2, 'b'), (3, 'b')`;
            const result = parser.parseSyntax([sql, ''], dtSqlParser.parser.sqlType.Hive);
            expect(result).toBe(false);
        });
        test('select table should not be null', () => {
            const sql = 'select id,name from ';
            const result = parser.parseSyntax([sql, ''], dtSqlParser.parser.sqlType.Hive) as SyntaxResult;
            expect(result.loc).toEqual({
                first_line: 1,
                last_line: 1,
                first_column: 20,
                last_column: 20
            })
        });
        test('sql contains the wrong keyword', () => {
            const sql = `create table if not 1exists ods_order_header (
                order_header_id string comment 'order id'
                ,order_date bigint comment 'order date'
            )comment 'order table'
            PARTITIONED BY (ds string);`;
            const result = parser.parseSyntax([sql, ''], dtSqlParser.parser.sqlType.Hive) as SyntaxResult;
            expect(result.text).toBe('1exists');
            expect(result.loc).toEqual({
                first_line: 1,
                last_line: 1,
                first_column: 20,
                last_column: 27
            })
        });
    })
    describe('flinksql', () => {
        test('no error', () => {
            const sql = `select id from use1r.id;`;
            const result = flinksqlParser(sql);
            expect(result).toBeNull();
        });
        test('empty sql', () => {
            const sql = ``;
            const result = flinksqlParser(sql);
            expect(result).toBeNull();
        });
        test('sql comment', () => {
            const sql = `-- name asf
            -- type FlinkSQL
            -- author admin@dtstack.com
            -- create time 2019-06-12 18:11:05
            -- desc `;
            const result = flinksqlParser(sql);
            expect(result).toBeNull();
        });
        test('position', () => {
            const sql = `selec`;
            const result = flinksqlParser(sql);
            expect(result.token.start).toBe(0);
            expect(result.token.stop).toBe(4);
        });
        test('test lateral table', () => {
            const sql = `select
                id
            FROM
                userTable, LATERAL table(json(mess)) as t(ord, name_id);`;
            const result = flinksqlParser(sql);
            expect(result).toBeNull();
        });
        test('syntax error', () => {
            const sql = 'select id from us1er.id; \nselect id from us*er.id; \nselect id from *u1ser.id;';
            const result = flinksqlParser(sql);
            expect(result).toMatchObject({
                line: 2,
                column: 17,
            });
            expect(result.token.start).toBe(43);
            expect(result.token.stop).toBe(43);
            const sql2 = `CREATE TABLE MyTable(
                message.after.id int AS id,
                message.after.userid varchar AS userid,
                message.after.username varchar AS username,
                message.after.prodid varchar AS prodid,
                message.after.price double AS price,
                message.after.amount int AS amount,
                message.after.discount double AS discount,
                message.after.tm timestamp AS tm,
                WATERMARK FOR tm AS withOffset(tm,1000)
            )WITH(
                'type' ='kafka11,
                topic ='1'
            );

            CREATE TABLE MyResult(
                a double,
                b timestamp,
                c timestamp
            )WITH(
                type ='mysql',
                url ='jdbc:mysql://1:3306/yanxi?charset=utf8'
            );

            insert into MyResult
            select
                sum(price * amount * discount) as a,
                TUMBLE_START( ROWTIME, INTERVAL '30' SECOND) as b
            from MyTable
            group by
                TUMBLE( ROWTIME, INTERVAL '30' SECOND);`;
            const result2 = flinksqlParser(sql2);
            expect(result2).not.toBeNull();
        });
        test('MATCH_RECOGNIZE', () => {
            const sql = `SELECT *
            FROM Ticker
            MATCH_RECOGNIZE (
                PARTITION BY symbol
                ORDER BY rowtime
                MEASURES
                    START_ROW.rowtime AS start_tstamp,
                    LAST(PRICE_DOWN.rowtime) AS bottom_tstamp,
                    LAST(PRICE_UP.rowtime) AS end_tstamp
                ONE ROW PER MATCH
                AFTER MATCH SKIP TO LAST PRICE_UP
                PATTERN (START_ROW PRICE_DOWN+ PRICE_UP)
                DEFINE
                    PRICE_DOWN AS
                        (LAST(PRICE_DOWN.price, 1) IS NULL AND PRICE_DOWN.price < START_ROW.price) OR
                            PRICE_DOWN.price < LAST(PRICE_DOWN.price, 1),
                    PRICE_UP AS
                        PRICE_UP.price > LAST(PRICE_DOWN.price, 1)
            ) MR;`;
            const result = flinksqlParser(sql);
            expect(result).toBeNull();
        })
        test('test primary key', () => {
            const sql = `create table aa(
                name.a[1].a varchar as name ,
                ts int,
                primary key (id, id[2])
            ) with (
                type = 'kafka',
                aa = '12'
            );`;
            const result = flinksqlParser(sql);
            expect(result).toBeNull();
        })
    })
})
16 test/parsers/mysql/lexer.test.ts Normal file
@@ -0,0 +1,16 @@
import MySQLParser from '../../../src/core/mysql';

describe('MySQL Lexer tests', () => {
    const mysqlParser = new MySQLParser();

    const sql = 'select id,name,sex from user1;';
    const tokens = mysqlParser.getAllTokens(sql);

    test('token counts', () => {
        expect(tokens.length).toBe(12);
    });

    test('token counts', () => {
        expect(tokens.length).toBe(12);
    });
});
24 test/parsers/mysql/parser.test.ts Normal file
@@ -0,0 +1,24 @@
import MySQLParser from '../../../src/core/mysql';

describe('MySQL Parser tests', () => {
    const mysql = new MySQLParser();

    test('Select * FROM Statement', () => {
        const sql = 'SELECT * FROM tb;';
        const result = mysql.parserTreeToString(sql);
        expect(result).toEqual(`
(statement (sqlStatements (
sqlStatement (
dmlStatement (
selectStatement (
querySpecification SELECT (selectElements *) (
fromClause FROM (
tableSources (tableSource (
tableSourceItem (tableName (
fullId (uid (
simpleId TB
)
))))))))))) (emptyStatement ;)) <EOF>
)`);
    });
});
11 test/parsers/mysql/syntax.test.ts Normal file
@@ -0,0 +1,11 @@
import MySQLParser from '../../../src/core/mysql';

describe('MySQL Syntax tests', () => {
    const mysql = new MySQLParser();
    test('Select Statement', () => {
        const sql = 'select id,name from user1;';
        const result = mysql.validate(sql);

        expect(result.length).toBe(0);
    });
});
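The three new test files above are the only description of the MySQL parser surface in this commit; a small usage sketch of what they rely on (return shapes are inferred from the assertions, not from the `src/core/mysql` implementation):

```typescript
// Import path as used by the test files above (relative to test/parsers/mysql).
import MySQLParser from '../../../src/core/mysql';

const mysql = new MySQLParser();

// Lexer output: lexer.test.ts only asserts on the number of tokens produced.
const tokens = mysql.getAllTokens('select id,name,sex from user1;');

// Parse tree rendered as text: parser.test.ts compares this string verbatim.
const tree = mysql.parserTreeToString('SELECT * FROM tb;');

// Validation: syntax.test.ts expects an empty error array for valid SQL.
const errors = mysql.validate('select id,name from user1;');

console.log(tokens.length, tree, errors.length);
```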
36 test/t.js
@@ -1,36 +0,0 @@
const dtSqlParser = require('../lib/index');
const flinkParser = require('../lib/lib/flinkParser').default;
const parser = dtSqlParser.parser;

console.log(flinkParser(`selec`))
console.time('t')
const sql = `INSERT INTO TABLE STUDENT_SCORES VALUES
('1','111','68','69','90','CLASS1','DEPARTMENT1'),
('2','112','73','80','96','CLASS1','DEPARTMENT1'),
('3','113','90','74','75','CLASS1','DEPARTMENT1'),
('4','114','89','94','93','CLASS1','DEPARTMENT2'),
('5','115','99','93','89','CLASS1','DEPARTMENT1'),
('6','121','96','74','79','CLASS2','DEPARTMENT1'),
('7','122','89','86','85','CLASS2','DEPARTMENT1'),
('8','123','70','78','61','CLASS2','DEPARTMENT1'),
('9','124','76','70','76','CLASS2','DEPARTMENT1'),
('10','211','89','93','60','CLASS1','DEPARTMENT2'),
('11','212','76','83','75','CLASS1','DEPARTMENT2'),
('12','213','71','94','90','CLASS2','DEPARTMENT2'),
('13','214','94','94','66','CLASS1','DEPARTMENT2'),
('14','215','84','82','73','CLASS1','DEPARTMENT2'),
('15','216','85','74','93','CLASS1','DEPARTMENT2'),
('16','221','77','99','61','CLASS2','DEPARTMENT2'),
('17','222','80','78','96','CLASS2','DEPARTMENT2'),
('18','223','79','74','96','CLASS2','DEPARTMENT2'),
('19','224','75','80','78','CLASS2','DEPARTMENT2'),
('19','224','75','80','78','CLASS2','DEPARTMENT2'),
('19','224','75','80','78','CLASS2','DEPARTMENT2'),
('19','224','75','80','78','CLASS2','DEPARTMENT2'),
('19','224','75','80','78','CLASS2','DEPARTMENT2'),
('19','224','75','80','78','CLASS2','DEPARTMENT2'),
('19','224','75','80','78','CLASS2','DEPARTMENT2'),
('19','224','75','80','78','CLASS2','DEPARTMENT2'),
('20','225','82','85','63','CLASS2','DEPARTMENT2')`;
const result = parser.parserSql([sql, ''], 'hive');
console.timeEnd('t')
@@ -4,17 +4,17 @@ describe('utils', () => {
     test('single', () => {
         let sql = 'select id,name from user';
         let result = utils.splitSql(sql);
-        expect(result).toEqual([sql.length - 1])
+        expect(result).toEqual([sql.length - 1]);
         sql += ';';
         result = utils.splitSql(sql);
-        expect(result).toEqual([sql.length - 1])
+        expect(result).toEqual([sql.length - 1]);
     });
     test('multiple', () => {
         const sql = `-- a ;
         select * from a;
         select user from b`;
         const result = utils.splitSql(sql);
-        expect(result).toEqual([34, 65])
+        expect(result).toEqual([34, 65]);
     });
     test('error sql', () => {
         const sql = `CREATE TABLE MyResult(
@@ -29,7 +29,7 @@ describe('utils', () => {
         tableName ='user'
         );`;
         const result = utils.splitSql(sql);
-        expect(result).toEqual([337])
+        expect(result).toEqual([337]);
         const sql2 = `CREATE TABLE MyResult(
         a double,
         b timestamp,
@@ -42,7 +42,7 @@ describe('utils', () => {
         tableName ='user'
         )`;
         const result2 = utils.splitSql(sql2);
-        expect(result2).toEqual([336])
+        expect(result2).toEqual([336]);
     });
-    })
-})
+    });
+});
@@ -4,7 +4,15 @@
     "sourceMap": false,
     "allowJs":true,
     "target": "es6",
-    "module": "commonjs"
+    "module": "commonjs",
+    "noUnusedLocals": true,
+    "noUnusedParameters": false,
+    "typeRoots": [
+      "node",
+      "node_modules/@types",
+      "./src/typings"
+    ]
+
   },
   "include": [
     "./src/**/*"