test: remove barrier files to improve unit testing performance (#249)
* test: remove barrier files to improve unit testing performance
* chore: change check-types command
parent 1038a3a828
commit ecbbee32c7
@@ -23,7 +23,7 @@ module.exports = {
     // collectCoverageFrom: undefined,
 
     // The directory where Jest should output its coverage files
-    coverageDirectory: "coverage",
+    coverageDirectory: 'coverage',
 
     // An array of regexp pattern strings used to skip coverage collection
     // coveragePathIgnorePatterns: [
@@ -76,19 +76,13 @@ module.exports = {
     // ],
 
     // An array of file extensions your modules use
-    moduleFileExtensions: [
-        "js",
-        "mjs",
-        "cjs",
-        "jsx",
-        "ts",
-        "tsx",
-        "json",
-        "node",
-    ],
+    moduleFileExtensions: ['js', 'mjs', 'cjs', 'jsx', 'ts', 'tsx', 'json', 'node'],
 
     // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
-    // moduleNameMapper: {},
+    moduleNameMapper: {
+        '^src/(.*)$': '<rootDir>/src/$1',
+        '^test/(.*)$': '<rootDir>/test/$1',
+    },
 
     // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
     // modulePathIgnorePatterns: [],
@@ -144,7 +138,7 @@ module.exports = {
     // snapshotSerializers: [],
 
     // The test environment that will be used for testing
-    testEnvironment: "node",
+    testEnvironment: 'node',
 
     // Options that will be passed to the testEnvironment
     // testEnvironmentOptions: {},
@@ -153,7 +147,7 @@ module.exports = {
     // testLocationInResults: false,
 
     // The glob patterns Jest uses to detect test files
-    testMatch: ["**/__tests__/**/*.[jt]s?(x)", "**/?(*.)+(spec|test).[tj]s?(x)"],
+    testMatch: ['**/__tests__/**/*.[jt]s?(x)', '**/?(*.)+(spec|test).[tj]s?(x)'],
 
     // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
     // testPathIgnorePatterns: [
@@ -171,7 +165,7 @@ module.exports = {
 
     // A map from regular expressions to paths to transformers
     transform: {
-        "\\.[jt]sx?$": ['@swc/jest']
+        '\\.[jt]sx?$': ['@swc/jest'],
     },
 
     // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
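The moduleNameMapper added above is what lets the test files in the rest of this diff import through `src/...` and `test/...` specifiers instead of climbing relative paths to a barrel file. For the TypeScript compiler and editors to resolve the same specifiers, a matching `paths` mapping is assumed in the project's tsconfig; the snippet below is a minimal sketch of that assumption, not the repository's actual file.

{
    "compilerOptions": {
        // Assumed alias setup mirroring the Jest moduleNameMapper above;
        // the real tsconfig.json / test/tsconfig.json may differ.
        "baseUrl": ".",
        "paths": {
            "src/*": ["src/*"],
            "test/*": ["test/*"]
        }
    }
}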
@@ -23,7 +23,7 @@
         "prepublishOnly": "npm run build",
         "antlr4": "node ./scripts/antlr4.js",
         "build": "rm -rf dist && tsc",
-        "check-types": "tsc -p ./tsconfig.check.json",
+        "check-types": "tsc -p ./tsconfig.json && tsc -p ./test/tsconfig.json",
         "test": "NODE_OPTIONS=--max_old_space_size=4096 && jest",
        "release": "node ./scripts/release.js",
        "lint": "prettier --check '**/*.ts' --config ./.prettierrc",
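With the barrel file gone, check-types now runs tsc twice: once over the main project and once over a dedicated test project, replacing the old single-pass tsconfig.check.json. A plausible shape for the referenced test/tsconfig.json, assuming it simply extends the root config and type-checks the test sources without emitting output:

{
    // Hypothetical sketch of test/tsconfig.json; the actual file is not part of this diff.
    "extends": "../tsconfig.json",
    "compilerOptions": {
        "noEmit": true
    },
    "include": ["./**/*.ts"]
}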
@@ -1,4 +1,7 @@
-import { CommonTokenStream, ErrorListener, FlinkSQL, FlinkSqlLexer } from '../filters';
+import FlinkSQL from 'src/parser/flinksql';
+import { FlinkSqlLexer } from 'src/lib/flinksql/FlinkSqlLexer';
+import { ErrorListener } from 'src/parser/common/parseErrorListener';
+import { CommonTokenStream } from 'antlr4ts';
 
 describe('BasicParser unit tests', () => {
     const flinkParser = new FlinkSQL();
@@ -1,63 +0,0 @@
-/**
- * All unit tests should import parser about from this file.
- * In this way, the exports of dt-sql-parser in the entry file is guaranteed to be complete.
- *
- * 单测文件中有关 parser 的导入,都应该从这个文件中导入。
- * 通过这种方式,能保证 dt-sql-parser 的入口文件中的导出完整。
- *
- * See this issue https://github.com/DTStack/dt-sql-parser/issues/236.
- */
-
-export * from '../../src';
-
-/**
- * Something required by unit test but dt-sql-parser'entry not.
- * If you need to add an export to this file,
- * consider whether it should be exported in src/index as well.
- *
- * 一些单测文件需要但是 dt-sql-parser的入口不需要的导出。
- * 如果你需要在这个文件中添加新的导出,请考虑它是否应该在 dt-sql-parser 的入口文件中导出。
- */
-export { CommonTokenStream } from 'antlr4ts';
-
-export { ParseTreeWalker, ParseTreeListener } from 'antlr4ts/tree';
-
-export { FlinkSqlLexer } from '../../src/lib/flinksql/FlinkSqlLexer';
-export { FlinkSqlParser } from '../../src/lib/flinksql/FlinkSqlParser';
-export * as FlinkSqlParserRuleContext from '../../src/lib/flinksql/FlinkSqlParser';
-
-export { MySqlLexer } from '../../src/lib/mysql/MySqlLexer';
-export { MySqlParser } from '../../src/lib/mysql/MySqlParser';
-export * as MySqlParserRuleContext from '../../src/lib/mysql/MySqlParser';
-
-export { HiveSqlLexer } from '../../src/lib/hive/HiveSqlLexer';
-export { HiveSqlParser } from '../../src/lib/hive/HiveSqlParser';
-export * as HiveSqlParserRuleContext from '../../src/lib/hive/HiveSqlParser';
-
-export { PlSqlLexer } from '../../src/lib/plsql/PlSqlLexer';
-export { PlSqlParser } from '../../src/lib/plsql/PlSqlParser';
-export * as PlSqlParserRuleContext from '../../src/lib/plsql/PlSqlParser';
-
-export { SparkSqlLexer } from '../../src/lib/spark/SparkSqlLexer';
-export { SparkSqlParser } from '../../src/lib/spark/SparkSqlParser';
-export * as SparkSQLParserRuleContext from '../../src/lib/spark/SparkSqlParser';
-
-export { PostgreSQLLexer } from '../../src/lib/pgsql/PostgreSQLLexer';
-export { PostgreSQLParser } from '../../src/lib/pgsql/PostgreSQLParser';
-export * as PostgreSQLParserRuleContext from '../../src/lib/pgsql/PostgreSQLParser';
-
-export { TrinoSqlLexer } from '../../src/lib/trinosql/TrinoSqlLexer';
-export { TrinoSqlParser } from '../../src/lib/trinosql/TrinoSqlParser';
-export * as TrinoSqlParserRuleContext from '../../src/lib/trinosql/TrinoSqlParser';
-
-export { ImpalaSqlLexer } from '../../src/lib/impala/ImpalaSqlLexer';
-export { ImpalaSqlParser } from '../../src/lib/impala/ImpalaSqlParser';
-export * as ImpalaSqlParserRuleContext from '../../src/lib/impala/ImpalaSqlParser';
-
-export { FlinkSqlSplitListener } from '../../src/parser/flinksql';
-export { MysqlSplitListener } from '../../src/parser/mysql';
-export { HiveSqlSplitListener } from '../../src/parser/hive';
-export { SparkSqlSplitListener } from '../../src/parser/spark';
-export { PgSqlSplitListener } from '../../src/parser/pgsql';
-export { TrinoSqlSplitListener } from '../../src/parser/trinosql';
-export { ImpalaSqlSplitListener } from '../../src/parser/impala';
@@ -1,12 +1,12 @@
 # FlinkSQL Benchmark
 | Name | Rows | Times | Total Time(ms) | Average Time(ms) |
 | ---- | ---- | ---- | ---- | ---- |
-| CreateTable | 100 | 1 | 3858.63 | 3858.63 |
-| CreateTable | 1000 | 1 | 4845.70 | 4845.70 |
-| CreateTable | 5000 | 1 | 14629.31 | 14629.31 |
-| SelectTable | 100 | 1 | 3796.21 | 3796.21 |
-| SelectTable | 1000 | 1 | 7207.75 | 7207.75 |
-| SelectTable | 5000 | 1 | 26171.22 | 26171.22 |
-| InsertTable | 100 | 1 | 57.62 | 57.62 |
-| InsertTable | 1000 | 1 | 111.11 | 111.11 |
-| InsertTable | 5000 | 1 | 2056.96 | 2056.96 |
+| CreateTable | 100 | 1 | 256.26 | 256.26 |
+| CreateTable | 1000 | 1 | 343.14 | 343.14 |
+| CreateTable | 5000 | 1 | 1816.20 | 1816.20 |
+| SelectTable | 100 | 1 | 460.68 | 460.68 |
+| SelectTable | 1000 | 1 | 1124.45 | 1124.45 |
+| SelectTable | 5000 | 1 | 5212.31 | 5212.31 |
+| InsertTable | 100 | 1 | 9.10 | 9.10 |
+| InsertTable | 1000 | 1 | 41.80 | 41.80 |
+| InsertTable | 5000 | 1 | 318.83 | 318.83 |
@@ -1,11 +1,11 @@
-import { FlinkSQL } from '../../../filters';
+import FlinkSQL from 'src/parser/flinksql';
 import {
     readSQL,
     benchmark,
     getReportTableHeader,
     getReportTableRow,
     exportReportTable,
-} from '../../../helper';
+} from 'test/helper';
 
 const features = {
     selectTable: readSQL(__dirname, 'selectTable.sql'),
@@ -1,4 +1,5 @@
-import { FlinkSQL, FlinkSqlSplitListener, FlinkSqlParserListener } from '../../filters';
+import FlinkSQL, { FlinkSqlSplitListener } from 'src/parser/flinksql';
+import { FlinkSqlParserListener } from 'src/lib/flinksql/FlinkSqlParserListener';
 
 const validSQL1 = `INSERT INTO country_page_view
 VALUES ('Chinese', 'mumiao', 18),
@@ -1,4 +1,4 @@
-import { FlinkSQL } from '../../filters';
+import FlinkSQL from 'src/parser/flinksql';
 
 describe('FlinkSQL Lexer tests', () => {
     const parser = new FlinkSQL();
@@ -1,9 +1,7 @@
-import {
-    FlinkSQL,
-    FlinkSqlParserListener,
-    FlinkSqlParserRuleContext,
-    ParseTreeListener,
-} from '../../filters';
+import { ParseTreeListener } from 'antlr4ts/tree/ParseTreeListener';
+import FlinkSQL from 'src/parser/flinksql';
+import { FlinkSqlParserListener } from 'src/lib/flinksql/FlinkSqlParserListener';
+import { TableExpressionContext } from 'src/lib/flinksql/FlinkSqlParser';
 
 describe('Flink SQL Listener Tests', () => {
     const expectTableName = 'user1';
@@ -15,9 +13,7 @@ describe('Flink SQL Listener Tests', () => {
     test('Listener enterTableName', async () => {
         let result = '';
         class MyListener implements FlinkSqlParserListener {
-            enterTableExpression = (
-                ctx: FlinkSqlParserRuleContext.TableExpressionContext
-            ): void => {
+            enterTableExpression = (ctx: TableExpressionContext): void => {
                 result = ctx.text.toLowerCase();
             };
         }
@@ -1,6 +1,7 @@
 import fs from 'fs';
 import path from 'path';
-import { CaretPosition, SyntaxContextType, FlinkSQL } from '../../../filters';
+import FlinkSQL from 'src/parser/flinksql';
+import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
@@ -1,7 +1,8 @@
 import fs from 'fs';
 import path from 'path';
-import { CaretPosition, SyntaxContextType, FlinkSQL } from '../../../filters';
-import { commentOtherLine } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
+import { commentOtherLine } from 'test/helper';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),
@@ -1,7 +1,8 @@
 import fs from 'fs';
 import path from 'path';
-import { FlinkSQL, CaretPosition } from '../../../filters';
-import { commentOtherLine } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { CaretPosition } from 'src/parser/common/basic-parser-types';
+import { commentOtherLine } from 'test/helper';
 
 const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');
 
@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const features = {
     table: readSQL(__dirname, 'alterTable.sql'),
@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 // 综合测试的 sql 不做切割
 const features = {
@@ -1,6 +1,6 @@
 import fs from 'fs';
 import path from 'path';
-import { FlinkSQL } from '../../../filters';
+import FlinkSQL from 'src/parser/flinksql';
 
 // 注释 sql 不做切割
 const features = {
@@ -1,6 +1,6 @@
 import fs from 'fs';
 import path from 'path';
-import { FlinkSQL } from '../../../filters';
+import FlinkSQL from 'src/parser/flinksql';
 
 // 综合测试的 sql 不做切割
 const features = {
@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const parser = new FlinkSQL();
 
@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const features = {
     describes: readSQL(__dirname, 'describe.sql'),
@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const features = {
     table: readSQL(__dirname, 'dropTable.sql'),
@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const features = {
     dtAddFiles: readSQL(__dirname, 'dtAddFile.sql'),
@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const features = {
     explains: readSQL(__dirname, 'explain.sql'),
@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const features = {
     expression: readSQL(__dirname, 'expression.sql'),
@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const parser = new FlinkSQL();
 
@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const parser = new FlinkSQL();
 
@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const features = {
     shows: readSQL(__dirname, 'show.sql'),
@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const features = {
     uses: readSQL(__dirname, 'use.sql'),
@@ -1,4 +1,4 @@
-import { FlinkSQL } from '../../filters';
+import FlinkSQL from 'src/parser/flinksql';
 
 const randomText = `dhsdansdnkla ndjnsla ndnalks`;
 const unCompleteSQL = `CREATE TABLE`;
@@ -1,4 +1,6 @@
-import { FlinkSQL, AbstractParseTreeVisitor, FlinkSqlParserVisitor } from '../../filters';
+import FlinkSQL from 'src/parser/flinksql';
+import { FlinkSqlParserVisitor } from 'src/lib/flinksql/FlinkSqlParserVisitor';
+import { AbstractParseTreeVisitor } from 'antlr4ts/tree/AbstractParseTreeVisitor';
 
 describe('Flink SQL Visitor Tests', () => {
     const expectTableName = 'user1';
@@ -1,4 +1,5 @@
-import { HiveSQL, HiveSqlSplitListener, HiveSqlParserListener } from '../../filters';
+import HiveSQL, { HiveSqlSplitListener } from 'src/parser/hive';
+import { HiveSqlParserListener } from 'src/lib';
 
 const validSQL1 = `INSERT INTO country_page_view
 VALUES ('Chinese', 'mumiao', 18),
@@ -1,4 +1,4 @@
-import { HiveSQL } from '../../filters';
+import HiveSQL from 'src/parser/hive';
 
 describe('HiveSQL Lexer tests', () => {
     const parser = new HiveSQL();
@@ -1,5 +1,7 @@
-import { ParseTreeListener } from 'antlr4ts/tree';
-import { HiveSQL, HiveSqlParserListener, HiveSqlParserRuleContext } from '../../filters';
+import { ParseTreeListener } from 'antlr4ts/tree/ParseTreeListener';
+import HiveSQL from 'src/parser/hive';
+import { ProgramContext } from 'src/lib/hive/HiveSqlParser';
+import { HiveSqlParserListener } from 'src/lib/hive/HiveSqlParserListener';
 
 describe('HiveSQL Listener Tests', () => {
     const parser = new HiveSQL();
@@ -16,10 +18,7 @@ describe('HiveSQL Listener Tests', () => {
         }
         const listenTableName = new MyListener();
 
-        await parser.listen(
-            listenTableName as ParseTreeListener,
-            parseTree as HiveSqlParserRuleContext.ProgramContext
-        );
+        await parser.listen(listenTableName as ParseTreeListener, parseTree as ProgramContext);
         expect(result).toBe(expectTableName.toUpperCase());
     });
     test('Listener enterCreateTable', async () => {
@@ -33,10 +32,7 @@ describe('HiveSQL Listener Tests', () => {
         }
         const listenTableName = new MyListener();
 
-        await parser.listen(
-            listenTableName as ParseTreeListener,
-            parseTree as HiveSqlParserRuleContext.ProgramContext
-        );
+        await parser.listen(listenTableName as ParseTreeListener, parseTree as ProgramContext);
         expect(result).toBe('DROPTABLETABLE_NAME');
     });
 
@@ -1,6 +1,7 @@
 import fs from 'fs';
 import path from 'path';
-import { CaretPosition, SyntaxContextType, HiveSQL } from '../../../filters';
+import HiveSQL from 'src/parser/hive';
+import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
@@ -1,7 +1,8 @@
 import fs from 'fs';
 import path from 'path';
-import { CaretPosition, SyntaxContextType, HiveSQL } from '../../../filters';
-import { commentOtherLine } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
+import { commentOtherLine } from 'test/helper';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),
@@ -1,7 +1,8 @@
 import fs from 'fs';
 import path from 'path';
-import { CaretPosition, HiveSQL } from '../../../filters';
-import { commentOtherLine } from '../../../helper';
+import { commentOtherLine } from 'test/helper';
+import HiveSQL from 'src/parser/hive';
+import { CaretPosition } from 'src/parser/common/basic-parser-types';
 
 const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');
 
@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();
 
@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();
 
@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();
 
@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();
 
@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();
 
@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();
 
@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();
 
@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();
 
@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const features = {
     exports: readSQL(__dirname, 'export.sql'),
@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const features = {
     imports: readSQL(__dirname, 'import.sql'),
@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();
 
@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();
 
@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();
 
@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();
 
@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();
 
@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();
 
@@ -1,4 +1,4 @@
-import { HiveSQL } from '../../filters';
+import HiveSQL from 'src/parser/hive';
 
 const randomText = `dhsdansdnkla ndjnsla ndnalks`;
 const unCompleteSQL = `CREATE TABLE`;
@@ -1,9 +1,8 @@
-import {
-    HiveSQL,
-    HiveSqlParserVisitor,
-    AbstractParseTreeVisitor,
-    HiveSqlParserRuleContext,
-} from '../../filters';
+import { AbstractParseTreeVisitor } from 'antlr4ts/tree/AbstractParseTreeVisitor';
+import HiveSQL from 'src/parser/hive';
+import { HiveSqlParserVisitor } from 'src/lib/hive/HiveSqlParserVisitor';
+import { ProgramContext } from 'src/lib/hive/HiveSqlParser';
 
 describe('HiveSQL Visitor Tests', () => {
     const expectTableName = 'dm_gis.dlv_addr_tc_count';
@@ -27,7 +26,7 @@ describe('HiveSQL Visitor Tests', () => {
         }
 
         const visitor = new MyVisitor();
-        visitor.visit(parseTree as HiveSqlParserRuleContext.ProgramContext);
+        visitor.visit(parseTree as ProgramContext);
 
         expect(result).toBe(expectTableName);
     });
@@ -1,4 +1,5 @@
-import { ImpalaSQL, ImpalaSqlSplitListener, ImpalaSqlParserListener } from '../../filters';
+import ImpalaSQL, { ImpalaSqlSplitListener } from 'src/parser/impala';
+import { ImpalaSqlParserListener } from 'src/lib/impala/ImpalaSqlParserListener';
 
 const validSQL1 = `INSERT INTO country_page_view
 VALUES ('Chinese', 'mumiao', 18),
@@ -1,4 +1,4 @@
-import { ImpalaSQL } from '../../filters';
+import ImpalaSQL from 'src/parser/impala';
 
 describe('ImpalaSQL Lexer tests', () => {
     const parser = new ImpalaSQL();
@@ -1,4 +1,6 @@
-import { ImpalaSQL, ImpalaSqlParserListener, ParseTreeListener } from '../../filters';
+import ImpalaSQL from 'src/parser/impala';
+import { ImpalaSqlParserListener } from 'src/lib/impala/ImpalaSqlParserListener';
+import { ParseTreeListener } from 'antlr4ts/tree/ParseTreeListener';
 
 describe('impala SQL Listener Tests', () => {
     const expectTableName = 'user1';
@@ -1,6 +1,7 @@
 import fs from 'fs';
 import path from 'path';
-import { ImpalaSQL, CaretPosition, SyntaxContextType } from '../../../filters';
+import ImpalaSQL from 'src/parser/impala';
+import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
@@ -1,7 +1,8 @@
 import fs from 'fs';
 import path from 'path';
-import { ImpalaSQL, CaretPosition, SyntaxContextType } from '../../../filters';
-import { commentOtherLine } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
+import { commentOtherLine } from 'test/helper';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),
@@ -1,7 +1,8 @@
 import fs from 'fs';
 import path from 'path';
-import { ImpalaSQL, CaretPosition } from '../../../filters';
-import { commentOtherLine } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { CaretPosition } from 'src/parser/common/basic-parser-types';
+import { commentOtherLine } from 'test/helper';
 
 const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');
 
@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();
 
@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();
 
@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();
 
@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();
 
@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();
 
@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();
 
@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();
 
@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();
 
@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();
 
@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();
 
@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();
 
@@ -1,4 +1,4 @@
-import { ImpalaSQL } from '../../filters';
+import ImpalaSQL from 'src/parser/impala';
 
 const randomText = `dhsdansdnkla ndjnsla ndnalks`;
 const unCompleteSQL = `CREATE TABLE`;
@@ -1,4 +1,6 @@
-import { ImpalaSQL, ImpalaSqlParserVisitor, AbstractParseTreeVisitor } from '../../filters';
+import ImpalaSQL from 'src/parser/impala';
+import { AbstractParseTreeVisitor } from 'antlr4ts/tree/AbstractParseTreeVisitor';
+import { ImpalaSqlParserVisitor } from 'src/lib/impala/ImpalaSqlParserVisitor';
 
 describe('impala SQL Visitor Tests', () => {
     const expectTableName = 'user1';
@@ -1,5 +1,5 @@
-import { MySQL } from '../../filters';
-import { MysqlSplitListener, MySqlParserListener } from '../../filters';
+import MySQL, { MysqlSplitListener } from 'src/parser/mysql';
+import { MySqlParserListener } from 'src/lib/mysql/MySqlParserListener';
 
 const validSQL1 = `INSERT INTO country_page_view
 VALUES ('Chinese', 'mumiao', 18),
@@ -1,4 +1,4 @@
-import { MySQL } from '../../filters';
+import MySQL from 'src/parser/mysql';
 
 describe('MySQL Lexer tests', () => {
     const parser = new MySQL();
@@ -1,5 +1,6 @@
-import { MySQL } from '../../filters';
-import { MySqlParserListener, ParseTreeListener } from '../../filters';
+import MySQL from 'src/parser/mysql';
+import { MySqlParserListener } from 'src/lib/mysql/MySqlParserListener';
+import { ParseTreeListener } from 'antlr4ts/tree/ParseTreeListener';
 
 describe('MySQL Listener Tests', () => {
     const expectTableName = 'user1';
@@ -1,6 +1,7 @@
 import fs from 'fs';
 import path from 'path';
-import { MySQL, CaretPosition, SyntaxContextType } from '../../../filters';
+import MySQL from 'src/parser/mysql';
+import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
@@ -1,7 +1,8 @@
 import fs from 'fs';
 import path from 'path';
-import { MySQL, CaretPosition, SyntaxContextType } from '../../../filters';
-import { commentOtherLine } from '../../../helper';
+import MySQL from 'src/parser/mysql';
+import { SyntaxContextType, CaretPosition } from 'src/parser/common/basic-parser-types';
+import { commentOtherLine } from 'test/helper';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),
@@ -1,7 +1,8 @@
 import fs from 'fs';
 import path from 'path';
-import { MySQL, CaretPosition } from '../../../filters';
-import { commentOtherLine } from '../../../helper';
+import MySQL from 'src/parser/mysql';
+import { CaretPosition } from 'src/parser/common/basic-parser-types';
+import { commentOtherLine } from 'test/helper';
 
 const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');
 
@@ -1,4 +1,4 @@
-import { MySQL } from '../../filters';
+import MySQL from 'src/parser/mysql';
 
 describe('MySQL Syntax Tests', () => {
     const parser = new MySQL();
@@ -1,5 +1,5 @@
-import { MySQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import MySQL from 'src/parser/mysql';
+import { readSQL } from 'test/helper';
 
 const parser = new MySQL();
 
@@ -1,5 +1,5 @@
-import { MySQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import MySQL from 'src/parser/mysql';
+import { readSQL } from 'test/helper';
 
 const parser = new MySQL();
 
@@ -1,5 +1,5 @@
-import { MySQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import MySQL from 'src/parser/mysql';
+import { readSQL } from 'test/helper';
 
 const parser = new MySQL();
 
@@ -1,5 +1,5 @@
-import { MySQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import MySQL from 'src/parser/mysql';
+import { readSQL } from 'test/helper';
 
 const parser = new MySQL();
 
@@ -1,4 +1,4 @@
-import { MySQL } from '../../filters';
+import MySQL from 'src/parser/mysql';
 
 const randomText = `dhsdansdnkla ndjnsla ndnalks`;
 const unCompleteSQL = `CREATE TABLE`;
@@ -1,4 +1,6 @@
-import { MySQL, MySqlParserVisitor, AbstractParseTreeVisitor } from '../../filters';
+import MySQL from 'src/parser/mysql';
+import { MySqlParserVisitor } from 'src/lib/mysql/MySqlParserVisitor';
+import { AbstractParseTreeVisitor } from 'antlr4ts/tree/AbstractParseTreeVisitor';
 
 describe('MySQL Visitor Tests', () => {
     const expectTableName = 'user1';
@@ -1,4 +1,5 @@
-import { PostgresSQL, PgSqlSplitListener, PostgreSQLParserListener } from '../../filters';
+import PostgresSQL, { PgSqlSplitListener } from 'src/parser/pgsql';
+import { PostgreSQLParserListener } from 'src/lib/pgsql/PostgreSQLParserListener';
 
 const validSQL1 = `INSERT INTO country_page_view
 VALUES ('Chinese', 'mumiao', 18),
@@ -1,4 +1,4 @@
-import { PostgresSQL } from '../../filters';
+import PostgresSQL from 'src/parser/pgsql';
 
 describe('PostgresSQL Lexer tests', () => {
     const mysqlParser = new PostgresSQL();
@@ -1,4 +1,6 @@
-import { PostgresSQL, PostgreSQLParserListener, ParseTreeListener } from '../../filters';
+import PostgresSQL from 'src/parser/pgsql';
+import { PostgreSQLParserListener } from 'src/lib/pgsql/PostgreSQLParserListener';
+import { ParseTreeListener } from 'antlr4ts/tree/ParseTreeListener';
 
 describe('PostgresSQL Listener Tests', () => {
     const expectTableName = 'user1';
@@ -1,6 +1,7 @@
 import fs from 'fs';
 import path from 'path';
-import { CaretPosition, SyntaxContextType, PostgresSQL } from '../../../filters';
+import PostgresSQL from 'src/parser/pgsql';
+import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
@@ -1,7 +1,8 @@
 import fs from 'fs';
 import path from 'path';
-import { PostgresSQL, CaretPosition, SyntaxContextType } from '../../../filters';
-import { commentOtherLine } from '../../../helper';
+import PostgresSQL from 'src/parser/pgsql';
+import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
+import { commentOtherLine } from 'test/helper';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),
@@ -1,7 +1,8 @@
 import fs from 'fs';
 import path from 'path';
-import { PostgresSQL, CaretPosition } from '../../../filters';
-import { commentOtherLine } from '../../../helper';
+import PostgresSQL from 'src/parser/pgsql';
+import { CaretPosition } from 'src/parser/common/basic-parser-types';
+import { commentOtherLine } from 'test/helper';
 
 const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');
 
@@ -1,5 +1,5 @@
-import { PostgresSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import PostgresSQL from 'src/parser/pgsql';
+import { readSQL } from 'test/helper';
 
 const parser = new PostgresSQL();
 
@@ -1,5 +1,5 @@
-import { PostgresSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import PostgresSQL from 'src/parser/pgsql';
+import { readSQL } from 'test/helper';
 
 const parser = new PostgresSQL();
 
@@ -1,5 +1,5 @@
-import { PostgresSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import PostgresSQL from 'src/parser/pgsql';
+import { readSQL } from 'test/helper';
 
 const parser = new PostgresSQL();
 
@@ -1,5 +1,5 @@
-import { PostgresSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import PostgresSQL from 'src/parser/pgsql';
+import { readSQL } from 'test/helper';
 
 const parser = new PostgresSQL();
 
@@ -1,5 +1,5 @@
-import { PostgresSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import PostgresSQL from 'src/parser/pgsql';
+import { readSQL } from 'test/helper';
 
 const parser = new PostgresSQL();
 
@@ -1,5 +1,5 @@
-import { PostgresSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import PostgresSQL from 'src/parser/pgsql';
+import { readSQL } from 'test/helper';
 
 const parser = new PostgresSQL();
 
@@ -1,5 +1,5 @@
-import { PostgresSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import PostgresSQL from 'src/parser/pgsql';
+import { readSQL } from 'test/helper';
 
 const parser = new PostgresSQL();
 
@@ -1,5 +1,5 @@
-import { PostgresSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import PostgresSQL from 'src/parser/pgsql';
+import { readSQL } from 'test/helper';
 
 const parser = new PostgresSQL();
 
@@ -1,4 +1,4 @@
-import { PostgresSQL } from '../../filters';
+import PostgresSQL from 'src/parser/pgsql';
 
 const randomText = `dhsdansdnkla ndjnsla ndnalks`;
 const unCompleteSQL = `CREATE TABLE`;
@@ -1,4 +1,6 @@
-import { PostgresSQL, AbstractParseTreeVisitor, PostgreSQLParserVisitor } from '../../filters';
+import PostgresSQL from 'src/parser/pgsql';
+import { AbstractParseTreeVisitor } from 'antlr4ts/tree/AbstractParseTreeVisitor';
+import { PostgreSQLParserVisitor } from 'src/lib/pgsql/PostgreSQLParserVisitor';
 
 describe('MySQL Visitor Tests', () => {
     const expectTableName = 'user1';
Some files were not shown because too many files have changed in this diff.