test: remove barrier files to improve unit testing performance (#249)

* test: remove barrier files to improve unit testing performance

* chore: change check-types command
Hayden 2024-01-19 21:10:00 +08:00 committed by GitHub
parent 1038a3a828
commit ecbbee32c7
171 changed files with 501 additions and 517 deletions

@@ -4,187 +4,181 @@
  */
 module.exports = {
     // All imported modules in your tests should be mocked automatically
     // automock: false,
     // Stop running tests after `n` failures
     // bail: 0,
     // The directory where Jest should store its cached dependency information
     // cacheDirectory: "/private/var/folders/70/21p94l8j6cd9vv9t990g8cj00000gn/T/jest_dx",
     // Automatically clear mock calls, instances, contexts and results before every test
     clearMocks: true,
     // Indicates whether the coverage information should be collected while executing the test
     collectCoverage: true,
     // An array of glob patterns indicating a set of files for which coverage information should be collected
     // collectCoverageFrom: undefined,
     // The directory where Jest should output its coverage files
-    coverageDirectory: "coverage",
+    coverageDirectory: 'coverage',
     // An array of regexp pattern strings used to skip coverage collection
     // coveragePathIgnorePatterns: [
     //   "/node_modules/"
     // ],
     // Indicates which provider should be used to instrument code for coverage
     // coverageProvider: "babel",
     // A list of reporter names that Jest uses when writing coverage reports
     // coverageReporters: [
     //   "json",
     //   "text",
     //   "lcov",
     //   "clover"
     // ],
     // An object that configures minimum threshold enforcement for coverage results
     // coverageThreshold: undefined,
     // A path to a custom dependency extractor
     // dependencyExtractor: undefined,
     // Make calling deprecated APIs throw helpful error messages
     // errorOnDeprecated: false,
     // The default configuration for fake timers
     // fakeTimers: {
     //   "enableGlobally": false
     // },
     // Force coverage collection from ignored files using an array of glob patterns
     // forceCoverageMatch: [],
     // A path to a module which exports an async function that is triggered once before all test suites
     // globalSetup: undefined,
     // A path to a module which exports an async function that is triggered once after all test suites
     // globalTeardown: undefined,
     // A set of global variables that need to be available in all test environments
     // globals: {},
     // The maximum amount of workers used to run your tests. Can be specified as % or a number. E.g. maxWorkers: 10% will use 10% of your CPU amount + 1 as the maximum worker number. maxWorkers: 2 will use a maximum of 2 workers.
     // maxWorkers: "50%",
     // An array of directory names to be searched recursively up from the requiring module's location
     // moduleDirectories: [
     //   "node_modules"
     // ],
     // An array of file extensions your modules use
-    moduleFileExtensions: [
-        "js",
-        "mjs",
-        "cjs",
-        "jsx",
-        "ts",
-        "tsx",
-        "json",
-        "node",
-    ],
+    moduleFileExtensions: ['js', 'mjs', 'cjs', 'jsx', 'ts', 'tsx', 'json', 'node'],
     // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
-    // moduleNameMapper: {},
+    moduleNameMapper: {
+        '^src/(.*)$': '<rootDir>/src/$1',
+        '^test/(.*)$': '<rootDir>/test/$1',
+    },
     // An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
     // modulePathIgnorePatterns: [],
     // Activates notifications for test results
     // notify: false,
     // An enum that specifies notification mode. Requires { notify: true }
     // notifyMode: "failure-change",
     // A preset that is used as a base for Jest's configuration
     // preset: "ts-jest/presets/js-with-ts-esm",
     // Run tests from one or more projects
     // projects: undefined,
     // Use this configuration option to add custom reporters to Jest
     // reporters: undefined,
     // Automatically reset mock state before every test
     // resetMocks: false,
     // Reset the module registry before running each individual test
     // resetModules: false,
     // A path to a custom resolver
     // resolver: undefined,
     // Automatically restore mock state and implementation before every test
     // restoreMocks: false,
     // The root directory that Jest should scan for tests and modules within
     // rootDir: undefined,
     // A list of paths to directories that Jest should use to search for files in
     // roots: [
     //   "<rootDir>"
     // ],
     // Allows you to use a custom runner instead of Jest's default test runner
     // runner: "jest-runner",
     // The paths to modules that run some code to configure or set up the testing environment before each test
     // setupFiles: [],
     // A list of paths to modules that run some code to configure or set up the testing framework before each test
     // setupFilesAfterEnv: [],
     // The number of seconds after which a test is considered as slow and reported as such in the results.
     // slowTestThreshold: 5,
     // A list of paths to snapshot serializer modules Jest should use for snapshot testing
     // snapshotSerializers: [],
     // The test environment that will be used for testing
-    testEnvironment: "node",
+    testEnvironment: 'node',
     // Options that will be passed to the testEnvironment
     // testEnvironmentOptions: {},
     // Adds a location field to test results
     // testLocationInResults: false,
     // The glob patterns Jest uses to detect test files
-    testMatch: ["**/__tests__/**/*.[jt]s?(x)", "**/?(*.)+(spec|test).[tj]s?(x)"],
+    testMatch: ['**/__tests__/**/*.[jt]s?(x)', '**/?(*.)+(spec|test).[tj]s?(x)'],
     // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
     // testPathIgnorePatterns: [
     //   "/node_modules/"
     // ],
     // The regexp pattern or array of patterns that Jest uses to detect test files
     // testRegex: [],
     // This option allows the use of a custom results processor
     // testResultsProcessor: undefined,
     // This option allows use of a custom test runner
     // testRunner: "jest-circus/runner",
     // A map from regular expressions to paths to transformers
     transform: {
-        "\\.[jt]sx?$": ['@swc/jest']
+        '\\.[jt]sx?$': ['@swc/jest'],
     },
     // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
     extensionsToTreatAsEsm: ['.ts', '.tsx'],
     // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
     // unmockedModulePathPatterns: undefined,
     // Indicates whether each individual test should be reported during the run
     // verbose: undefined,
     // An array of regexp patterns that are matched against all source file paths before re-running tests in watch mode
     // watchPathIgnorePatterns: [],
     // Whether to use watchman for file crawling
     // watchman: true
 };

@@ -23,7 +23,7 @@
         "prepublishOnly": "npm run build",
         "antlr4": "node ./scripts/antlr4.js",
         "build": "rm -rf dist && tsc",
-        "check-types": "tsc -p ./tsconfig.check.json",
+        "check-types": "tsc -p ./tsconfig.json && tsc -p ./test/tsconfig.json",
         "test": "NODE_OPTIONS=--max_old_space_size=4096 && jest",
         "release": "node ./scripts/release.js",
         "lint": "prettier --check '**/*.ts' --config ./.prettierrc",

@@ -1,4 +1,7 @@
-import { CommonTokenStream, ErrorListener, FlinkSQL, FlinkSqlLexer } from '../filters';
+import FlinkSQL from 'src/parser/flinksql';
+import { FlinkSqlLexer } from 'src/lib/flinksql/FlinkSqlLexer';
+import { ErrorListener } from 'src/parser/common/parseErrorListener';
+import { CommonTokenStream } from 'antlr4ts';
 
 describe('BasicParser unit tests', () => {
     const flinkParser = new FlinkSQL();

@@ -1,63 +0,0 @@
-/**
- * All unit tests should import parser about from this file.
- * In this way, the exports of dt-sql-parser in the entry file is guaranteed to be complete.
- *
- * parser
- * dt-sql-parser
- *
- * See this issue https://github.com/DTStack/dt-sql-parser/issues/236.
- */
-export * from '../../src';
-/**
- * Something required by unit test but dt-sql-parser'entry not.
- * If you need to add an export to this file,
- * consider whether it should be exported in src/index as well.
- *
- * dt-sql-parser的入口不需要的导出
- * dt-sql-parser
- */
-export { CommonTokenStream } from 'antlr4ts';
-export { ParseTreeWalker, ParseTreeListener } from 'antlr4ts/tree';
-export { FlinkSqlLexer } from '../../src/lib/flinksql/FlinkSqlLexer';
-export { FlinkSqlParser } from '../../src/lib/flinksql/FlinkSqlParser';
-export * as FlinkSqlParserRuleContext from '../../src/lib/flinksql/FlinkSqlParser';
-export { MySqlLexer } from '../../src/lib/mysql/MySqlLexer';
-export { MySqlParser } from '../../src/lib/mysql/MySqlParser';
-export * as MySqlParserRuleContext from '../../src/lib/mysql/MySqlParser';
-export { HiveSqlLexer } from '../../src/lib/hive/HiveSqlLexer';
-export { HiveSqlParser } from '../../src/lib/hive/HiveSqlParser';
-export * as HiveSqlParserRuleContext from '../../src/lib/hive/HiveSqlParser';
-export { PlSqlLexer } from '../../src/lib/plsql/PlSqlLexer';
-export { PlSqlParser } from '../../src/lib/plsql/PlSqlParser';
-export * as PlSqlParserRuleContext from '../../src/lib/plsql/PlSqlParser';
-export { SparkSqlLexer } from '../../src/lib/spark/SparkSqlLexer';
-export { SparkSqlParser } from '../../src/lib/spark/SparkSqlParser';
-export * as SparkSQLParserRuleContext from '../../src/lib/spark/SparkSqlParser';
-export { PostgreSQLLexer } from '../../src/lib/pgsql/PostgreSQLLexer';
-export { PostgreSQLParser } from '../../src/lib/pgsql/PostgreSQLParser';
-export * as PostgreSQLParserRuleContext from '../../src/lib/pgsql/PostgreSQLParser';
-export { TrinoSqlLexer } from '../../src/lib/trinosql/TrinoSqlLexer';
-export { TrinoSqlParser } from '../../src/lib/trinosql/TrinoSqlParser';
-export * as TrinoSqlParserRuleContext from '../../src/lib/trinosql/TrinoSqlParser';
-export { ImpalaSqlLexer } from '../../src/lib/impala/ImpalaSqlLexer';
-export { ImpalaSqlParser } from '../../src/lib/impala/ImpalaSqlParser';
-export * as ImpalaSqlParserRuleContext from '../../src/lib/impala/ImpalaSqlParser';
-export { FlinkSqlSplitListener } from '../../src/parser/flinksql';
-export { MysqlSplitListener } from '../../src/parser/mysql';
-export { HiveSqlSplitListener } from '../../src/parser/hive';
-export { SparkSqlSplitListener } from '../../src/parser/spark';
-export { PgSqlSplitListener } from '../../src/parser/pgsql';
-export { TrinoSqlSplitListener } from '../../src/parser/trinosql';
-export { ImpalaSqlSplitListener } from '../../src/parser/impala';
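The deleted file above is the barrier (barrel) in question: it re-exported the whole `src` entry plus every generated lexer, parser, and split listener, so any suite that touched it pulled the entire library through the transformer. The replacement pattern used throughout the hunks below is direct imports. A rough before/after sketch, with import paths taken from this diff and the surrounding test skeleton assumed:

```typescript
// Before: everything came through the barrel, so Jest transformed the whole
// dt-sql-parser entry for every suite.
// import { FlinkSQL, FlinkSqlParserListener } from '../../filters';

// After: each suite imports only what it needs, straight from the source tree.
import FlinkSQL from 'src/parser/flinksql';
// Generated code is imported directly as well (shown for illustration).
import { FlinkSqlParserListener } from 'src/lib/flinksql/FlinkSqlParserListener';

describe('direct-import sketch', () => {
    test('instantiates the parser without the barrier file', () => {
        const parser = new FlinkSQL();
        expect(parser).toBeInstanceOf(FlinkSQL);
    });
});
```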

@@ -1,12 +1,12 @@
 # FlinkSQL Benchmark
 | Name | Rows | Times | Total Time(ms) | Average Time(ms) |
 | ---- | ---- | ---- | ---- | ---- |
-| CreateTable | 100 | 1 | 3858.63 | 3858.63 |
-| CreateTable | 1000 | 1 | 4845.70 | 4845.70 |
-| CreateTable | 5000 | 1 | 14629.31 | 14629.31 |
-| SelectTable | 100 | 1 | 3796.21 | 3796.21 |
-| SelectTable | 1000 | 1 | 7207.75 | 7207.75 |
-| SelectTable | 5000 | 1 | 26171.22 | 26171.22 |
-| InsertTable | 100 | 1 | 57.62 | 57.62 |
-| InsertTable | 1000 | 1 | 111.11 | 111.11 |
-| InsertTable | 5000 | 1 | 2056.96 | 2056.96 |
+| CreateTable | 100 | 1 | 256.26 | 256.26 |
+| CreateTable | 1000 | 1 | 343.14 | 343.14 |
+| CreateTable | 5000 | 1 | 1816.20 | 1816.20 |
+| SelectTable | 100 | 1 | 460.68 | 460.68 |
+| SelectTable | 1000 | 1 | 1124.45 | 1124.45 |
+| SelectTable | 5000 | 1 | 5212.31 | 5212.31 |
+| InsertTable | 100 | 1 | 9.10 | 9.10 |
+| InsertTable | 1000 | 1 | 41.80 | 41.80 |
+| InsertTable | 5000 | 1 | 318.83 | 318.83 |

@@ -1,11 +1,11 @@
-import { FlinkSQL } from '../../../filters';
+import FlinkSQL from 'src/parser/flinksql';
 import {
     readSQL,
     benchmark,
     getReportTableHeader,
     getReportTableRow,
     exportReportTable,
-} from '../../../helper';
+} from 'test/helper';
 
 const features = {
     selectTable: readSQL(__dirname, 'selectTable.sql'),

@@ -1,4 +1,5 @@
-import { FlinkSQL, FlinkSqlSplitListener, FlinkSqlParserListener } from '../../filters';
+import FlinkSQL, { FlinkSqlSplitListener } from 'src/parser/flinksql';
+import { FlinkSqlParserListener } from 'src/lib/flinksql/FlinkSqlParserListener';
 
 const validSQL1 = `INSERT INTO country_page_view
 VALUES ('Chinese', 'mumiao', 18),

@@ -1,4 +1,4 @@
-import { FlinkSQL } from '../../filters';
+import FlinkSQL from 'src/parser/flinksql';
 
 describe('FlinkSQL Lexer tests', () => {
     const parser = new FlinkSQL();

@@ -1,9 +1,7 @@
-import {
-    FlinkSQL,
-    FlinkSqlParserListener,
-    FlinkSqlParserRuleContext,
-    ParseTreeListener,
-} from '../../filters';
+import { ParseTreeListener } from 'antlr4ts/tree/ParseTreeListener';
+import FlinkSQL from 'src/parser/flinksql';
+import { FlinkSqlParserListener } from 'src/lib/flinksql/FlinkSqlParserListener';
+import { TableExpressionContext } from 'src/lib/flinksql/FlinkSqlParser';
 
 describe('Flink SQL Listener Tests', () => {
     const expectTableName = 'user1';
@@ -15,9 +13,7 @@ describe('Flink SQL Listener Tests', () => {
     test('Listener enterTableName', async () => {
         let result = '';
         class MyListener implements FlinkSqlParserListener {
-            enterTableExpression = (
-                ctx: FlinkSqlParserRuleContext.TableExpressionContext
-            ): void => {
+            enterTableExpression = (ctx: TableExpressionContext): void => {
                 result = ctx.text.toLowerCase();
             };
         }

@@ -1,6 +1,7 @@
 import fs from 'fs';
 import path from 'path';
-import { CaretPosition, SyntaxContextType, FlinkSQL } from '../../../filters';
+import FlinkSQL from 'src/parser/flinksql';
+import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'multipleStatement.sql'),

@@ -1,7 +1,8 @@
 import fs from 'fs';
 import path from 'path';
-import { CaretPosition, SyntaxContextType, FlinkSQL } from '../../../filters';
-import { commentOtherLine } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
+import { commentOtherLine } from 'test/helper';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),

@@ -1,7 +1,8 @@
 import fs from 'fs';
 import path from 'path';
-import { FlinkSQL, CaretPosition } from '../../../filters';
-import { commentOtherLine } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { CaretPosition } from 'src/parser/common/basic-parser-types';
+import { commentOtherLine } from 'test/helper';
 
 const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');

@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const features = {
     table: readSQL(__dirname, 'alterTable.sql'),

@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 // 综合测试的 sql 不做切割
 const features = {

@@ -1,6 +1,6 @@
 import fs from 'fs';
 import path from 'path';
-import { FlinkSQL } from '../../../filters';
+import FlinkSQL from 'src/parser/flinksql';
 
 // 注释 sql 不做切割
 const features = {

@@ -1,6 +1,6 @@
 import fs from 'fs';
 import path from 'path';
-import { FlinkSQL } from '../../../filters';
+import FlinkSQL from 'src/parser/flinksql';
 
 // 综合测试的 sql 不做切割
 const features = {

@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const parser = new FlinkSQL();

@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const features = {
     describes: readSQL(__dirname, 'describe.sql'),

@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const features = {
     table: readSQL(__dirname, 'dropTable.sql'),

@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const features = {
     dtAddFiles: readSQL(__dirname, 'dtAddFile.sql'),

@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const features = {
     explains: readSQL(__dirname, 'explain.sql'),

@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const features = {
     expression: readSQL(__dirname, 'expression.sql'),

@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const parser = new FlinkSQL();

@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const parser = new FlinkSQL();

@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const features = {
     shows: readSQL(__dirname, 'show.sql'),

@@ -1,5 +1,5 @@
-import { FlinkSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import FlinkSQL from 'src/parser/flinksql';
+import { readSQL } from 'test/helper';
 
 const features = {
     uses: readSQL(__dirname, 'use.sql'),

@@ -1,4 +1,4 @@
-import { FlinkSQL } from '../../filters';
+import FlinkSQL from 'src/parser/flinksql';
 
 const randomText = `dhsdansdnkla ndjnsla ndnalks`;
 const unCompleteSQL = `CREATE TABLE`;

@@ -1,4 +1,6 @@
-import { FlinkSQL, AbstractParseTreeVisitor, FlinkSqlParserVisitor } from '../../filters';
+import FlinkSQL from 'src/parser/flinksql';
+import { FlinkSqlParserVisitor } from 'src/lib/flinksql/FlinkSqlParserVisitor';
+import { AbstractParseTreeVisitor } from 'antlr4ts/tree/AbstractParseTreeVisitor';
 
 describe('Flink SQL Visitor Tests', () => {
     const expectTableName = 'user1';

@@ -1,4 +1,5 @@
-import { HiveSQL, HiveSqlSplitListener, HiveSqlParserListener } from '../../filters';
+import HiveSQL, { HiveSqlSplitListener } from 'src/parser/hive';
+import { HiveSqlParserListener } from 'src/lib';
 
 const validSQL1 = `INSERT INTO country_page_view
 VALUES ('Chinese', 'mumiao', 18),

@@ -1,4 +1,4 @@
-import { HiveSQL } from '../../filters';
+import HiveSQL from 'src/parser/hive';
 
 describe('HiveSQL Lexer tests', () => {
     const parser = new HiveSQL();

@@ -1,5 +1,7 @@
-import { ParseTreeListener } from 'antlr4ts/tree';
-import { HiveSQL, HiveSqlParserListener, HiveSqlParserRuleContext } from '../../filters';
+import { ParseTreeListener } from 'antlr4ts/tree/ParseTreeListener';
+import HiveSQL from 'src/parser/hive';
+import { ProgramContext } from 'src/lib/hive/HiveSqlParser';
+import { HiveSqlParserListener } from 'src/lib/hive/HiveSqlParserListener';
 
 describe('HiveSQL Listener Tests', () => {
     const parser = new HiveSQL();
@@ -16,10 +18,7 @@ describe('HiveSQL Listener Tests', () => {
         }
         const listenTableName = new MyListener();
-        await parser.listen(
-            listenTableName as ParseTreeListener,
-            parseTree as HiveSqlParserRuleContext.ProgramContext
-        );
+        await parser.listen(listenTableName as ParseTreeListener, parseTree as ProgramContext);
         expect(result).toBe(expectTableName.toUpperCase());
     });
     test('Listener enterCreateTable', async () => {
@@ -33,10 +32,7 @@ describe('HiveSQL Listener Tests', () => {
         }
         const listenTableName = new MyListener();
-        await parser.listen(
-            listenTableName as ParseTreeListener,
-            parseTree as HiveSqlParserRuleContext.ProgramContext
-        );
+        await parser.listen(listenTableName as ParseTreeListener, parseTree as ProgramContext);
         expect(result).toBe('DROPTABLETABLE_NAME');
     });

@@ -1,6 +1,7 @@
 import fs from 'fs';
 import path from 'path';
-import { CaretPosition, SyntaxContextType, HiveSQL } from '../../../filters';
+import HiveSQL from 'src/parser/hive';
+import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'multipleStatement.sql'),

@@ -1,7 +1,8 @@
 import fs from 'fs';
 import path from 'path';
-import { CaretPosition, SyntaxContextType, HiveSQL } from '../../../filters';
-import { commentOtherLine } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
+import { commentOtherLine } from 'test/helper';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),

@@ -1,7 +1,8 @@
 import fs from 'fs';
 import path from 'path';
-import { CaretPosition, HiveSQL } from '../../../filters';
-import { commentOtherLine } from '../../../helper';
+import { commentOtherLine } from 'test/helper';
+import HiveSQL from 'src/parser/hive';
+import { CaretPosition } from 'src/parser/common/basic-parser-types';
 
 const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');

@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();

@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();

@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();

@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();

@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();

@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();

@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();

@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();

@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const features = {
     exports: readSQL(__dirname, 'export.sql'),

@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const features = {
     imports: readSQL(__dirname, 'import.sql'),

@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();

@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();

@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();

@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();

@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();

@@ -1,5 +1,5 @@
-import { HiveSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import HiveSQL from 'src/parser/hive';
+import { readSQL } from 'test/helper';
 
 const parser = new HiveSQL();

@@ -1,4 +1,4 @@
-import { HiveSQL } from '../../filters';
+import HiveSQL from 'src/parser/hive';
 
 const randomText = `dhsdansdnkla ndjnsla ndnalks`;
 const unCompleteSQL = `CREATE TABLE`;

@@ -1,9 +1,8 @@
-import {
-    HiveSQL,
-    HiveSqlParserVisitor,
-    AbstractParseTreeVisitor,
-    HiveSqlParserRuleContext,
-} from '../../filters';
+import { AbstractParseTreeVisitor } from 'antlr4ts/tree/AbstractParseTreeVisitor';
+
+import HiveSQL from 'src/parser/hive';
+import { HiveSqlParserVisitor } from 'src/lib/hive/HiveSqlParserVisitor';
+import { ProgramContext } from 'src/lib/hive/HiveSqlParser';
 
 describe('HiveSQL Visitor Tests', () => {
     const expectTableName = 'dm_gis.dlv_addr_tc_count';
@@ -27,7 +26,7 @@ describe('HiveSQL Visitor Tests', () => {
         }
         const visitor = new MyVisitor();
-        visitor.visit(parseTree as HiveSqlParserRuleContext.ProgramContext);
+        visitor.visit(parseTree as ProgramContext);
         expect(result).toBe(expectTableName);
     });

@@ -1,4 +1,5 @@
-import { ImpalaSQL, ImpalaSqlSplitListener, ImpalaSqlParserListener } from '../../filters';
+import ImpalaSQL, { ImpalaSqlSplitListener } from 'src/parser/impala';
+import { ImpalaSqlParserListener } from 'src/lib/impala/ImpalaSqlParserListener';
 
 const validSQL1 = `INSERT INTO country_page_view
 VALUES ('Chinese', 'mumiao', 18),

@@ -1,4 +1,4 @@
-import { ImpalaSQL } from '../../filters';
+import ImpalaSQL from 'src/parser/impala';
 
 describe('ImpalaSQL Lexer tests', () => {
     const parser = new ImpalaSQL();

@@ -1,4 +1,6 @@
-import { ImpalaSQL, ImpalaSqlParserListener, ParseTreeListener } from '../../filters';
+import ImpalaSQL from 'src/parser/impala';
+import { ImpalaSqlParserListener } from 'src/lib/impala/ImpalaSqlParserListener';
+import { ParseTreeListener } from 'antlr4ts/tree/ParseTreeListener';
 
 describe('impala SQL Listener Tests', () => {
     const expectTableName = 'user1';

@@ -1,6 +1,7 @@
 import fs from 'fs';
 import path from 'path';
-import { ImpalaSQL, CaretPosition, SyntaxContextType } from '../../../filters';
+import ImpalaSQL from 'src/parser/impala';
+import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'multipleStatement.sql'),

@@ -1,7 +1,8 @@
 import fs from 'fs';
 import path from 'path';
-import { ImpalaSQL, CaretPosition, SyntaxContextType } from '../../../filters';
-import { commentOtherLine } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
+import { commentOtherLine } from 'test/helper';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),

@@ -1,7 +1,8 @@
 import fs from 'fs';
 import path from 'path';
-import { ImpalaSQL, CaretPosition } from '../../../filters';
-import { commentOtherLine } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { CaretPosition } from 'src/parser/common/basic-parser-types';
+import { commentOtherLine } from 'test/helper';
 
 const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');

@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();

@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();

@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();

@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();

@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();

@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();

@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();

@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();

@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();

@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();

@@ -1,5 +1,5 @@
-import { ImpalaSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import ImpalaSQL from 'src/parser/impala';
+import { readSQL } from 'test/helper';
 
 const parser = new ImpalaSQL();

@@ -1,4 +1,4 @@
-import { ImpalaSQL } from '../../filters';
+import ImpalaSQL from 'src/parser/impala';
 
 const randomText = `dhsdansdnkla ndjnsla ndnalks`;
 const unCompleteSQL = `CREATE TABLE`;

@@ -1,4 +1,6 @@
-import { ImpalaSQL, ImpalaSqlParserVisitor, AbstractParseTreeVisitor } from '../../filters';
+import ImpalaSQL from 'src/parser/impala';
+import { AbstractParseTreeVisitor } from 'antlr4ts/tree/AbstractParseTreeVisitor';
+import { ImpalaSqlParserVisitor } from 'src/lib/impala/ImpalaSqlParserVisitor';
 
 describe('impala SQL Visitor Tests', () => {
     const expectTableName = 'user1';

@@ -1,5 +1,5 @@
-import { MySQL } from '../../filters';
-import { MysqlSplitListener, MySqlParserListener } from '../../filters';
+import MySQL, { MysqlSplitListener } from 'src/parser/mysql';
+import { MySqlParserListener } from 'src/lib/mysql/MySqlParserListener';
 
 const validSQL1 = `INSERT INTO country_page_view
 VALUES ('Chinese', 'mumiao', 18),

@@ -1,4 +1,4 @@
-import { MySQL } from '../../filters';
+import MySQL from 'src/parser/mysql';
 
 describe('MySQL Lexer tests', () => {
     const parser = new MySQL();

@@ -1,5 +1,6 @@
-import { MySQL } from '../../filters';
-import { MySqlParserListener, ParseTreeListener } from '../../filters';
+import MySQL from 'src/parser/mysql';
+import { MySqlParserListener } from 'src/lib/mysql/MySqlParserListener';
+import { ParseTreeListener } from 'antlr4ts/tree/ParseTreeListener';
 
 describe('MySQL Listener Tests', () => {
     const expectTableName = 'user1';

@@ -1,6 +1,7 @@
 import fs from 'fs';
 import path from 'path';
-import { MySQL, CaretPosition, SyntaxContextType } from '../../../filters';
+import MySQL from 'src/parser/mysql';
+import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'multipleStatement.sql'),

@@ -1,7 +1,8 @@
 import fs from 'fs';
 import path from 'path';
-import { MySQL, CaretPosition, SyntaxContextType } from '../../../filters';
-import { commentOtherLine } from '../../../helper';
+import MySQL from 'src/parser/mysql';
+import { SyntaxContextType, CaretPosition } from 'src/parser/common/basic-parser-types';
+import { commentOtherLine } from 'test/helper';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),

@@ -1,7 +1,8 @@
 import fs from 'fs';
 import path from 'path';
-import { MySQL, CaretPosition } from '../../../filters';
-import { commentOtherLine } from '../../../helper';
+import MySQL from 'src/parser/mysql';
+import { CaretPosition } from 'src/parser/common/basic-parser-types';
+import { commentOtherLine } from 'test/helper';
 
 const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');

@@ -1,4 +1,4 @@
-import { MySQL } from '../../filters';
+import MySQL from 'src/parser/mysql';
 
 describe('MySQL Syntax Tests', () => {
     const parser = new MySQL();

@@ -1,5 +1,5 @@
-import { MySQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import MySQL from 'src/parser/mysql';
+import { readSQL } from 'test/helper';
 
 const parser = new MySQL();

@@ -1,5 +1,5 @@
-import { MySQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import MySQL from 'src/parser/mysql';
+import { readSQL } from 'test/helper';
 
 const parser = new MySQL();

@@ -1,5 +1,5 @@
-import { MySQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import MySQL from 'src/parser/mysql';
+import { readSQL } from 'test/helper';
 
 const parser = new MySQL();

@@ -1,5 +1,5 @@
-import { MySQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import MySQL from 'src/parser/mysql';
+import { readSQL } from 'test/helper';
 
 const parser = new MySQL();

@@ -1,4 +1,4 @@
-import { MySQL } from '../../filters';
+import MySQL from 'src/parser/mysql';
 
 const randomText = `dhsdansdnkla ndjnsla ndnalks`;
 const unCompleteSQL = `CREATE TABLE`;

@@ -1,4 +1,6 @@
-import { MySQL, MySqlParserVisitor, AbstractParseTreeVisitor } from '../../filters';
+import MySQL from 'src/parser/mysql';
+import { MySqlParserVisitor } from 'src/lib/mysql/MySqlParserVisitor';
+import { AbstractParseTreeVisitor } from 'antlr4ts/tree/AbstractParseTreeVisitor';
 
 describe('MySQL Visitor Tests', () => {
     const expectTableName = 'user1';

@@ -1,4 +1,5 @@
-import { PostgresSQL, PgSqlSplitListener, PostgreSQLParserListener } from '../../filters';
+import PostgresSQL, { PgSqlSplitListener } from 'src/parser/pgsql';
+import { PostgreSQLParserListener } from 'src/lib/pgsql/PostgreSQLParserListener';
 
 const validSQL1 = `INSERT INTO country_page_view
 VALUES ('Chinese', 'mumiao', 18),

@@ -1,4 +1,4 @@
-import { PostgresSQL } from '../../filters';
+import PostgresSQL from 'src/parser/pgsql';
 
 describe('PostgresSQL Lexer tests', () => {
     const mysqlParser = new PostgresSQL();

@@ -1,4 +1,6 @@
-import { PostgresSQL, PostgreSQLParserListener, ParseTreeListener } from '../../filters';
+import PostgresSQL from 'src/parser/pgsql';
+import { PostgreSQLParserListener } from 'src/lib/pgsql/PostgreSQLParserListener';
+import { ParseTreeListener } from 'antlr4ts/tree/ParseTreeListener';
 
 describe('PostgresSQL Listener Tests', () => {
     const expectTableName = 'user1';

@@ -1,6 +1,7 @@
 import fs from 'fs';
 import path from 'path';
-import { CaretPosition, SyntaxContextType, PostgresSQL } from '../../../filters';
+import PostgresSQL from 'src/parser/pgsql';
+import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'multipleStatement.sql'),

@@ -1,7 +1,8 @@
 import fs from 'fs';
 import path from 'path';
-import { PostgresSQL, CaretPosition, SyntaxContextType } from '../../../filters';
-import { commentOtherLine } from '../../../helper';
+import PostgresSQL from 'src/parser/pgsql';
+import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
+import { commentOtherLine } from 'test/helper';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),

@@ -1,7 +1,8 @@
 import fs from 'fs';
 import path from 'path';
-import { PostgresSQL, CaretPosition } from '../../../filters';
-import { commentOtherLine } from '../../../helper';
+import PostgresSQL from 'src/parser/pgsql';
+import { CaretPosition } from 'src/parser/common/basic-parser-types';
+import { commentOtherLine } from 'test/helper';
 
 const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');

@@ -1,5 +1,5 @@
-import { PostgresSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import PostgresSQL from 'src/parser/pgsql';
+import { readSQL } from 'test/helper';
 
 const parser = new PostgresSQL();

@@ -1,5 +1,5 @@
-import { PostgresSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import PostgresSQL from 'src/parser/pgsql';
+import { readSQL } from 'test/helper';
 
 const parser = new PostgresSQL();

@@ -1,5 +1,5 @@
-import { PostgresSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import PostgresSQL from 'src/parser/pgsql';
+import { readSQL } from 'test/helper';
 
 const parser = new PostgresSQL();

@@ -1,5 +1,5 @@
-import { PostgresSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import PostgresSQL from 'src/parser/pgsql';
+import { readSQL } from 'test/helper';
 
 const parser = new PostgresSQL();

@@ -1,5 +1,5 @@
-import { PostgresSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import PostgresSQL from 'src/parser/pgsql';
+import { readSQL } from 'test/helper';
 
 const parser = new PostgresSQL();

@@ -1,5 +1,5 @@
-import { PostgresSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import PostgresSQL from 'src/parser/pgsql';
+import { readSQL } from 'test/helper';
 
 const parser = new PostgresSQL();

@@ -1,5 +1,5 @@
-import { PostgresSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import PostgresSQL from 'src/parser/pgsql';
+import { readSQL } from 'test/helper';
 
 const parser = new PostgresSQL();

@@ -1,5 +1,5 @@
-import { PostgresSQL } from '../../../filters';
-import { readSQL } from '../../../helper';
+import PostgresSQL from 'src/parser/pgsql';
+import { readSQL } from 'test/helper';
 
 const parser = new PostgresSQL();

@@ -1,4 +1,4 @@
-import { PostgresSQL } from '../../filters';
+import PostgresSQL from 'src/parser/pgsql';
 
 const randomText = `dhsdansdnkla ndjnsla ndnalks`;
 const unCompleteSQL = `CREATE TABLE`;

@@ -1,4 +1,6 @@
-import { PostgresSQL, AbstractParseTreeVisitor, PostgreSQLParserVisitor } from '../../filters';
+import PostgresSQL from 'src/parser/pgsql';
+import { AbstractParseTreeVisitor } from 'antlr4ts/tree/AbstractParseTreeVisitor';
+import { PostgreSQLParserVisitor } from 'src/lib/pgsql/PostgreSQLParserVisitor';
 
 describe('MySQL Visitor Tests', () => {
     const expectTableName = 'user1';

Some files were not shown because too many files have changed in this diff.