refactor: standard naming (#278)
* refactor: rename flinksql to flink
* refactor: rename pgsql to postgresql
* refactor: rename trinosql to trino
* refactor: replace all default exports with named exports
* refactor: rename basicParser to basicSQL
* refactor: rename basic-parser-types to types
* refactor: replace arrow functions with plain functions
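The change repeated across the test diffs below is the switch from a default export to a named export for each parser class, with the local test instance renamed from `parser` to the dialect name. A minimal before/after sketch under assumed module contents; only the `SparkSQL` name, the 'src/parser/spark' path, and the `validate()` call come from the diffs, and the `parse` helper at the end is purely hypothetical:

    // Before: the parser class was the module's default export,
    // so callers imported it without braces and could pick any local name.
    //   export default class SparkSQL { /* ... */ }
    //   import SparkSQL from 'src/parser/spark';

    // After: the class is a named export, imported by its exact name.
    //   export class SparkSQL { /* ... */ }
    import { SparkSQL } from 'src/parser/spark';

    const spark = new SparkSQL();

    // Per the tests below, validate() returns an array of problems;
    // an empty array means the statement parsed cleanly.
    const errors = spark.validate('SELECT * FROM tbl;');
    console.log(errors.length === 0 ? 'valid SQL' : 'invalid SQL');

    // The arrow-to-plain-function change, illustrated with a hypothetical helper:
    //   export const parse = (sql: string) => { /* ... */ };   // before
    //   export function parse(sql: string) { /* ... */ }       // after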
@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';
 
-const parser = new SparkSQL();
+const spark = new SparkSQL();
 
 const features = {
     add: readSQL(__dirname, 'add.sql'),
@@ -10,7 +10,7 @@ const features = {
 describe('Spark Add Syntax Tests', () => {
     features.add.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });

@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';
 
-const parser = new SparkSQL();
+const spark = new SparkSQL();
 
 const features = {
     alterDatabase: readSQL(__dirname, 'alterDatabase.sql'),
@@ -14,7 +14,7 @@ describe('SparkSQL Alter Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
-                expect(parser.validate(sql).length).toBe(0);
+                expect(spark.validate(sql).length).toBe(0);
             });
         });
     });

@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';
 
-const parser = new SparkSQL();
+const spark = new SparkSQL();
 
 const features = {
     analyzeTable: readSQL(__dirname, 'analyzeTable.sql'),
@@ -10,7 +10,7 @@ const features = {
 describe('Spark Analyze Table Syntax Tests', () => {
     features.analyzeTable.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });

@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';
 
-const parser = new SparkSQL();
+const spark = new SparkSQL();
 
 const features = {
     cache: readSQL(__dirname, 'cache.sql'),
@@ -10,7 +10,7 @@ const features = {
 describe('Spark Cache Syntax Tests', () => {
     features.cache.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });

@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';
 
-const parser = new SparkSQL();
+const spark = new SparkSQL();
 
 const features = {
     createDatabase: readSQL(__dirname, 'createDatabase.sql'),
@@ -14,7 +14,7 @@ describe('SparkSQL Create Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
-                expect(parser.validate(sql).length).toBe(0);
+                expect(spark.validate(sql).length).toBe(0);
             });
         });
     });

@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';
 
-const parser = new SparkSQL();
+const spark = new SparkSQL();
 
 const features = {
     describe: readSQL(__dirname, 'describe.sql'),
@@ -10,7 +10,7 @@ const features = {
 describe('Spark Describe Syntax Tests', () => {
     features.describe.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });

@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';
 
-const parser = new SparkSQL();
+const spark = new SparkSQL();
 
 const features = {
     dropDatabase: readSQL(__dirname, 'dropDatabase.sql'),
@@ -15,7 +15,7 @@ describe('SparkSQL Drop Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
-                expect(parser.validate(sql).length).toBe(0);
+                expect(spark.validate(sql).length).toBe(0);
             });
         });
     });

@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';
 
-const parser = new SparkSQL();
+const spark = new SparkSQL();
 
 const features = {
     insertIntoTable: readSQL(__dirname, 'insertIntoTable.sql'),
@@ -14,7 +14,7 @@ describe('SparkSQL Insert Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
-                expect(parser.validate(sql).length).toBe(0);
+                expect(spark.validate(sql).length).toBe(0);
            });
        });
    });

@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';
 
-const parser = new SparkSQL();
+const spark = new SparkSQL();
 
 /**
  * Keywords with multiple values
@@ -16,7 +16,7 @@ describe('SparkSQL Keyword Has Multiple Values Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
-                expect(parser.validate(sql).length).toBe(0);
+                expect(spark.validate(sql).length).toBe(0);
             });
         });
     });

@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';
 
-const parser = new SparkSQL();
+const spark = new SparkSQL();
 
 const features = {
     list: readSQL(__dirname, 'list.sql'),
@@ -10,7 +10,7 @@ const features = {
 describe('Spark List Syntax Tests', () => {
     features.list.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });

@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';
 
-const parser = new SparkSQL();
+const spark = new SparkSQL();
 
 const features = {
     loadData: readSQL(__dirname, 'loadData.sql'),
@@ -11,7 +11,7 @@ describe('SparkSQL Load Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
-                expect(parser.validate(sql).length).toBe(0);
+                expect(spark.validate(sql).length).toBe(0);
             });
         });
     });

@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';
 
-const parser = new SparkSQL();
+const spark = new SparkSQL();
 
 const features = {
     set: readSQL(__dirname, 'optimize.sql'),
@@ -10,7 +10,7 @@ const features = {
 describe('Spark Optimize Syntax Tests', () => {
     features.set.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });

@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';
 
-const parser = new SparkSQL();
+const spark = new SparkSQL();
 
 const features = {
     refresh: readSQL(__dirname, 'refresh.sql'),
@@ -10,7 +10,7 @@ const features = {
 describe('Spark Refresh Syntax Tests', () => {
     features.refresh.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });

@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';
 
-const parser = new SparkSQL();
+const spark = new SparkSQL();
 
 const features = {
     reset: readSQL(__dirname, 'reset.sql'),
@@ -10,7 +10,7 @@ const features = {
 describe('Spark Reset Syntax Tests', () => {
     features.reset.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });

@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';
 
-const parser = new SparkSQL();
+const spark = new SparkSQL();
 
 const features = {
     selectAggregateFn: readSQL(__dirname, 'selectAggregateFn.sql'),
@@ -34,132 +34,132 @@ const features = {
 describe('Spark Select Syntax Tests', () => {
     features.selectAggregateFn.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectCase.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectCET.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectWindowFn.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectWhere.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectUnPivot.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectTVF.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectTransform.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectTableSample.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectSortBy.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectPivot.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectOrderBy.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectOffset.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectLimit.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectLike.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectLateralView.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectLateralSubQuery.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectJoin.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectInlineTable.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectHiving.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectHint.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectGroupBy.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectFile.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectExplain.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectDistributeBy.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectClusterBy.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });

@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';
 
-const parser = new SparkSQL();
+const spark = new SparkSQL();
 
 const features = {
     set: readSQL(__dirname, 'set.sql'),
@@ -10,7 +10,7 @@ const features = {
 describe('Spark Set Syntax Tests', () => {
     features.set.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });

@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';
 
-const parser = new SparkSQL();
+const spark = new SparkSQL();
 
 const features = {
     show: readSQL(__dirname, 'show.sql'),
@@ -10,7 +10,7 @@ const features = {
 describe('Spark Show Syntax Tests', () => {
     features.show.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });

@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';
 
-const parser = new SparkSQL();
+const spark = new SparkSQL();
 
 const features = {
     createDataSourceTable: readSQL(__dirname, 'createDataSourceTable.sql'),
@@ -16,7 +16,7 @@ describe('SparkSQL About Table Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
-                expect(parser.validate(sql).length).toBe(0);
+                expect(spark.validate(sql).length).toBe(0);
             });
         });
     });

@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';
 
-const parser = new SparkSQL();
+const spark = new SparkSQL();
 
 const features = {
     useDatabase: readSQL(__dirname, 'useDatabase.sql'),
@@ -11,7 +11,7 @@ describe('SparkSQL Use Database Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
-                expect(parser.validate(sql).length).toBe(0);
+                expect(spark.validate(sql).length).toBe(0);
             });
         });
     });