test: accurate description (#183)

* test: accurate description
* refactor: optimize the use of tableName and viewName for

Co-authored-by: liuyi <liuyi@dtstack.com>
@@ -7,7 +7,7 @@ const features = {
     add: readSQL(__dirname, 'add.sql'),
 };
 
-describe('Spark add Syntax Tests', () => {
+describe('Spark Add Syntax Tests', () => {
     features.add.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
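For context, every hunk in this commit edits a Jest-style spec that follows one of two shapes. The first shape, used by the specs that exercise a single feature, looks roughly like the sketch below. The readSQL call, the features object, and the validate() assertion are copied from the context lines above; the import paths and the SparkSQL class name are assumptions about the repo layout, not taken from the diff.

import SparkSQL from '../../../../src/parser/spark'; // assumed path
import { readSQL } from '../../../helper';            // assumed path

const parser = new SparkSQL();

const features = {
    // readSQL apparently returns one SQL statement per array entry
    add: readSQL(__dirname, 'add.sql'),
};

describe('Spark Add Syntax Tests', () => {
    features.add.forEach((itemSql) => {
        it(itemSql, () => {
            // a syntactically valid statement yields zero validation errors
            expect(parser.validate(itemSql).length).toBe(0);
        });
    });
});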
@@ -9,7 +9,7 @@ const features = {
     alertView: readSQL(__dirname, 'alertView.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL Alert Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
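The view- and table-level specs use the second shape, iterating every key of features rather than a single list. A sketch under the same path assumptions; note the assertion body is inferred from the sibling specs, since these hunks cut off at it(sql, ...).

import SparkSQL from '../../../../src/parser/spark'; // assumed path
import { readSQL } from '../../../helper';            // assumed path

const parser = new SparkSQL();

const features = {
    alertView: readSQL(__dirname, 'alertView.sql'),
};

describe('SparkSQL Alert Syntax Tests', () => {
    // loop over every feature key so new .sql fixtures are picked up automatically
    Object.keys(features).forEach((key) => {
        features[key].forEach((sql) => {
            it(sql, () => {
                expect(parser.validate(sql).length).toBe(0); // inferred assertion
            });
        });
    });
});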
@@ -7,7 +7,7 @@ const features = {
     analyzeTable: readSQL(__dirname, 'analyzeTable.sql'),
 };
 
-describe('Spark analyzeTable Syntax Tests', () => {
+describe('Spark Analyze Table Syntax Tests', () => {
     features.analyzeTable.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
@@ -7,7 +7,7 @@ const features = {
     cache: readSQL(__dirname, 'cache.sql'),
 };
 
-describe('Spark cache Syntax Tests', () => {
+describe('Spark Cache Syntax Tests', () => {
     features.cache.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
@@ -9,7 +9,7 @@ const features = {
     createView: readSQL(__dirname, 'createView.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL Create Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
@@ -7,7 +7,7 @@ const features = {
     describe: readSQL(__dirname, 'describe.sql'),
 };
 
-describe('Spark describe Syntax Tests', () => {
+describe('Spark Describe Syntax Tests', () => {
     features.describe.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
@@ -10,7 +10,7 @@ const features = {
     dropView: readSQL(__dirname, 'dropView.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL Drop Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
@@ -12,7 +12,7 @@ const features = {
     kwMultipleValues: readSQL(__dirname, 'kwMultipleValues.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL Keyword Has Multiple Values Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
@@ -7,7 +7,7 @@ const features = {
     list: readSQL(__dirname, 'list.sql'),
 };
 
-describe('Spark list Syntax Tests', () => {
+describe('Spark List Syntax Tests', () => {
     features.list.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
@@ -7,7 +7,7 @@ const features = {
     loadData: readSQL(__dirname, 'loadData.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL Load Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
@@ -7,7 +7,7 @@ const features = {
     refresh: readSQL(__dirname, 'refresh.sql'),
 };
 
-describe('Spark refresh Syntax Tests', () => {
+describe('Spark Refresh Syntax Tests', () => {
     features.refresh.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
@@ -7,7 +7,7 @@ const features = {
     reset: readSQL(__dirname, 'reset.sql'),
 };
 
-describe('Spark reset Syntax Tests', () => {
+describe('Spark Reset Syntax Tests', () => {
     features.reset.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
@@ -31,7 +31,7 @@ const features = {
     selectDistributeBy: readSQL(__dirname, 'selectDistributeBy.sql'),
     selectClusterBy: readSQL(__dirname, 'selectClusterBy.sql'),
 };
-describe('Spark select Syntax Tests', () => {
+describe('Spark Select Syntax Tests', () => {
     features.selectAggregateFn.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
@@ -7,7 +7,7 @@ const features = {
     set: readSQL(__dirname, 'set.sql'),
 };
 
-describe('Spark set Syntax Tests', () => {
+describe('Spark Set Syntax Tests', () => {
     features.set.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
@@ -7,7 +7,7 @@ const features = {
     show: readSQL(__dirname, 'show.sql'),
 };
 
-describe('Spark show Syntax Tests', () => {
+describe('Spark Show Syntax Tests', () => {
     features.show.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
@@ -12,7 +12,7 @@ const features = {
     truncateTable: readSQL(__dirname, 'truncateTable.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL About Table Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
@@ -7,7 +7,7 @@ const features = {
     useDatabase: readSQL(__dirname, 'useDatabase.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL Use Database Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {