test: accurate description (#183)

* test: accurate description

* refactor: optimize the use of tableName and viewName

---------

Co-authored-by: liuyi <liuyi@dtstack.com>
琉易 2023-10-18 10:53:43 +08:00 committed by GitHub
parent f9dbd9fc23
commit 0d9d3d65f3
22 changed files with 4801 additions and 4834 deletions


@@ -104,11 +104,11 @@ statement
     | KW_ALTER KW_TABLE tableName
         KW_DROP (KW_COLUMN | KW_COLUMNS) (ifExists)?
         multipartIdentifierList
-    | KW_ALTER (KW_TABLE | KW_VIEW) (tableName | viewName)
+    | KW_ALTER (KW_TABLE tableName | KW_VIEW viewName)
         KW_RENAME KW_TO multipartIdentifier
-    | KW_ALTER (KW_TABLE | KW_VIEW) (tableName | viewName)
+    | KW_ALTER (KW_TABLE tableName | KW_VIEW viewName)
         KW_SET KW_TBLPROPERTIES propertyList
-    | KW_ALTER (KW_TABLE | KW_VIEW) (tableName | viewName)
+    | KW_ALTER (KW_TABLE tableName | KW_VIEW viewName)
         KW_UNSET KW_TBLPROPERTIES (ifExists)? propertyList
     | KW_ALTER KW_TABLE table=tableName
         (KW_ALTER | KW_CHANGE) KW_COLUMN? column=multipartIdentifier
@@ -124,11 +124,11 @@ statement
         KW_SET KW_SERDE stringLit (KW_WITH KW_SERDEPROPERTIES propertyList)?
     | KW_ALTER KW_TABLE tableName (partitionSpec)?
         KW_SET KW_SERDEPROPERTIES propertyList
-    | KW_ALTER (KW_TABLE | KW_VIEW) (tableName | viewName) KW_ADD (ifNotExists)?
+    | KW_ALTER (KW_TABLE tableName | KW_VIEW viewName) KW_ADD (ifNotExists)?
         partitionSpecLocation+
     | KW_ALTER KW_TABLE tableName
         partitionSpec KW_RENAME KW_TO partitionSpec
-    | KW_ALTER (KW_TABLE | KW_VIEW) (tableName | viewName)
+    | KW_ALTER (KW_TABLE tableName | KW_VIEW viewName)
         KW_DROP (ifExists)? partitionSpec (COMMA partitionSpec)* KW_PURGE?
     | KW_ALTER KW_TABLE tableName
         (partitionSpec)? KW_SET locationSpec
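For valid SQL the refactor is behavior-preserving; it only stops KW_TABLE from pairing with viewName (and KW_VIEW with tableName), so downstream entity collection can tell tables and views apart from the parse tree alone. A minimal sketch of how the affected alternatives are exercised, reusing the SparkSQL class and validate() API from the test files in this commit (the import path mirrors those tests; the statements are illustrative, not fixture contents):

// Sketch only: SparkSQL and validate() appear in this commit's tests;
// the example statements below are assumptions, not from the fixtures.
import SparkSQL from '../../../../src/parser/spark';

const parser = new SparkSQL();

// Both statements match the same RENAME alternative, but the identifier now
// parses as tableName after KW_TABLE and as viewName after KW_VIEW.
const statements = [
    'ALTER TABLE old_tbl RENAME TO new_tbl;',
    'ALTER VIEW old_view RENAME TO new_view;',
];
statements.forEach((sql) => {
    console.log(sql, '=>', parser.validate(sql).length === 0 ? 'valid' : 'invalid');
});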

Two file diffs suppressed because one or more lines are too long.


@@ -5,7 +5,7 @@ import HiveSQL from '../../../../src/parser/hive';
 const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');
 
-describe('Hive SQL Syntax Suggestion', () => {
+describe('Hive SQL Token Suggestion', () => {
     const parser = new HiveSQL();
 
     test('After ALTER', () => {


@@ -5,7 +5,7 @@ import SparkSQL from '../../../../src/parser/spark';
 const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');
 
-describe('Spark SQL Syntax Suggestion', () => {
+describe('Spark SQL Token Suggestion', () => {
     const parser = new SparkSQL();
 
     test('After ALTER', () => {
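Both suggestion suites drive the same entry point with a caret position. A hedged sketch of the 'After ALTER' case, assuming dt-sql-parser's getSuggestionAtCaretPosition(sql, caretPosition) signature (the SQL text and coordinates are illustrative, not the fixture's contents):

import SparkSQL from '../../../../src/parser/spark';

const parser = new SparkSQL();

// Assumption: the caret sits right after 'ALTER ' on line 1 (column is
// 1-based), and the parser suggests follow-up keywords such as TABLE or VIEW.
const suggestion = parser.getSuggestionAtCaretPosition('ALTER ', {
    lineNumber: 1,
    column: 7,
});
console.log(suggestion?.keywords);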


@@ -7,7 +7,7 @@ const features = {
     add: readSQL(__dirname, 'add.sql'),
 };
 
-describe('Spark add Syntax Tests', () => {
+describe('Spark Add Syntax Tests', () => {
     features.add.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
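Every syntax suite that follows has this fixture-driven shape: readSQL loads a .sql file from fixtures/ and yields one statement per it() block, and each statement must validate with zero errors. A hedged sketch of such a helper (the repo's real readSQL lives in its test helpers; splitting on semicolons is an assumption):

import fs from 'fs';
import path from 'path';

// Sketch of a readSQL-style helper: read a fixture and return one entry
// per SQL statement so each statement becomes its own test case.
function readSQL(dirname: string, fileName: string): string[] {
    const content = fs.readFileSync(path.join(dirname, 'fixtures', fileName), 'utf-8');
    return content
        .split(';')
        .map((sql) => sql.trim())
        .filter((sql) => sql.length > 0)
        .map((sql) => `${sql};`);
}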


@@ -9,7 +9,7 @@ const features = {
     alertView: readSQL(__dirname, 'alertView.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL Alert Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {


@@ -7,7 +7,7 @@ const features = {
     analyzeTable: readSQL(__dirname, 'analyzeTable.sql'),
 };
 
-describe('Spark analyzeTable Syntax Tests', () => {
+describe('Spark Analyze Table Syntax Tests', () => {
     features.analyzeTable.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);


@@ -7,7 +7,7 @@ const features = {
     cache: readSQL(__dirname, 'cache.sql'),
 };
 
-describe('Spark cache Syntax Tests', () => {
+describe('Spark Cache Syntax Tests', () => {
     features.cache.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);


@@ -9,7 +9,7 @@ const features = {
     createView: readSQL(__dirname, 'createView.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL Create Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {


@@ -7,7 +7,7 @@ const features = {
     describe: readSQL(__dirname, 'describe.sql'),
 };
 
-describe('Spark describe Syntax Tests', () => {
+describe('Spark Describe Syntax Tests', () => {
     features.describe.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);


@@ -10,7 +10,7 @@ const features = {
     dropView: readSQL(__dirname, 'dropView.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL Drop Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {


@@ -12,7 +12,7 @@ const features = {
     kwMultipleValues: readSQL(__dirname, 'kwMultipleValues.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL Keyword Has Multiple Values Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {


@@ -7,7 +7,7 @@ const features = {
     list: readSQL(__dirname, 'list.sql'),
 };
 
-describe('Spark list Syntax Tests', () => {
+describe('Spark List Syntax Tests', () => {
     features.list.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);


@@ -7,7 +7,7 @@ const features = {
     loadData: readSQL(__dirname, 'loadData.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL Load Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {


@@ -7,7 +7,7 @@ const features = {
     refresh: readSQL(__dirname, 'refresh.sql'),
 };
 
-describe('Spark refresh Syntax Tests', () => {
+describe('Spark Refresh Syntax Tests', () => {
     features.refresh.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);


@@ -7,7 +7,7 @@ const features = {
     reset: readSQL(__dirname, 'reset.sql'),
 };
 
-describe('Spark reset Syntax Tests', () => {
+describe('Spark Reset Syntax Tests', () => {
     features.reset.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);


@@ -31,7 +31,7 @@ const features = {
     selectDistributeBy: readSQL(__dirname, 'selectDistributeBy.sql'),
     selectClusterBy: readSQL(__dirname, 'selectClusterBy.sql'),
 };
-describe('Spark select Syntax Tests', () => {
+describe('Spark Select Syntax Tests', () => {
     features.selectAggregateFn.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);


@@ -7,7 +7,7 @@ const features = {
     set: readSQL(__dirname, 'set.sql'),
 };
 
-describe('Spark set Syntax Tests', () => {
+describe('Spark Set Syntax Tests', () => {
     features.set.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);


@@ -7,7 +7,7 @@ const features = {
     show: readSQL(__dirname, 'show.sql'),
 };
 
-describe('Spark show Syntax Tests', () => {
+describe('Spark Show Syntax Tests', () => {
     features.show.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);


@@ -12,7 +12,7 @@ const features = {
     truncateTable: readSQL(__dirname, 'truncateTable.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL About Table Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {


@@ -7,7 +7,7 @@ const features = {
     useDatabase: readSQL(__dirname, 'useDatabase.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL Use Database Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {