test: accurate description (#183)

* test: accurate description

* refactor: optimize the use of tableName and viewName for

---------

Co-authored-by: liuyi <liuyi@dtstack.com>
parent f9dbd9fc23
commit 0d9d3d65f3
@@ -104,11 +104,11 @@ statement
     | KW_ALTER KW_TABLE tableName
         KW_DROP (KW_COLUMN | KW_COLUMNS) (ifExists)?
         multipartIdentifierList
-    | KW_ALTER (KW_TABLE | KW_VIEW) (tableName | viewName)
+    | KW_ALTER (KW_TABLE tableName | KW_VIEW viewName)
        KW_RENAME KW_TO multipartIdentifier
-    | KW_ALTER (KW_TABLE | KW_VIEW) (tableName | viewName)
+    | KW_ALTER (KW_TABLE tableName | KW_VIEW viewName)
        KW_SET KW_TBLPROPERTIES propertyList
-    | KW_ALTER (KW_TABLE | KW_VIEW) (tableName | viewName)
+    | KW_ALTER (KW_TABLE tableName | KW_VIEW viewName)
        KW_UNSET KW_TBLPROPERTIES (ifExists)? propertyList
     | KW_ALTER KW_TABLE table=tableName
         (KW_ALTER | KW_CHANGE) KW_COLUMN? column=multipartIdentifier
@@ -124,11 +124,11 @@ statement
         KW_SET KW_SERDE stringLit (KW_WITH KW_SERDEPROPERTIES propertyList)?
     | KW_ALTER KW_TABLE tableName (partitionSpec)?
         KW_SET KW_SERDEPROPERTIES propertyList
-    | KW_ALTER (KW_TABLE | KW_VIEW) (tableName | viewName) KW_ADD (ifNotExists)?
+    | KW_ALTER (KW_TABLE tableName | KW_VIEW viewName) KW_ADD (ifNotExists)?
        partitionSpecLocation+
     | KW_ALTER KW_TABLE tableName
         partitionSpec KW_RENAME KW_TO partitionSpec
-    | KW_ALTER (KW_TABLE | KW_VIEW) (tableName | viewName)
+    | KW_ALTER (KW_TABLE tableName | KW_VIEW viewName)
        KW_DROP (ifExists)? partitionSpec (COMMA partitionSpec)* KW_PURGE?
     | KW_ALTER KW_TABLE tableName
         (partitionSpec)? KW_SET locationSpec
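The grammar change above is not just cosmetic: the old alternative (KW_TABLE | KW_VIEW) (tableName | viewName) accepted mismatched pairings, so a name after ALTER VIEW could be matched by the tableName rule (and vice versa). The refactored (KW_TABLE tableName | KW_VIEW viewName) ties each keyword to its own entity rule. A minimal Jest-style sketch of what this preserves, using the same SparkSQL class as the suites below; the two statements are illustrative examples, not fixtures from this commit:

import SparkSQL from '../../../../src/parser/spark';

describe('ALTER binds each keyword to its own entity rule', () => {
    const parser = new SparkSQL();

    it('ALTER TABLE takes a tableName', () => {
        // Valid Spark SQL; should produce no validation errors.
        expect(parser.validate('ALTER TABLE db.tb RENAME TO db.tb2;').length).toBe(0);
    });

    it('ALTER VIEW takes a viewName', () => {
        // Valid Spark SQL; should produce no validation errors.
        expect(parser.validate("ALTER VIEW db.v SET TBLPROPERTIES ('k'='v');").length).toBe(0);
    });
});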
File diff suppressed because one or more lines are too long
@@ -5,7 +5,7 @@ import HiveSQL from '../../../../src/parser/hive';
 
 const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');
 
-describe('Hive SQL Syntax Suggestion', () => {
+describe('Hive SQL Token Suggestion', () => {
     const parser = new HiveSQL();
 
     test('After ALTER', () => {
@@ -5,7 +5,7 @@ import SparkSQL from '../../../../src/parser/spark';
 
 const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');
 
-describe('Spark SQL Syntax Suggestion', () => {
+describe('Spark SQL Token Suggestion', () => {
     const parser = new SparkSQL();
 
     test('After ALTER', () => {
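The rename here is the "accurate description" of the commit title: these suites feed a caret position into the parser and assert on the keyword tokens it suggests, so "Token Suggestion" fits better than "Syntax Suggestion". A sketch of the pattern, assuming dt-sql-parser's getSuggestionAtCaretPosition API; the inline statement and expected keywords are illustrative, since the real fixture file is not shown in this diff:

import SparkSQL from '../../../../src/parser/spark';

describe('Spark SQL Token Suggestion', () => {
    const parser = new SparkSQL();

    test('After ALTER', () => {
        // Hypothetical one-line input; the real suite reads fixtures/tokenSuggestion.sql.
        const sql = 'ALTER ';
        const suggestion = parser.getSuggestionAtCaretPosition(sql, {
            lineNumber: 1,
            column: sql.length + 1,
        });
        // Expect entity keywords that can follow ALTER, e.g. TABLE and VIEW.
        expect(suggestion?.keywords).toEqual(expect.arrayContaining(['TABLE', 'VIEW']));
    });
});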
@@ -7,7 +7,7 @@ const features = {
     add: readSQL(__dirname, 'add.sql'),
 };
 
-describe('Spark add Syntax Tests', () => {
+describe('Spark Add Syntax Tests', () => {
     features.add.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
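The spec suites in this commit all follow one fixture-driven pattern: readSQL loads a .sql file from the fixtures directory, yields one string per statement, and each statement becomes its own it() case that must validate with zero errors. The repo's actual readSQL helper is not part of this diff; a self-contained stand-in could look like this:

import fs from 'fs';
import path from 'path';

// Illustrative stand-in for the repo's readSQL test helper: load a fixture
// file and split it into individual statements on semicolons.
function readSQL(dirname: string, fileName: string): string[] {
    const content = fs.readFileSync(path.join(dirname, 'fixtures', fileName), 'utf-8');
    return content
        .split(';')
        .map((statement) => statement.trim())
        .filter(Boolean)
        .map((statement) => statement + ';');
}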
@@ -9,7 +9,7 @@ const features = {
     alertView: readSQL(__dirname, 'alertView.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL Alert Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
@@ -7,7 +7,7 @@ const features = {
     analyzeTable: readSQL(__dirname, 'analyzeTable.sql'),
 };
 
-describe('Spark analyzeTable Syntax Tests', () => {
+describe('Spark Analyze Table Syntax Tests', () => {
     features.analyzeTable.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
@@ -7,7 +7,7 @@ const features = {
     cache: readSQL(__dirname, 'cache.sql'),
 };
 
-describe('Spark cache Syntax Tests', () => {
+describe('Spark Cache Syntax Tests', () => {
     features.cache.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
@@ -9,7 +9,7 @@ const features = {
     createView: readSQL(__dirname, 'createView.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL Create Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
@@ -7,7 +7,7 @@ const features = {
     describe: readSQL(__dirname, 'describe.sql'),
 };
 
-describe('Spark describe Syntax Tests', () => {
+describe('Spark Describe Syntax Tests', () => {
     features.describe.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
@@ -10,7 +10,7 @@ const features = {
     dropView: readSQL(__dirname, 'dropView.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL Drop Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
@@ -12,7 +12,7 @@ const features = {
     kwMultipleValues: readSQL(__dirname, 'kwMultipleValues.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL Keyword Has Multiple Values Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
@@ -7,7 +7,7 @@ const features = {
     list: readSQL(__dirname, 'list.sql'),
 };
 
-describe('Spark list Syntax Tests', () => {
+describe('Spark List Syntax Tests', () => {
     features.list.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
@@ -7,7 +7,7 @@ const features = {
     loadData: readSQL(__dirname, 'loadData.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL Load Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
@@ -7,7 +7,7 @@ const features = {
     refresh: readSQL(__dirname, 'refresh.sql'),
 };
 
-describe('Spark refresh Syntax Tests', () => {
+describe('Spark Refresh Syntax Tests', () => {
     features.refresh.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
@@ -7,7 +7,7 @@ const features = {
     reset: readSQL(__dirname, 'reset.sql'),
 };
 
-describe('Spark reset Syntax Tests', () => {
+describe('Spark Reset Syntax Tests', () => {
     features.reset.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
@@ -31,7 +31,7 @@ const features = {
     selectDistributeBy: readSQL(__dirname, 'selectDistributeBy.sql'),
     selectClusterBy: readSQL(__dirname, 'selectClusterBy.sql'),
 };
-describe('Spark select Syntax Tests', () => {
+describe('Spark Select Syntax Tests', () => {
     features.selectAggregateFn.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
@@ -7,7 +7,7 @@ const features = {
     set: readSQL(__dirname, 'set.sql'),
 };
 
-describe('Spark set Syntax Tests', () => {
+describe('Spark Set Syntax Tests', () => {
     features.set.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
@@ -7,7 +7,7 @@ const features = {
     show: readSQL(__dirname, 'show.sql'),
 };
 
-describe('Spark show Syntax Tests', () => {
+describe('Spark Show Syntax Tests', () => {
     features.show.forEach((itemSql) => {
         it(itemSql, () => {
             expect(parser.validate(itemSql).length).toBe(0);
@@ -12,7 +12,7 @@ const features = {
     truncateTable: readSQL(__dirname, 'truncateTable.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL About Table Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
@@ -7,7 +7,7 @@ const features = {
     useDatabase: readSQL(__dirname, 'useDatabase.sql'),
 };
 
-describe('SparkSQL Insert Syntax Tests', () => {
+describe('SparkSQL Use Database Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {