Feat/spark sql auxiliary syntax (#165)
* feat: supplement SparkSQL auxiliary syntax unit tests
* chore: recompile the Spark listener & visitor

Co-authored-by: dilu <dilu@dtstack.com>
Parent: 05da14d007
Commit: d1c2920f80
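All of the new test files in this commit share one table-driven pattern: read a fixture of SQL statements and assert that the parser reports zero errors for each. Below is a minimal sketch of that pattern, assuming (as the diff itself shows) that the repo's readSQL test helper splits a fixture file into individual statements and that parser.validate returns an array of syntax errors:

import SparkSQL from '../../../../src/parser/spark';
import { readSQL } from '../../../helper';

// A single parser instance is reused across all cases.
const parser = new SparkSQL();

// readSQL is assumed to return one SQL string per statement in the fixture.
const statements: string[] = readSQL(__dirname, 'add.sql');

describe('Spark add Syntax Tests', () => {
    statements.forEach((itemSql) => {
        // Each statement becomes its own test case, named after the SQL itself;
        // an empty error list means the statement parsed cleanly.
        it(itemSql, () => {
            expect(parser.validate(itemSql).length).toBe(0);
        });
    });
});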
@@ -1,4 +1,4 @@
-// Generated from /Users/liuyi/Desktop/Projects/dtstack/dt-sql-parser/src/grammar/spark/SparkSqlParser.g4 by ANTLR 4.9.0-SNAPSHOT
+// Generated from /Users/edy/github/dt-sql-parser/src/grammar/spark/SparkSqlParser.g4 by ANTLR 4.9.0-SNAPSHOT
 import { ParseTreeListener } from "antlr4ts/tree/ParseTreeListener";
@@ -1,4 +1,4 @@
-// Generated from /Users/liuyi/Desktop/Projects/dtstack/dt-sql-parser/src/grammar/spark/SparkSqlParser.g4 by ANTLR 4.9.0-SNAPSHOT
+// Generated from /Users/edy/github/dt-sql-parser/src/grammar/spark/SparkSqlParser.g4 by ANTLR 4.9.0-SNAPSHOT
 import { ParseTreeVisitor } from "antlr4ts/tree/ParseTreeVisitor";
@@ -24,11 +24,11 @@ export default class SparkSQL extends BasicParser<SparkSqlLexer, ProgramContext,
     protected processCandidates(
         candidates: CandidatesCollection,
         allTokens: Token[],
-        caretTokenIndex: number
+        caretTokenIndex: number,
     ): Suggestions<Token> {
         return {
             syntax: [],
-            keywords: []
-        }
+            keywords: [],
+        };
     }
 }
@@ -15,7 +15,7 @@ describe('Spark SQL Listener Tests', () => {
                 result = ctx.text.toLowerCase();
             }
         }
-        const listenTableName: any = new MyListener();
+        const listenTableName = new MyListener();

         parser.listen(listenTableName, parserTree);
         expect(result).toBe(expectTableName);
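Note: dropping the explicit `: any` annotation lets TypeScript infer the concrete MyListener type, so the parser.listen call is type-checked instead of bypassed.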
test/parser/spark/syntax/addStatement.test.ts (new file, +16)
@@ -0,0 +1,16 @@
+import SparkSQL from '../../../../src/parser/spark';
+import { readSQL } from '../../../helper';
+
+const parser = new SparkSQL();
+
+const features = {
+    add: readSQL(__dirname, 'add.sql'),
+};
+
+describe('Spark add Syntax Tests', () => {
+    features.add.forEach((itemSql) => {
+        it(itemSql, () => {
+            expect(parser.validate(itemSql).length).toBe(0);
+        });
+    });
+});
test/parser/spark/syntax/analyzeTableStatement.test.ts (new file, +16)
@@ -0,0 +1,16 @@
+import SparkSQL from '../../../../src/parser/spark';
+import { readSQL } from '../../../helper';
+
+const parser = new SparkSQL();
+
+const features = {
+    analyzeTable: readSQL(__dirname, 'analyzeTable.sql'),
+};
+
+describe('Spark analyzeTable Syntax Tests', () => {
+    features.analyzeTable.forEach((itemSql) => {
+        it(itemSql, () => {
+            expect(parser.validate(itemSql).length).toBe(0);
+        });
+    });
+});
test/parser/spark/syntax/cacheStatement.test.ts (new file, +16)
@@ -0,0 +1,16 @@
+import SparkSQL from '../../../../src/parser/spark';
+import { readSQL } from '../../../helper';
+
+const parser = new SparkSQL();
+
+const features = {
+    cache: readSQL(__dirname, 'cache.sql'),
+};
+
+describe('Spark cache Syntax Tests', () => {
+    features.cache.forEach((itemSql) => {
+        it(itemSql, () => {
+            expect(parser.validate(itemSql).length).toBe(0);
+        });
+    });
+});
test/parser/spark/syntax/describeStatement.test.ts (new file, +16)
@@ -0,0 +1,16 @@
+import SparkSQL from '../../../../src/parser/spark';
+import { readSQL } from '../../../helper';
+
+const parser = new SparkSQL();
+
+const features = {
+    describe: readSQL(__dirname, 'describe.sql'),
+};
+
+describe('Spark describe Syntax Tests', () => {
+    features.describe.forEach((itemSql) => {
+        it(itemSql, () => {
+            expect(parser.validate(itemSql).length).toBe(0);
+        });
+    });
+});
test/parser/spark/syntax/fixtures/add.sql (new file, +34)
@@ -0,0 +1,34 @@
+
+-- ADD { FILE | FILES } resource_name [ ... ]
+
+ADD FILE /tmp/test;
+
+ADD FILE "/path/to/file/abc.txt";
+
+ADD FILE '/another/test.txt';
+
+ADD FILE "/path with space/abc.txt";
+
+ADD FILE "/path/to/some/directory";
+
+ADD FILES "/path with space/cde.txt" '/path with space/fgh.txt';
+
+-- ADD { JAR | JARS } file_name [ ... ]
+
+ADD JAR /tmp/test.jar;
+
+ADD JAR "/path/to/some.jar";
+
+ADD JAR '/some/other.jar';
+
+ADD JAR "/path with space/abc.jar";
+
+ADD JARS "/path with space/def.jar" '/path with space/ghi.jar';
+
+ADD JAR "ivy://group:module:version";
+
+ADD JAR "ivy://group:module:version?transitive=false"
+
+ADD JAR "ivy://group:module:version?transitive=true"
+
+ADD JAR "ivy://group:module:version?exclude=group:module&transitive=true"
test/parser/spark/syntax/fixtures/analyzeTable.sql (new file, +32)
@@ -0,0 +1,32 @@
+CREATE DATABASE school_db;
+USE school_db;
+
+CREATE TABLE teachers (name STRING, teacher_id INT);
+INSERT INTO teachers VALUES ('Tom', 1), ('Jerry', 2);
+
+CREATE TABLE students (name STRING, student_id INT) PARTITIONED BY (student_id);
+INSERT INTO students VALUES ('Mark', 111111), ('John', 222222);
+
+ANALYZE TABLE students COMPUTE STATISTICS NOSCAN;
+
+DESC EXTENDED students;
+
+ANALYZE TABLE students COMPUTE STATISTICS;
+
+DESC EXTENDED students;
+
+ANALYZE TABLE students PARTITION (student_id = 111111) COMPUTE STATISTICS;
+
+DESC EXTENDED students PARTITION (student_id = 111111);
+
+ANALYZE TABLE students COMPUTE STATISTICS FOR COLUMNS name;
+
+DESC EXTENDED students name;
+
+ANALYZE TABLES IN school_db COMPUTE STATISTICS NOSCAN;
+
+DESC EXTENDED teachers;
+
+ANALYZE TABLES COMPUTE STATISTICS;
+
+DESC EXTENDED teachers;
test/parser/spark/syntax/fixtures/cache.sql (new file, +17)
@@ -0,0 +1,17 @@
+-- CACHE LAZY TABLE testCache1 [ OPTIONS ( 'storageLevel' [ = ] value ) ] [ [ AS ] query ]
+
+CACHE TABLE testCache OPTIONS ('storageLevel' 'DISK_ONLY') SELECT * FROM testData;
+
+CACHE LAZY TABLE testCache1 SELECT * FROM testData;
+
+CACHE LAZY TABLE testCache2 AS SELECT * FROM testData;
+
+
+-- CLEAR CACHE
+
+CLEAR CACHE;
+
+
+-- UNCACHE TABLE [ IF EXISTS ] table_identifier
+
+UNCACHE TABLE t1;
test/parser/spark/syntax/fixtures/describe.sql (new file, +45)
@@ -0,0 +1,45 @@
+-- { DESC | DESCRIBE } DATABASE [ EXTENDED ] db_name
+
+DESCRIBE DATABASE employees;
+
+DESCRIBE DATABASE EXTENDED employees;
+
+DESC DATABASE deployment;
+
+
+-- { DESC | DESCRIBE } FUNCTION [ EXTENDED ] function_name
+
+DESC FUNCTION abs;
+
+DESC FUNCTION EXTENDED abs;
+
+DESC FUNCTION max;
+
+DESC FUNCTION EXTENDED explode;
+
+
+-- { DESC | DESCRIBE } [ QUERY ] input_statement
+
+DESCRIBE QUERY SELECT age, sum(age) FROM person GROUP BY age;
+
+DESCRIBE QUERY WITH all_names_cte
+AS (SELECT name from person) SELECT * FROM all_names_cte;
+
+DESC QUERY VALUES(100, 'John', 10000) AS employee(id, name, salary);
+
+DESC QUERY TABLE person;
+
+DESCRIBE FROM person SELECT age;
+
+
+-- { DESC | DESCRIBE } [ TABLE ] [ format ] table_identifier [ partition_spec ] [ col_name ]
+
+DESCRIBE TABLE customer;
+
+DESCRIBE TABLE salesdb.customer;
+
+DESCRIBE TABLE EXTENDED customer;
+
+DESCRIBE TABLE EXTENDED customer PARTITION (state = 'AR');
+
+DESCRIBE customer salesdb.customer.name;
test/parser/spark/syntax/fixtures/list.sql (new file, +12)
@@ -0,0 +1,12 @@
+-- LIST { FILE | FILES } file_name [ ... ]
+
+LIST FILE;
+
+LIST FILE /tmp/test /some/random/file /another/random/file;
+
+
+-- LIST { JAR | JARS } file_name [ ... ]
+
+LIST JAR;
+
+LIST JAR /tmp/test.jar /some/random.jar /another/random.jar;
test/parser/spark/syntax/fixtures/refresh.sql (new file, +17)
@@ -0,0 +1,17 @@
+-- REFRESH resource_path
+
+REFRESH "hdfs://path/to/table";
+
+
+-- REFRESH FUNCTION function_identifier
+
+REFRESH FUNCTION func1;
+
+REFRESH FUNCTION db1.func1;
+
+
+-- REFRESH [TABLE] table_identifier
+
+REFRESH TABLE tbl1;
+
+REFRESH TABLE tempDB.view1;
test/parser/spark/syntax/fixtures/reset.sql (new file, +9)
@@ -0,0 +1,9 @@
+-- RESET;
+
+-- RESET configuration_key;
+
+-- Reset any runtime configurations specific to the current session which were set via the SET command to their default values.
+RESET;
+
+-- If you start your application with --conf spark.foo=bar and set spark.foo=foobar in runtime, the example below will restore it to 'bar'. If spark.foo is not specified during starting, the example below will remove this config from the SQLConf. It will ignore nonexistent keys.
+RESET spark.abc;
test/parser/spark/syntax/fixtures/set.sql (new file, +15)
@@ -0,0 +1,15 @@
+-- SET
+-- SET [ -v ]
+-- SET property_key[ = property_value ]
+
+-- Set a property.
+SET spark.sql.variable.substitute=false;
+
+-- List all SQLConf properties with value and meaning.
+SET -v;
+
+-- List all SQLConf properties with value for current session.
+SET;
+
+-- List the value of specified property key.
+SET spark.sql.variable.substitute;
test/parser/spark/syntax/fixtures/show.sql (new file, +101)
@@ -0,0 +1,101 @@
+-- SHOW COLUMNS table_identifier [ database ]
+
+SHOW COLUMNS IN customer;
+
+SHOW COLUMNS IN salesdb.customer;
+
+SHOW COLUMNS IN customer IN salesdb;
+
+
+-- SHOW CREATE TABLE table_identifier [ AS SERDE ]
+
+SHOW CREATE TABLE test;
+
+SHOW CREATE TABLE test AS SERDE;
+
+
+-- SHOW { DATABASES | SCHEMAS } [ LIKE regex_pattern ]
+
+SHOW DATABASES;
+
+SHOW DATABASES LIKE 'pay*';
+
+SHOW SCHEMAS;
+
+
+-- SHOW [ function_kind ] FUNCTIONS [ { FROM | IN } database_name ] [ LIKE regex_pattern ]
+
+SHOW FUNCTIONS trim;
+
+SHOW SYSTEM FUNCTIONS concat;
+
+SHOW SYSTEM FUNCTIONS FROM salesdb LIKE 'max';
+
+SHOW FUNCTIONS LIKE 't*';
+
+SHOW FUNCTIONS LIKE 'yea*|windo*';
+
+SHOW FUNCTIONS LIKE 't[a-z][a-z][a-z]';
+
+
+-- SHOW PARTITIONS table_identifier [ partition_spec ]
+
+SHOW PARTITIONS customer;
+
+SHOW PARTITIONS salesdb.customer;
+
+SHOW PARTITIONS customer PARTITION (state = 'CA', city = 'Fremont');
+
+SHOW PARTITIONS customer PARTITION (state = 'CA');
+
+SHOW PARTITIONS customer PARTITION (city = 'San Jose');
+
+
+-- SHOW TABLE EXTENDED [ { IN | FROM } database_name ] LIKE regex_pattern
+-- [ partition_spec ]
+
+SHOW TABLE EXTENDED LIKE 'employee';
+
+SHOW TABLE EXTENDED LIKE 'employe*';
+
+SHOW TABLE EXTENDED IN default LIKE 'employee' PARTITION (grade=1);
+
+SHOW TABLE EXTENDED IN default LIKE 'empl*' PARTITION (grade=1);
+
+
+-- SHOW TABLES [ { FROM | IN } database_name ] [ LIKE regex_pattern ]
+
+SHOW TABLES;
+
+SHOW TABLES FROM userdb;
+
+SHOW TABLES IN userdb;
+
+SHOW TABLES FROM default LIKE 'sam*';
+
+SHOW TABLES LIKE 'sam*|suj';
+
+
+-- SHOW TBLPROPERTIES table_identifier
+-- [ ( unquoted_property_key | property_key_as_string_literal ) ]
+
+SHOW TBLPROPERTIES customer;
+
+SHOW TBLPROPERTIES salesdb.customer;
+
+SHOW TBLPROPERTIES customer (created.by.user);
+
+SHOW TBLPROPERTIES customer ('created.date');
+
+
+-- SHOW VIEWS [ { FROM | IN } database_name ] [ LIKE regex_pattern ]
+
+SHOW VIEWS;
+
+SHOW VIEWS FROM userdb;
+
+SHOW VIEWS IN global_temp;
+
+SHOW VIEWS FROM default LIKE 'sam*';
+
+SHOW VIEWS LIKE 'sam|suj|temp*';
test/parser/spark/syntax/listStatement.test.ts (new file, +16)
@@ -0,0 +1,16 @@
+import SparkSQL from '../../../../src/parser/spark';
+import { readSQL } from '../../../helper';
+
+const parser = new SparkSQL();
+
+const features = {
+    list: readSQL(__dirname, 'list.sql'),
+};
+
+describe('Spark list Syntax Tests', () => {
+    features.list.forEach((itemSql) => {
+        it(itemSql, () => {
+            expect(parser.validate(itemSql).length).toBe(0);
+        });
+    });
+});
test/parser/spark/syntax/refreshStatement.test.ts (new file, +16)
@@ -0,0 +1,16 @@
+import SparkSQL from '../../../../src/parser/spark';
+import { readSQL } from '../../../helper';
+
+const parser = new SparkSQL();
+
+const features = {
+    refresh: readSQL(__dirname, 'refresh.sql'),
+};
+
+describe('Spark refresh Syntax Tests', () => {
+    features.refresh.forEach((itemSql) => {
+        it(itemSql, () => {
+            expect(parser.validate(itemSql).length).toBe(0);
+        });
+    });
+});
test/parser/spark/syntax/resetStatement.test.ts (new file, +16)
@@ -0,0 +1,16 @@
+import SparkSQL from '../../../../src/parser/spark';
+import { readSQL } from '../../../helper';
+
+const parser = new SparkSQL();
+
+const features = {
+    reset: readSQL(__dirname, 'reset.sql'),
+};
+
+describe('Spark reset Syntax Tests', () => {
+    features.reset.forEach((itemSql) => {
+        it(itemSql, () => {
+            expect(parser.validate(itemSql).length).toBe(0);
+        });
+    });
+});
test/parser/spark/syntax/setStatement.test.ts (new file, +16)
@@ -0,0 +1,16 @@
+import SparkSQL from '../../../../src/parser/spark';
+import { readSQL } from '../../../helper';
+
+const parser = new SparkSQL();
+
+const features = {
+    set: readSQL(__dirname, 'set.sql'),
+};
+
+describe('Spark set Syntax Tests', () => {
+    features.set.forEach((itemSql) => {
+        it(itemSql, () => {
+            expect(parser.validate(itemSql).length).toBe(0);
+        });
+    });
+});
test/parser/spark/syntax/showStatement.test.ts (new file, +16)
@@ -0,0 +1,16 @@
+import SparkSQL from '../../../../src/parser/spark';
+import { readSQL } from '../../../helper';
+
+const parser = new SparkSQL();
+
+const features = {
+    show: readSQL(__dirname, 'show.sql'),
+};
+
+describe('Spark show Syntax Tests', () => {
+    features.show.forEach((itemSql) => {
+        it(itemSql, () => {
+            expect(parser.validate(itemSql).length).toBe(0);
+        });
+    });
+});
@@ -12,18 +12,18 @@ describe('Spark SQL Visitor Tests', () => {
     });

     test('Visitor visitRelationPrimary', () => {
-        let result = '';
         class MyVisitor extends AbstractParseTreeVisitor<any> implements SparkSqlParserVisitor<any> {
+            result: string = '';
             protected defaultResult() {
-                return result;
+                return this.result;
             }
             visitRelationPrimary = (ctx): void => {
-                result = ctx.text.toLowerCase();
+                this.result = ctx.text.toLowerCase();
             }
         }
-        const visitor: any = new MyVisitor();
+        const visitor = new MyVisitor();
         visitor.visit(parserTree);

-        expect(result).toBe(expectTableName);
+        expect(visitor.result).toBe(expectTableName);
     });
 });
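Note: moving `result` from a closed-over `let` to a typed field on the visitor means `defaultResult()` and `visitRelationPrimary` share state through `this`, removes the need for the `: any` annotation on the `visitor` binding, and lets the assertion read `visitor.result` directly.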