refactor: standard naming (#278)
* refactor: rename flinksql to flink
* refactor: rename pgsql to postgresql
* refactor: rename trinosql to trino
* refactor: replace all default exports with named export
* refactor: rename basicParser to basicSQL
* refactor: rename basic-parser-types to types
* refactor: replace arrow func with plain func
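Every hunk below applies the same mechanical migration to a Spark test file: the default import becomes a named import, the shared `basic-parser-types` module is referenced by its new name `types`, and the local `parser` variable is renamed to `spark`. A minimal before/after sketch of the pattern (an illustrative composite, not a verbatim file from this commit):

// Before: default export, generic `parser` variable, old module path
import SparkSQL from 'src/parser/spark';
import { EntityContextType } from 'src/parser/common/basic-parser-types';
const parser = new SparkSQL();

// After: named export, dialect-named `spark` variable, renamed `types` module
import { SparkSQL } from 'src/parser/spark';
import { EntityContextType } from 'src/parser/common/types';
const spark = new SparkSQL();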
@@ -1,10 +1,9 @@
 import fs from 'fs';
 import path from 'path';
-import SparkSQL from 'src/parser/spark';
-import { SparkSqlSplitListener, SparkEntityCollector } from 'src/parser/spark';
+import { SparkSQL, SparkSqlSplitListener, SparkEntityCollector } from 'src/parser/spark';
 import { ParseTreeListener } from 'antlr4ng';
 import { SparkSqlParserListener } from 'src/lib/spark/SparkSqlParserListener';
-import { EntityContextType } from 'src/parser/common/basic-parser-types';
+import { EntityContextType } from 'src/parser/common/types';
 import { StmtContextType } from 'src/parser/common/entityCollector';

 const commonSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'common.sql'), 'utf-8');
@@ -1,4 +1,4 @@
-import SparkSQL, { SparkSqlSplitListener } from 'src/parser/spark';
+import { SparkSQL, SparkSqlSplitListener } from 'src/parser/spark';
 import { SparkSqlParserListener } from 'src/lib/spark/SparkSqlParserListener';

 const validSQL1 = `INSERT INTO country_page_view
@@ -1,17 +1,17 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';

 describe('SparkSQL Lexer tests', () => {
-    const parser = new SparkSQL();
+    const spark = new SparkSQL();

     test('select id,name from user1;', () => {
         const sql = `select id,name from user1;`;
-        const tokens = parser.getAllTokens(sql);
+        const tokens = spark.getAllTokens(sql);
         expect(tokens.length).toBe(10);
     });

     test('SELECT * FROM t WHERE x = 1 AND y = 2;', () => {
         const sql = `SELECT * FROM t WHERE x = 1 AND y = 2;`;
-        const tokens = parser.getAllTokens(sql);
+        const tokens = spark.getAllTokens(sql);
         expect(tokens.length).toBe(24);
     });
 });
@@ -1,13 +1,13 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { SparkSqlParserListener } from 'src/lib/spark/SparkSqlParserListener';
 import { ParseTreeListener } from 'antlr4ng';

 describe('Spark SQL Listener Tests', () => {
     const expectTableName = 'user1';
     const sql = `select id,name,sex from ${expectTableName};`;
-    const parser = new SparkSQL();
+    const spark = new SparkSQL();

-    const parseTree = parser.parse(sql);
+    const parseTree = spark.parse(sql);

     test('Listener exitTableName', () => {
         let result = '';
@@ -22,7 +22,7 @@ describe('Spark SQL Listener Tests', () => {
         }
         const listenTableName = new MyListener();

-        parser.listen(listenTableName as ParseTreeListener, parseTree);
+        spark.listen(listenTableName as ParseTreeListener, parseTree);
         expect(result).toBe(expectTableName);
     });

@@ -43,7 +43,7 @@ describe('Spark SQL Listener Tests', () => {
         ];

         const sql = singleStatementArr.join('\n');
-        const sqlSlices = parser.splitSQLByStatement(sql);
+        const sqlSlices = spark.splitSQLByStatement(sql);

         expect(sqlSlices).not.toBeNull();

@@ -1,7 +1,7 @@
 import fs from 'fs';
 import path from 'path';
-import SparkSQL from 'src/parser/spark';
-import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
+import { SparkSQL } from 'src/parser/spark';
+import { CaretPosition, EntityContextType } from 'src/parser/common/types';

 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
@@ -9,14 +9,14 @@ const syntaxSql = fs.readFileSync(
 );

 describe('SparkSQL Multiple Statements Syntax Suggestion', () => {
-    const parser = new SparkSQL();
+    const spark = new SparkSQL();

     test('Create table ', () => {
         const pos: CaretPosition = {
             lineNumber: 1,
             column: 14,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
+        const syntaxes = spark.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
         const suggestion = syntaxes?.find(
             (syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
         );
@@ -30,7 +30,7 @@ describe('SparkSQL Multiple Statements Syntax Suggestion', () => {
             lineNumber: 9,
             column: 18,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
+        const syntaxes = spark.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
         const suggestion = syntaxes?.find(
             (syn) => syn.syntaxContextType === EntityContextType.TABLE
         );
@@ -44,7 +44,7 @@ describe('SparkSQL Multiple Statements Syntax Suggestion', () => {
             lineNumber: 15,
             column: 13,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
+        const syntaxes = spark.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
         const suggestion = syntaxes?.find(
             (syn) => syn.syntaxContextType === EntityContextType.TABLE
         );
@@ -58,7 +58,7 @@ describe('SparkSQL Multiple Statements Syntax Suggestion', () => {
             lineNumber: 21,
             column: 65,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
+        const syntaxes = spark.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
         const suggestion = syntaxes?.find(
             (syn) => syn.syntaxContextType === EntityContextType.TABLE
         );
@@ -1,7 +1,7 @@
 import fs from 'fs';
 import path from 'path';
-import SparkSQL from 'src/parser/spark';
-import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
+import { SparkSQL } from 'src/parser/spark';
+import { CaretPosition, EntityContextType } from 'src/parser/common/types';
 import { commentOtherLine } from 'test/helper';

 const syntaxSql = fs.readFileSync(
@@ -9,7 +9,7 @@ const syntaxSql = fs.readFileSync(
     'utf-8'
 );

-describe('PostgreSQL Syntax Suggestion with collect entity', () => {
+describe('PostgreSql Syntax Suggestion with collect entity', () => {
     const spark = new SparkSQL();

     test('select with no column', () => {
@@ -1,7 +1,7 @@
 import fs from 'fs';
 import path from 'path';
-import SparkSQL from 'src/parser/spark';
-import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
+import { SparkSQL } from 'src/parser/spark';
+import { CaretPosition, EntityContextType } from 'src/parser/common/types';
 import { commentOtherLine } from 'test/helper';

 const syntaxSql = fs.readFileSync(
@@ -10,12 +10,12 @@ const syntaxSql = fs.readFileSync(
 );

 describe('Spark SQL Syntax Suggestion', () => {
-    const parser = new SparkSQL();
+    const spark = new SparkSQL();

     test('Validate Syntax SQL', () => {
-        expect(parser.validate(syntaxSql).length).not.toBe(0);
-        expect(parser.validate(syntaxSql).length).not.toBe(0);
-        expect(parser.validate(syntaxSql).length).not.toBe(0);
+        expect(spark.validate(syntaxSql).length).not.toBe(0);
+        expect(spark.validate(syntaxSql).length).not.toBe(0);
+        expect(spark.validate(syntaxSql).length).not.toBe(0);
     });

     test('Insert table ', () => {
@@ -23,7 +23,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 1,
             column: 18,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -40,7 +40,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 3,
             column: 18,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -57,7 +57,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 5,
             column: 17,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -74,7 +74,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 7,
             column: 26,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -91,7 +91,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 9,
             column: 28,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -108,7 +108,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 11,
             column: 15,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -125,7 +125,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 13,
             column: 20,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -142,7 +142,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 15,
             column: 27,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -159,7 +159,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 17,
             column: 19,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -176,7 +176,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 19,
             column: 26,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -193,7 +193,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 21,
             column: 63,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -210,7 +210,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 23,
             column: 55,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -227,7 +227,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 25,
             column: 39,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -244,7 +244,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 27,
             column: 48,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -261,7 +261,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 29,
             column: 49,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -278,7 +278,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 31,
             column: 41,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -295,7 +295,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 33,
             column: 24,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -312,7 +312,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 35,
             column: 29,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -329,7 +329,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 37,
             column: 8,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -346,7 +346,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 39,
             column: 13,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -363,7 +363,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 41,
             column: 8,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -380,7 +380,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 43,
             column: 13,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -397,7 +397,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 45,
             column: 32,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -414,7 +414,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 47,
             column: 39,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -431,7 +431,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 49,
             column: 37,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -448,7 +448,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 51,
             column: 31,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -465,7 +465,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 53,
             column: 29,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -482,7 +482,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 55,
             column: 30,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -499,7 +499,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 57,
             column: 32,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -516,7 +516,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 59,
             column: 36,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -533,7 +533,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 61,
             column: 32,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -550,7 +550,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 63,
             column: 15,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -567,7 +567,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 65,
             column: 26,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -584,7 +584,7 @@ describe('Spark SQL Syntax Suggestion', () => {
             lineNumber: 67,
             column: 33,
         };
-        const syntaxes = parser.getSuggestionAtCaretPosition(
+        const syntaxes = spark.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
@@ -1,20 +1,20 @@
 import fs from 'fs';
 import path from 'path';
-import SparkSQL from 'src/parser/spark';
-import { CaretPosition } from 'src/parser/common/basic-parser-types';
+import { SparkSQL } from 'src/parser/spark';
+import { CaretPosition } from 'src/parser/common/types';
 import { commentOtherLine } from 'test/helper';

 const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');

 describe('Spark SQL Token Suggestion', () => {
-    const parser = new SparkSQL();
+    const spark = new SparkSQL();

     test('After ALTER', () => {
         const pos: CaretPosition = {
             lineNumber: 1,
             column: 7,
         };
-        const suggestion = parser.getSuggestionAtCaretPosition(
+        const suggestion = spark.getSuggestionAtCaretPosition(
             commentOtherLine(tokenSql, pos.lineNumber),
             pos
         )?.keywords;
@@ -35,7 +35,7 @@ describe('Spark SQL Token Suggestion', () => {
             lineNumber: 3,
             column: 8,
         };
-        const suggestion = parser.getSuggestionAtCaretPosition(
+        const suggestion = spark.getSuggestionAtCaretPosition(
             commentOtherLine(tokenSql, pos.lineNumber),
             pos
         )?.keywords;
@@ -62,7 +62,7 @@ describe('Spark SQL Token Suggestion', () => {
             lineNumber: 5,
             column: 8,
         };
-        const suggestion = parser.getSuggestionAtCaretPosition(
+        const suggestion = spark.getSuggestionAtCaretPosition(
             commentOtherLine(tokenSql, pos.lineNumber),
             pos
         )?.keywords;
@@ -75,7 +75,7 @@ describe('Spark SQL Token Suggestion', () => {
             lineNumber: 7,
             column: 10,
         };
-        const suggestion = parser.getSuggestionAtCaretPosition(
+        const suggestion = spark.getSuggestionAtCaretPosition(
             commentOtherLine(tokenSql, pos.lineNumber),
             pos
         )?.keywords;
@@ -101,7 +101,7 @@ describe('Spark SQL Token Suggestion', () => {
             lineNumber: 9,
             column: 6,
         };
-        const suggestion = parser.getSuggestionAtCaretPosition(
+        const suggestion = spark.getSuggestionAtCaretPosition(
             commentOtherLine(tokenSql, pos.lineNumber),
             pos
         )?.keywords;
@@ -125,7 +125,7 @@ describe('Spark SQL Token Suggestion', () => {
             lineNumber: 11,
             column: 8,
         };
-        const suggestion = parser.getSuggestionAtCaretPosition(
+        const suggestion = spark.getSuggestionAtCaretPosition(
             commentOtherLine(tokenSql, pos.lineNumber),
             pos
         )?.keywords;
@@ -138,7 +138,7 @@ describe('Spark SQL Token Suggestion', () => {
             lineNumber: 13,
             column: 6,
         };
-        const suggestion = parser.getSuggestionAtCaretPosition(
+        const suggestion = spark.getSuggestionAtCaretPosition(
             commentOtherLine(tokenSql, pos.lineNumber),
             pos
         )?.keywords;
@@ -151,7 +151,7 @@ describe('Spark SQL Token Suggestion', () => {
             lineNumber: 15,
             column: 6,
         };
-        const suggestion = parser.getSuggestionAtCaretPosition(
+        const suggestion = spark.getSuggestionAtCaretPosition(
             commentOtherLine(tokenSql, pos.lineNumber),
             pos
         )?.keywords;
@@ -190,7 +190,7 @@ describe('Spark SQL Token Suggestion', () => {
             lineNumber: 17,
             column: 8,
         };
-        const suggestion = parser.getSuggestionAtCaretPosition(
+        const suggestion = spark.getSuggestionAtCaretPosition(
             commentOtherLine(tokenSql, pos.lineNumber),
             pos
         )?.keywords;
@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';

-const parser = new SparkSQL();
+const spark = new SparkSQL();

 const features = {
     add: readSQL(__dirname, 'add.sql'),
@@ -10,7 +10,7 @@ const features = {
 describe('Spark Add Syntax Tests', () => {
     features.add.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });
@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';

-const parser = new SparkSQL();
+const spark = new SparkSQL();

 const features = {
     alterDatabase: readSQL(__dirname, 'alterDatabase.sql'),
@@ -14,7 +14,7 @@ describe('SparkSQL Alter Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
-                expect(parser.validate(sql).length).toBe(0);
+                expect(spark.validate(sql).length).toBe(0);
             });
         });
     });
@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';

-const parser = new SparkSQL();
+const spark = new SparkSQL();

 const features = {
     analyzeTable: readSQL(__dirname, 'analyzeTable.sql'),
@@ -10,7 +10,7 @@ const features = {
 describe('Spark Analyze Table Syntax Tests', () => {
     features.analyzeTable.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });
@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';

-const parser = new SparkSQL();
+const spark = new SparkSQL();

 const features = {
     cache: readSQL(__dirname, 'cache.sql'),
@@ -10,7 +10,7 @@ const features = {
 describe('Spark Cache Syntax Tests', () => {
     features.cache.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });
@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';

-const parser = new SparkSQL();
+const spark = new SparkSQL();

 const features = {
     createDatabase: readSQL(__dirname, 'createDatabase.sql'),
@@ -14,7 +14,7 @@ describe('SparkSQL Create Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
-                expect(parser.validate(sql).length).toBe(0);
+                expect(spark.validate(sql).length).toBe(0);
             });
         });
     });
@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';

-const parser = new SparkSQL();
+const spark = new SparkSQL();

 const features = {
     describe: readSQL(__dirname, 'describe.sql'),
@@ -10,7 +10,7 @@ const features = {
 describe('Spark Describe Syntax Tests', () => {
     features.describe.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });
@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';

-const parser = new SparkSQL();
+const spark = new SparkSQL();

 const features = {
     dropDatabase: readSQL(__dirname, 'dropDatabase.sql'),
@@ -15,7 +15,7 @@ describe('SparkSQL Drop Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
-                expect(parser.validate(sql).length).toBe(0);
+                expect(spark.validate(sql).length).toBe(0);
             });
         });
     });
@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';

-const parser = new SparkSQL();
+const spark = new SparkSQL();

 const features = {
     insertIntoTable: readSQL(__dirname, 'insertIntoTable.sql'),
@@ -14,7 +14,7 @@ describe('SparkSQL Insert Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
-                expect(parser.validate(sql).length).toBe(0);
+                expect(spark.validate(sql).length).toBe(0);
             });
         });
     });
@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';

-const parser = new SparkSQL();
+const spark = new SparkSQL();

 /**
  * 关键词有多个值
@@ -16,7 +16,7 @@ describe('SparkSQL Keyword Has Multiple Values Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
-                expect(parser.validate(sql).length).toBe(0);
+                expect(spark.validate(sql).length).toBe(0);
             });
         });
     });
@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';

-const parser = new SparkSQL();
+const spark = new SparkSQL();

 const features = {
     list: readSQL(__dirname, 'list.sql'),
@@ -10,7 +10,7 @@ const features = {
 describe('Spark List Syntax Tests', () => {
     features.list.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });
@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';

-const parser = new SparkSQL();
+const spark = new SparkSQL();

 const features = {
     loadData: readSQL(__dirname, 'loadData.sql'),
@@ -11,7 +11,7 @@ describe('SparkSQL Load Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
-                expect(parser.validate(sql).length).toBe(0);
+                expect(spark.validate(sql).length).toBe(0);
             });
         });
     });
@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';

-const parser = new SparkSQL();
+const spark = new SparkSQL();

 const features = {
     set: readSQL(__dirname, 'optimize.sql'),
@@ -10,7 +10,7 @@ const features = {
 describe('Spark Optimize Syntax Tests', () => {
     features.set.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });
@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';

-const parser = new SparkSQL();
+const spark = new SparkSQL();

 const features = {
     refresh: readSQL(__dirname, 'refresh.sql'),
@@ -10,7 +10,7 @@ const features = {
 describe('Spark Refresh Syntax Tests', () => {
     features.refresh.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });
@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';

-const parser = new SparkSQL();
+const spark = new SparkSQL();

 const features = {
     reset: readSQL(__dirname, 'reset.sql'),
@@ -10,7 +10,7 @@ const features = {
 describe('Spark Reset Syntax Tests', () => {
     features.reset.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });
@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';

-const parser = new SparkSQL();
+const spark = new SparkSQL();

 const features = {
     selectAggregateFn: readSQL(__dirname, 'selectAggregateFn.sql'),
@@ -34,132 +34,132 @@ const features = {
 describe('Spark Select Syntax Tests', () => {
     features.selectAggregateFn.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectCase.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectCET.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectWindowFn.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectWhere.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectUnPivot.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectTVF.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectTransform.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectTableSample.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectSortBy.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectPivot.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
        });
     });
     features.selectOrderBy.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectOffset.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectLimit.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectLike.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectLateralView.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectLateralSubQuery.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectJoin.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectInlineTable.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectHiving.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectHint.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectGroupBy.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectFile.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectExplain.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectDistributeBy.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
     features.selectClusterBy.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });
@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';

-const parser = new SparkSQL();
+const spark = new SparkSQL();

 const features = {
     set: readSQL(__dirname, 'set.sql'),
@@ -10,7 +10,7 @@ const features = {
 describe('Spark Set Syntax Tests', () => {
     features.set.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });
@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';

-const parser = new SparkSQL();
+const spark = new SparkSQL();

 const features = {
     show: readSQL(__dirname, 'show.sql'),
@@ -10,7 +10,7 @@ const features = {
 describe('Spark Show Syntax Tests', () => {
     features.show.forEach((itemSql) => {
         it(itemSql, () => {
-            expect(parser.validate(itemSql).length).toBe(0);
+            expect(spark.validate(itemSql).length).toBe(0);
         });
     });
 });
@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';

-const parser = new SparkSQL();
+const spark = new SparkSQL();

 const features = {
     createDataSourceTable: readSQL(__dirname, 'createDataSourceTable.sql'),
@@ -16,7 +16,7 @@ describe('SparkSQL About Table Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
-                expect(parser.validate(sql).length).toBe(0);
+                expect(spark.validate(sql).length).toBe(0);
             });
         });
     });
@@ -1,7 +1,7 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { readSQL } from 'test/helper';

-const parser = new SparkSQL();
+const spark = new SparkSQL();

 const features = {
     useDatabase: readSQL(__dirname, 'useDatabase.sql'),
@@ -11,7 +11,7 @@ describe('SparkSQL Use Database Syntax Tests', () => {
     Object.keys(features).forEach((key) => {
         features[key].forEach((sql) => {
             it(sql, () => {
-                expect(parser.validate(sql).length).toBe(0);
+                expect(spark.validate(sql).length).toBe(0);
             });
         });
     });
@@ -1,16 +1,16 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';

 const randomText = `dhsdansdnkla ndjnsla ndnalks`;
 const unCompleteSQL = `CREATE TABLE`;

 describe('Spark SQL validate invalid sql', () => {
-    const parser = new SparkSQL();
+    const spark = new SparkSQL();

     test('validate random text', () => {
-        expect(parser.validate(randomText).length).not.toBe(0);
+        expect(spark.validate(randomText).length).not.toBe(0);
     });

     test('validate unComplete sql', () => {
-        expect(parser.validate(unCompleteSQL).length).not.toBe(0);
+        expect(spark.validate(unCompleteSQL).length).not.toBe(0);
     });
 });
@@ -1,13 +1,13 @@
-import SparkSQL from 'src/parser/spark';
+import { SparkSQL } from 'src/parser/spark';
 import { SparkSqlParserVisitor } from 'src/lib/spark/SparkSqlParserVisitor';
 import { AbstractParseTreeVisitor } from 'antlr4ng';

 describe('Spark SQL Visitor Tests', () => {
     const expectTableName = 'user1';
     const sql = `select id,name,sex from ${expectTableName};`;
-    const parser = new SparkSQL();
+    const spark = new SparkSQL();

-    const parseTree = parser.parse(sql, (error) => {
+    const parseTree = spark.parse(sql, (error) => {
         console.error('Parse error:', error);
     });
