feat: collect entity (#265)

* feat: add text and word utils
* feat: add entity collector class
* refactor: rename SyntaxContextType to EntityContextType
* refactor: improve EntityCollector
* feat: improve mysql parser grammar
* feat: add mysql entity collector
* test: mysql entity collector tests
* feat: remove useless method
* feat: improve spark grammar file
* feat: add spark entity collector
* test: spark entity collector unit tests
* feat: remove useless code
* feat: add queryStatement label
* feat: add createDatabaseStmt
* feat: add trino entity collector
* feat: rename trinosql to trino
* test: trino collect entity unit tests
* test: fix spark test
* feat(impala): support impala entity collector (#256)
* Feat/collect entity hive (#263)
* feat(hive): support hive collect entity
* feat(hive): update tableAllColumns
* feat: replace antlr4ts with antlr4ng
* feat(pgsql): pgsql collect entity (#268)
* feat(pgsql): pgsql collect entity
* feat(pgsql): optimize some names

---------

Co-authored-by: zhaoge <>

* feat: get word text by token.text
* feat: support collect db/function and add splitListener (#270)
* feat: support collect db/function and add splitListener
* feat: remove SplitListener interface in baseParser to use SplitListener in root
* fix(mysql): fix show create xxx not collected as createXXXEntity type
* test: fix pgsql unit tests
* Feat/error recover predicate (#274)
* feat: optimize pgsql grammar
* feat: add sql parser base
* feat: apply SQLParserBase
* feat: add getAllEntities method
* test: test collect table when missing column
* feat: compose collect and suggestion (#276)
* feat: mark stmt which contains caret
* test: correct name of getAllEntities
* test: remove misscolumn unit tests
* test: add suggestionWithEntity tests
* feat: flink collect entity (#277)
* feat: improve flink sql parser
* feat: support flink entity collector
* test: flink entity collect unit test
* feat: move combine entities to parent class

---------

Co-authored-by: 霜序 <976060700@qq.com>
Co-authored-by: XCynthia <942884029@qq.com>
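As a quick orientation for reviewers, here is a minimal sketch of how the new entity-collection pieces fit together, using only the classes and calls exercised by the Spark tests added below (exact signatures outside those tests are assumptions):

```typescript
import SparkSQL from 'src/parser/spark';
import { SparkSqlSplitListener, SparkEntityCollector } from 'src/parser/spark';
import { SparkSqlParserListener } from 'src/lib/spark/SparkSqlParserListener';
import { ParseTreeListener } from 'antlr4ng';

const spark = new SparkSQL();
const sql = 'CREATE TABLE IF NOT EXISTS new_tb1 like like_old_tb;';

// 1. Parse, then split the input into per-statement parse-tree contexts.
const parseTree = spark.parse(sql);
const splitListener = new SparkSqlSplitListener();
spark.listen(splitListener as SparkSqlParserListener, parseTree);

// 2. Walk one statement with the entity collector and read back the entities.
const collector = new SparkEntityCollector(sql);
spark.listen(collector as ParseTreeListener, splitListener.statementsContext[0]);
const entities = collector.getEntities(); // e.g. a TABLE_CREATE entity plus the TABLE it is LIKE'd from

// 3. Or, given a caret position, collect entities for the statement containing the caret,
//    which is the entry point the suggestion-with-entity tests below rely on.
const entitiesAtCaret = spark.getAllEntities(sql, { lineNumber: 1, column: 8 });
```

The diffs below add exactly this flow for Spark, plus the fixture SQL the tests run against.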
test/parser/spark/contextCollect/entityCollector.test.ts (new file, +362 lines)
@@ -0,0 +1,362 @@
import fs from 'fs';
import path from 'path';
import SparkSQL from 'src/parser/spark';
import { SparkSqlSplitListener, SparkEntityCollector } from 'src/parser/spark';
import { ParseTreeListener } from 'antlr4ng';
import { SparkSqlParserListener } from 'src/lib/spark/SparkSqlParserListener';
import { EntityContextType } from 'src/parser/common/basic-parser-types';
import { StmtContextType } from 'src/parser/common/entityCollector';

const commonSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'common.sql'), 'utf-8');

describe('SparkSQL entity collector tests', () => {
    const spark = new SparkSQL();
    const parseTree = spark.parse(commonSql);
    const splitListener = new SparkSqlSplitListener();
    spark.listen(splitListener as SparkSqlParserListener, parseTree);

    test('validate common sql', () => {
        expect(spark.validate(commonSql).length).toBe(0);
    });

    test('split results', () => {
        expect(splitListener.statementsContext.length).toBe(12);
    });

    test('create table like', () => {
        const columnCreateTableContext = splitListener.statementsContext[0];

        const collectListener = new SparkEntityCollector(commonSql);
        spark.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('new_tb1');
        expect(tableCreateEntity.position).toEqual({
            startIndex: 27,
            endIndex: 33,
            line: 1,
            startColumn: 28,
            endColumn: 35,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            startIndex: 0,
            endIndex: 50,
            startLine: 1,
            endLine: 1,
            startColumn: 1,
            endColumn: 52,
        });

        expect(tableCreateEntity.relatedEntities.length).toBe(1);

        const beLikedEntity = allEntities[1];

        expect(tableCreateEntity.relatedEntities[0]).toBe(beLikedEntity);
        expect(beLikedEntity.text).toBe('like_old_tb');
        expect(beLikedEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(beLikedEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
    });

    test('create hive format table', () => {
        const columnCreateTableContext = splitListener.statementsContext[1];

        const collectListener = new SparkEntityCollector(commonSql);
        spark.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('new_tb2');
        expect(tableCreateEntity.position).toEqual({
            startIndex: 67,
            endIndex: 73,
            line: 3,
            startColumn: 14,
            endColumn: 21,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            startIndex: 54,
            endIndex: 242,
            startLine: 3,
            endLine: 8,
            startColumn: 1,
            endColumn: 22,
        });

        expect(tableCreateEntity.relatedEntities).toBeNull();
        expect(tableCreateEntity.columns.length).toBe(2);

        tableCreateEntity.columns.forEach((columEntity) => {
            expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
            expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
            expect(columEntity.text).toBe(
                commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
            );
        });
    });

    test('create data source table', () => {
        const testingContext = splitListener.statementsContext[2];

        const collectListener = new SparkEntityCollector(commonSql);
        spark.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const tableCreateEntity = allEntities[0];
        const originTableEntity = allEntities[1];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('student_copy');
        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );

        expect(tableCreateEntity.columns).toBeNull();
        expect(tableCreateEntity.relatedEntities.length).toBe(1);
        expect(tableCreateEntity.relatedEntities[0]).toBe(originTableEntity);

        expect(originTableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(originTableEntity.text).toBe('student');
        expect(originTableEntity.belongStmt.rootStmt).toBe(tableCreateEntity.belongStmt);
    });

    test('create view', () => {
        const testingContext = splitListener.statementsContext[3];

        const collectListener = new SparkEntityCollector(commonSql);
        spark.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const viewEntity = allEntities[0];
        const tableEntity = allEntities[1];

        expect(viewEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
        expect(viewEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
        expect(viewEntity.text).toBe('new_view1');
        expect(viewEntity.columns.length).toBe(2);
        viewEntity.columns.forEach((columEntity) => {
            expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
            expect(columEntity.belongStmt).toBe(viewEntity.belongStmt);
            expect(columEntity.text).toBe(
                commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
            );
        });

        expect(tableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(tableEntity.belongStmt.rootStmt).toBe(viewEntity.belongStmt);
        expect(tableEntity.text).toBe('old_tb_1');
    });

    test('select from table', () => {
        const testingContext = splitListener.statementsContext[4];

        const collectListener = new SparkEntityCollector(commonSql);
        spark.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const tableEntity1 = allEntities[0];
        const tableEntity2 = allEntities[1];

        expect(tableEntity1.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(tableEntity1.text).toBe('employee');

        expect(tableEntity2.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(tableEntity2.text).toBe('department');
    });

    test('insert into table values', () => {
        const testingContext = splitListener.statementsContext[5];

        const collectListener = new SparkEntityCollector(commonSql);
        spark.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const tableEntity = allEntities[0];

        expect(tableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
        expect(tableEntity.text).toBe('insert_tb');
    });

    test('insert overwrite table', () => {
        const testingContext = splitListener.statementsContext[6];

        const collectListener = new SparkEntityCollector(commonSql);
        spark.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const targetTableEntity = allEntities[0];
        const sourceTableEntity = allEntities[1];

        expect(targetTableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(targetTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
        expect(targetTableEntity.text).toBe('target_tb');

        expect(sourceTableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(sourceTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(sourceTableEntity.belongStmt.rootStmt).toBe(targetTableEntity.belongStmt);
        expect(sourceTableEntity.text).toBe('source_tb');
    });

    test('insert overwrite dir', () => {
        const testingContext = splitListener.statementsContext[7];

        const collectListener = new SparkEntityCollector(commonSql);
        spark.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(1);

        const sourceTableEntity = allEntities[0];

        expect(sourceTableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(sourceTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(sourceTableEntity.text).toBe('from_tb');
    });

    test('create database', () => {
        const testingContext = splitListener.statementsContext[8];

        const collectListener = new SparkEntityCollector(commonSql);
        spark.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(1);

        const sourceTableEntity = allEntities[0];

        expect(sourceTableEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
        expect(sourceTableEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_DATABASE_STMT
        );
        expect(sourceTableEntity.text).toBe('customer_db');
    });

    test('use namespace', () => {
        const testingContext = splitListener.statementsContext[9];

        const collectListener = new SparkEntityCollector(commonSql);
        spark.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(1);

        const sourceTableEntity = allEntities[0];

        expect(sourceTableEntity.entityContextType).toBe(EntityContextType.DATABASE);
        expect(sourceTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.COMMON_STMT);
        expect(sourceTableEntity.text).toBe('ns1');
    });

    test('create function', () => {
        const functionContext = splitListener.statementsContext[10];

        const collectListener = new SparkEntityCollector(commonSql);
        spark.listen(collectListener as ParseTreeListener, functionContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const functionEntity = allEntities[0];

        expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
        expect(functionEntity.text).toBe('simple_udf');
        expect(functionEntity.position).toEqual({
            endColumn: 38,
            endIndex: 905,
            line: 28,
            startColumn: 28,
            startIndex: 896,
        });

        expect(functionEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_FUNCTION_STMT
        );
        expect(functionEntity.belongStmt.position).toEqual({
            endColumn: 54,
            endIndex: 921,
            endLine: 28,
            startColumn: 1,
            startIndex: 869,
            startLine: 28,
        });

        expect(functionEntity.columns).toBeNull();
        expect(functionEntity.relatedEntities).toBeNull();
    });

    test('create xxx function', () => {
        const functionContext = splitListener.statementsContext[11];

        const collectListener = new SparkEntityCollector(commonSql);
        spark.listen(collectListener as ParseTreeListener, functionContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const functionEntity = allEntities[0];

        expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
        expect(functionEntity.text).toBe('simple_udf');
        expect(functionEntity.position).toEqual({
            endColumn: 27,
            endIndex: 950,
            line: 30,
            startColumn: 17,
            startIndex: 941,
        });

        expect(functionEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_FUNCTION_STMT
        );
        expect(functionEntity.belongStmt.position).toEqual({
            endColumn: 43,
            endIndex: 966,
            endLine: 30,
            startColumn: 1,
            startIndex: 925,
            startLine: 30,
        });

        expect(functionEntity.columns).toBeNull();
        expect(functionEntity.relatedEntities).toBeNull();
    });
});
test/parser/spark/contextCollect/fixtures/common.sql (new file, +30 lines)
@@ -0,0 +1,30 @@
CREATE TABLE IF NOT EXISTS new_tb1 like like_old_tb;

CREATE TABLE new_tb2 (new_col1 INT, new_col2 STRING)
    PARTITIONED BY (YEAR STRING)
    CLUSTERED BY (new_col1, NAME)
    SORTED BY (new_col1 ASC)
    INTO 3 BUCKETS
    STORED AS PARQUET;

CREATE TABLE student_copy USING CSV AS SELECT * FROM student;

CREATE VIEW new_view1 (ID COMMENT 'Unique identification number', Name)
COMMENT 'View for experienced employees'
AS SELECT id, name FROM old_tb_1 WHERE working_years > 5;

SELECT id, name, employee.deptno, deptname FROM employee CROSS JOIN department;

INSERT INTO insert_tb (address, name, student_id) VALUES ('Hangzhou, China', 'Kent Yao', 11215016);

INSERT OVERWRITE target_tb TABLE source_tb;

INSERT OVERWRITE DIRECTORY '/path/to/output/directory' SELECT * FROM from_tb WHERE condition;

CREATE DATABASE IF NOT EXISTS customer_db;

USE NAMESPACE ns1;

CREATE OR REPLACE FUNCTION simple_udf AS 'SimpleUdfR';

CREATE FUNCTION simple_udf AS 'SimpleUdfR';
test/parser/spark/suggestion/fixtures/suggestionWithEntity.sql (new file, +11 lines)
@@ -0,0 +1,11 @@
SELECT  FROM my_db.tb;

SELECT name, calculate_age(birthdate) AS age,  FROM students;

INSERT INTO insert_tb SELECT  FROM from_tb;

INSERT INTO insert_tb SELECT id, age,  FROM from_tb;

CREATE TABLE sorted_census_data AS SELECT  FROM unsorted_census_data;

CREATE TABLE sorted_census_data AS SELECT id, age,  FROM unsorted_census_data;
test/parser/spark/suggestion/multipleStatement.test.ts (modified)
@@ -1,7 +1,7 @@
import fs from 'fs';
import path from 'path';
import SparkSQL from 'src/parser/spark';
-import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
+import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';

const syntaxSql = fs.readFileSync(
    path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
@@ -18,7 +18,7 @@ describe('SparkSQL Multiple Statements Syntax Suggestion', () => {
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@@ -32,7 +32,7 @@ describe('SparkSQL Multiple Statements Syntax Suggestion', () => {
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@@ -46,7 +46,7 @@ describe('SparkSQL Multiple Statements Syntax Suggestion', () => {
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@@ -60,7 +60,7 @@ describe('SparkSQL Multiple Statements Syntax Suggestion', () => {
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
			
		||||
							
								
								
									
										156
									
								
								test/parser/spark/suggestion/suggestionWithEntity.test.ts
									
									
									
									
									
										Normal file
									
								
							
							
						
						
									
										156
									
								
								test/parser/spark/suggestion/suggestionWithEntity.test.ts
									
									
									
									
									
										Normal file
									
								
							@ -0,0 +1,156 @@
import fs from 'fs';
import path from 'path';
import SparkSQL from 'src/parser/spark';
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
import { commentOtherLine } from 'test/helper';

const syntaxSql = fs.readFileSync(
    path.join(__dirname, 'fixtures', 'suggestionWithEntity.sql'),
    'utf-8'
);

describe('PostgreSQL Syntax Suggestion with collect entity', () => {
    const spark = new SparkSQL();

    test('select with no column', () => {
        const pos: CaretPosition = {
            lineNumber: 1,
            column: 8,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = spark.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = spark.getAllEntities(sql, pos);
        expect(entities.length).toBe(1);
        expect(entities[0].text).toBe('my_db.tb');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
    });

    test('select with columns with trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 3,
            column: 47,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = spark.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = spark.getAllEntities(sql, pos);
        expect(entities.length).toBe(1);
        expect(entities[0].text).toBe('students');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
    });

    test('insert into table as select with no column', () => {
        const pos: CaretPosition = {
            lineNumber: 5,
            column: 30,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = spark.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = spark.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('insert_tb');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('from_tb');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('insert into table as select with trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 7,
            column: 39,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = spark.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = spark.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('insert_tb');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('from_tb');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('create table as select with no column', () => {
        const pos: CaretPosition = {
            lineNumber: 9,
            column: 43,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = spark.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = spark.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('sorted_census_data');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('unsorted_census_data');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('create table as select with trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 11,
            column: 52,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = spark.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = spark.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('sorted_census_data');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('unsorted_census_data');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });
});
test/parser/spark/suggestion/syntaxSuggestion.test.ts (modified)
@@ -1,7 +1,7 @@
import fs from 'fs';
import path from 'path';
import SparkSQL from 'src/parser/spark';
-import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
+import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
import { commentOtherLine } from 'test/helper';

const syntaxSql = fs.readFileSync(
@@ -28,7 +28,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@@ -45,7 +45,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@@ -62,7 +62,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@@ -79,7 +79,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@@ -96,7 +96,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.VIEW_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@@ -113,7 +113,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
+            (syn) => syn.syntaxContextType === EntityContextType.VIEW
        );

        expect(suggestion).not.toBeUndefined();
@@ -130,7 +130,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.FUNCTION_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@@ -147,7 +147,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
+            (syn) => syn.syntaxContextType === EntityContextType.FUNCTION
        );

        expect(suggestion).not.toBeUndefined();
@@ -164,7 +164,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.DATABASE_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@@ -181,7 +181,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
+            (syn) => syn.syntaxContextType === EntityContextType.DATABASE
        );

        expect(suggestion).not.toBeUndefined();
@@ -198,7 +198,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -215,7 +215,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@@ -232,7 +232,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -249,7 +249,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@@ -266,7 +266,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -283,7 +283,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -300,7 +300,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -317,7 +317,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -334,7 +334,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -351,7 +351,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -368,7 +368,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -385,7 +385,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -402,7 +402,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -419,7 +419,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -436,7 +436,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -453,7 +453,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.VIEW_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@@ -470,7 +470,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
+            (syn) => syn.syntaxContextType === EntityContextType.VIEW
        );

        expect(suggestion).not.toBeUndefined();
@@ -487,7 +487,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
+            (syn) => syn.syntaxContextType === EntityContextType.VIEW
        );

        expect(suggestion).not.toBeUndefined();
@@ -504,7 +504,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
+            (syn) => syn.syntaxContextType === EntityContextType.VIEW
        );

        expect(suggestion).not.toBeUndefined();
@@ -521,7 +521,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
+            (syn) => syn.syntaxContextType === EntityContextType.VIEW
        );

        expect(suggestion).not.toBeUndefined();
@@ -538,7 +538,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
+            (syn) => syn.syntaxContextType === EntityContextType.DATABASE
        );

        expect(suggestion).not.toBeUndefined();
@@ -555,7 +555,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@@ -572,7 +572,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -589,7 +589,7 @@ describe('Spark SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();