feat: collect entity (#265)
* feat: add text and word utils
* feat: add entity collector class
* refactor: rename SyntaxContextType to EntityContextType
* refactor: improve EntityCollector
* feat: improve mysql parser grammar
* feat: add mysql entity collector
* test: mysql entity collector tests
* feat: remove useless method
* feat: improve spark grammar file
* feat: add spark entity collector
* test: spark entity collector unit tests
* feat: remove useless code
* feat: add queryStatement label
* feat: add createDatabaseStmt
* feat: add trino entity collector
* feat: rename trinosql to trino
* test: trino collect entity unit tests
* test: fix spark test
* feat(impala): support impala entity collector (#256)
* feat(hive): collect entity (#263)
  * feat(hive): support hive collect entity
  * feat(hive): update tableAllColumns
* feat: replace antlr4ts with antlr4ng
* feat(pgsql): pgsql collect entity (#268)
  * feat(pgsql): pgsql collect entity
  * feat(pgsql): optimize some names
* feat: get word text by token.text
* feat: support collecting db/function and add SplitListener (#270)
  * feat: remove SplitListener interface in baseParser to use SplitListener in root
  * fix(mysql): fix show create xxx not collected as createXXXEntity type
  * test: fix pgsql unit tests
* feat: error recover predicate (#274)
  * feat: optimize pgsql grammar
  * feat: add sql parser base
  * feat: apply SQLParserBase
  * feat: add getAllEntities method
  * test: test collect table when missing column
* feat: compose collect and suggestion (#276)
  * feat: mark stmt which contains caret
  * test: correct name of getAllEntities
  * test: remove missing-column unit tests
  * test: add suggestionWithEntity tests
* feat: flink collect entity (#277)
  * feat: improve flink sql parser
  * feat: support flink entity collector
  * test: flink entity collect unit test
  * feat: move combine entities to parent class

Co-authored-by: zhaoge <>
Co-authored-by: 霜序 <976060700@qq.com>
Co-authored-by: XCynthia <942884029@qq.com>
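For context, the API surface this commit introduces can be summed up in a short sketch assembled from the test code in this diff (the sketch itself is illustrative, not part of the commit): parse once, split the script into statements, then walk a statement with the entity collector.

    import { ParseTreeListener } from 'antlr4ng';
    import FlinkSQL, { FlinkEntityCollector, FlinkSqlSplitListener } from 'src/parser/flinksql';
    import { FlinkSqlParserListener } from 'src/lib/flinksql/FlinkSqlParserListener';

    const flink = new FlinkSQL();
    const sql = "CREATE TABLE MyTable ('user_id' BIGINT) WITH ('connector' = 'oracle-x');";

    // Parse once, then split the script into per-statement parse-tree contexts.
    const parseTree = flink.parse(sql);
    const splitListener = new FlinkSqlSplitListener();
    flink.listen(splitListener as FlinkSqlParserListener, parseTree);

    // Walk a single statement with the entity collector.
    const collector = new FlinkEntityCollector(sql);
    flink.listen(collector as ParseTreeListener, splitListener.statementsContext[0]);

    // Every collected entity carries its text, kind, position, and owning statement.
    collector.getEntities().forEach((entity) => {
        console.log(entity.entityContextType, entity.text, entity.position);
    });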
test/parser/flinksql/contextCollect/entityCollector.test.ts (new file, 395 lines)

@@ -0,0 +1,395 @@
import fs from 'fs';
import path from 'path';
import { ParseTreeListener } from 'antlr4ng';
import FlinkSQL, { FlinkEntityCollector, FlinkSqlSplitListener } from 'src/parser/flinksql';
import { FlinkSqlParserListener } from 'src/lib/flinksql/FlinkSqlParserListener';
import { EntityContextType } from 'src/parser/common/basic-parser-types';
import { StmtContextType } from 'src/parser/common/entityCollector';

const commonSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'common.sql'), 'utf-8');

describe('Flink entity collector tests', () => {
    const flink = new FlinkSQL();
    const parseTree = flink.parse(commonSql);
    const splitListener = new FlinkSqlSplitListener();
    flink.listen(splitListener as FlinkSqlParserListener, parseTree);

    test('validate common sql', () => {
        expect(flink.validate(commonSql).length).toBe(0);
    });

    test('split results', () => {
        expect(splitListener.statementsContext.length).toBe(12);
    });

    test('create table by columns', () => {
        const columnCreateTableContext = splitListener.statementsContext[0];

        const collectListener = new FlinkEntityCollector(commonSql);
        flink.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('MyTable');
        expect(tableCreateEntity.position).toEqual({
            startIndex: 13,
            endIndex: 19,
            line: 1,
            startColumn: 14,
            endColumn: 21,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            startIndex: 0,
            endIndex: 85,
            startLine: 1,
            endLine: 1,
            startColumn: 1,
            endColumn: 87,
        });

        expect(tableCreateEntity.relatedEntities).toBeNull();

        expect(tableCreateEntity.columns.length).toBe(2);
        tableCreateEntity.columns.forEach((columEntity) => {
            expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
            expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
            expect(columEntity.text).toBe(
                commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
            );
        });
    });

    test('create table as select', () => {
        const createTableBySelectContext = splitListener.statementsContext[1];

        const collectListener = new FlinkEntityCollector(commonSql);
        flink.listen(collectListener as ParseTreeListener, createTableBySelectContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('my_ctas_table');
        expect(tableCreateEntity.position).toEqual({
            startIndex: 102,
            endIndex: 114,
            line: 3,
            startColumn: 14,
            endColumn: 27,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            startIndex: 89,
            endIndex: 228,
            startLine: 3,
            endLine: 11,
            startColumn: 1,
            endColumn: 20,
        });

        expect(tableCreateEntity.columns).toBeNull();

        expect(tableCreateEntity.relatedEntities.length).toBe(1);
        tableCreateEntity.relatedEntities.forEach((relatedEntity) => {
            expect(relatedEntity.entityContextType).toBe(EntityContextType.TABLE);
            expect(allEntities.some((en) => relatedEntity === en)).toBeTruthy();
        });

        expect(allEntities[1].text).toBe('source_table');
        expect(allEntities[1].belongStmt.rootStmt).toBe(allEntities[0].belongStmt);
        expect(allEntities[1].position).toEqual({
            startIndex: 191,
            endIndex: 202,
            line: 9,
            startColumn: 5,
            endColumn: 17,
        });
    });

    test('create table like', () => {
        const createTableLikeContext = splitListener.statementsContext[2];

        const collectListener = new FlinkEntityCollector(commonSql);
        flink.listen(collectListener as ParseTreeListener, createTableLikeContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const tableCreateEntity = allEntities[0];
        const originTableEntity = allEntities[1];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('Orders_with_watermark');
        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );

        expect(tableCreateEntity.columns.length).toBe(1);
        expect(tableCreateEntity.columns[0].text).toBe('id');
        expect(tableCreateEntity.columns[0].entityContextType).toBe(
            EntityContextType.COLUMN_CREATE
        );
        expect(tableCreateEntity.relatedEntities.length).toBe(1);
        expect(tableCreateEntity.relatedEntities[0]).toBe(originTableEntity);

        expect(originTableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(originTableEntity.text).toBe('Orders_in_file');
        expect(originTableEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
    });

    test('common select from table', () => {
        const selectTableContext = splitListener.statementsContext[3];
        const collectListener = new FlinkEntityCollector(commonSql);
        flink.listen(collectListener as ParseTreeListener, selectTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const tableEntity = allEntities[0];

        expect(tableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity.text).toBe('Orders');
        expect(tableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);

        expect(tableEntity.columns).toBeNull();
        expect(tableEntity.relatedEntities).toBeNull();
    });

    test('select from table join', () => {
        const selectTableContext = splitListener.statementsContext[4];
        const collectListener = new FlinkEntityCollector(commonSql);
        flink.listen(collectListener as ParseTreeListener, selectTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const tableEntity1 = allEntities[0];
        const tableEntity2 = allEntities[1];

        expect(tableEntity1.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity1.text).toBe('Orders');
        expect(tableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);

        expect(tableEntity1.columns).toBeNull();
        expect(tableEntity1.relatedEntities).toBeNull();

        expect(tableEntity2.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity2.text).toBe('Product');
        expect(tableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);

        expect(tableEntity2.columns).toBeNull();
        expect(tableEntity2.relatedEntities).toBeNull();

        expect(tableEntity1.belongStmt).toBe(tableEntity2.belongStmt);
    });

    test('union select', () => {
        const selectTableContext = splitListener.statementsContext[5];
        const collectListener = new FlinkEntityCollector(commonSql);
        flink.listen(collectListener as ParseTreeListener, selectTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const tableEntity1 = allEntities[0];
        const tableEntity2 = allEntities[1];

        expect(tableEntity1.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity1.text).toBe('t1');
        expect(tableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);

        expect(tableEntity1.columns).toBeNull();
        expect(tableEntity1.relatedEntities).toBeNull();

        expect(tableEntity2.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity2.text).toBe('t2');
        expect(tableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);

        expect(tableEntity2.columns).toBeNull();
        expect(tableEntity2.relatedEntities).toBeNull();

        expect(tableEntity1.belongStmt.rootStmt).toBe(tableEntity2.belongStmt.rootStmt);
    });

    test('insert into table values', () => {
        const insertTableContext = splitListener.statementsContext[6];
        const collectListener = new FlinkEntityCollector(commonSql);
        flink.listen(collectListener as ParseTreeListener, insertTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const tableEntity = allEntities[0];

        expect(tableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity.text).toBe('country_page_view');
        expect(tableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);

        expect(tableEntity.columns).toBeNull();
        expect(tableEntity.relatedEntities).toBeNull();
    });

    test('insert into table select', () => {
        const insertTableContext = splitListener.statementsContext[7];
        const collectListener = new FlinkEntityCollector(commonSql);
        flink.listen(collectListener as ParseTreeListener, insertTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const insertTableEntity = allEntities[0];
        const fromTableEntity1 = allEntities[1];

        expect(insertTableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(insertTableEntity.text).toBe('catalog1.db1.country_page_view');
        expect(insertTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);

        expect(fromTableEntity1.entityContextType).toBe(EntityContextType.TABLE);
        expect(fromTableEntity1.text).toBe('page_view_source');
        expect(fromTableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);

        expect(fromTableEntity1.belongStmt.parentStmt).toBe(insertTableEntity.belongStmt);
        expect(fromTableEntity1.belongStmt.rootStmt).toBe(insertTableEntity.belongStmt);
    });

    test('multiple insert', () => {
        const insertTableContext = splitListener.statementsContext[8];
        const collectListener = new FlinkEntityCollector(commonSql);
        flink.listen(collectListener as ParseTreeListener, insertTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const insertTableEntity1 = allEntities[0];
        const insertTableEntity2 = allEntities[1];

        expect(insertTableEntity1.entityContextType).toBe(EntityContextType.TABLE);
        expect(insertTableEntity1.text).toBe('country_page_view1');
        expect(insertTableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);

        expect(insertTableEntity2.entityContextType).toBe(EntityContextType.TABLE);
        expect(insertTableEntity2.text).toBe('country_page_view2');
        expect(insertTableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);

        expect(insertTableEntity2.belongStmt.parentStmt).toBe(
            insertTableEntity1.belongStmt.parentStmt
        );
        expect(insertTableEntity2.belongStmt.rootStmt).toBe(
            insertTableEntity1.belongStmt.parentStmt
        );
    });

    test('create view as select table', () => {
        const insertTableContext = splitListener.statementsContext[9];
        const collectListener = new FlinkEntityCollector(commonSql);
        flink.listen(collectListener as ParseTreeListener, insertTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        expect(allEntities[0].entityContextType).toBe(EntityContextType.VIEW_CREATE);
        expect(allEntities[0].text).toBe('view1');
        expect(allEntities[0].belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);

        expect(allEntities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(allEntities[1].text).toBe('tbl');
        expect(allEntities[1].belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
    });

    test('create database', () => {
        const dbCreateContext = splitListener.statementsContext[10];

        const collectListener = new FlinkEntityCollector(commonSql);
        flink.listen(collectListener as ParseTreeListener, dbCreateContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const dbEntity = allEntities[0];

        expect(dbEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
        expect(dbEntity.text).toBe('db1');
        expect(dbEntity.position).toEqual({
            endColumn: 34,
            endIndex: 1160,
            line: 44,
            startColumn: 31,
            startIndex: 1158,
        });

        expect(dbEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_DATABASE_STMT);
        expect(dbEntity.belongStmt.position).toEqual({
            endColumn: 82,
            endIndex: 1208,
            endLine: 44,
            startColumn: 1,
            startIndex: 1128,
            startLine: 44,
        });

        expect(dbEntity.columns).toBeNull();
        expect(dbEntity.relatedEntities).toBeNull();
    });

    test('create function', () => {
        const functionCreateContext = splitListener.statementsContext[11];

        const collectListener = new FlinkEntityCollector(commonSql);
        flink.listen(collectListener as ParseTreeListener, functionCreateContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const functionEntity = allEntities[0];

        expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
        expect(functionEntity.text).toBe('tempFunction');
        expect(functionEntity.position).toEqual({
            endColumn: 43,
            endIndex: 1253,
            line: 46,
            startColumn: 31,
            startIndex: 1242,
        });

        expect(functionEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_FUNCTION_STMT
        );
        expect(functionEntity.belongStmt.position).toEqual({
            endColumn: 58,
            endIndex: 1268,
            endLine: 46,
            startColumn: 1,
            startIndex: 1212,
            startLine: 46,
        });

        expect(functionEntity.columns).toBeNull();
        expect(functionEntity.relatedEntities).toBeNull();
    });
});
test/parser/flinksql/contextCollect/fixtures/common.sql (new file, 46 lines)

@@ -0,0 +1,46 @@
CREATE TABLE MyTable ('user_id' BIGINT, 'name' STRING) WITH ('connector' = 'oracle-x');

CREATE TABLE my_ctas_table WITH ('connector' = 'kafka')
AS SELECT
    id,
    name,
    age
FROM
    source_table
WHERE
    mod(id, 10) = 0;

CREATE TABLE Orders_with_watermark (
    id INT,
    WATERMARK FOR order_time AS order_time - INTERVAL '5' SECOND
) WITH (
    'scan.startup.mode' = 'latest-offset'
) LIKE Orders_in_file (
    EXCLUDING ALL
    INCLUDING GENERATED
);

SELECT order_id, price + tax FROM Orders;

SELECT * FROM Orders LEFT JOIN Product ON Orders.product_id = Product.id;

(SELECT s FROM t1) UNION (SELECT s FROM t2);

INSERT INTO country_page_view VALUES ('Chinese', 'mumiao', 18), ('Amercian', 'georage', 22);

INSERT INTO catalog1.db1.country_page_view SELECT `user`, cnt FROM page_view_source;

EXECUTE STATEMENT SET BEGIN
INSERT INTO country_page_view1
VALUES ('Chinese', 'mumiao', 18),
    ('Amercian', 'georage', 22);
INSERT INTO country_page_view2
VALUES ('Chinese', 'mumiao', 18),
    ('Amercian', 'georage', 22);
END;

CREATE VIEW view1(col1, col2) AS SELECT col3, col4 FROM tbl;

CREATE DATABASE IF NOT EXISTS db1 WITH ('key1' = 'value1', 'key2.a' = 'value2.a');

CREATE FUNCTION IF NOT EXISTS tempFunction AS 'SimpleUdf';
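One detail worth noting about the positions asserted in the test file above: endIndex is inclusive, so recovering an entity's text from the source needs endIndex + 1 in the slice. A quick check against the first fixture line (illustrative only):

    // First line of common.sql; 'MyTable' starts at index 13 and ends at index 19 (inclusive).
    const firstLine = "CREATE TABLE MyTable ('user_id' BIGINT, 'name' STRING) WITH ('connector' = 'oracle-x');";
    firstLine.slice(13, 19 + 1); // -> 'MyTable' (columns 14 to 21, 1-based)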
test/parser/flinksql/suggestion/fixtures/suggestionWithEntity.sql (new file)

@@ -0,0 +1,11 @@
SELECT FROM tb1;

SELECT col1, col2, FROM tb;

INSERT INTO insert_tb PARTITION (country, state) SELECT col1, col2, country, state FROM ( SELECT FROM inside_tb ) subquery;

INSERT INTO insert_tb PARTITION (country, state) SELECT col1, col2, country, state FROM ( SELECT id, FROM inside_tb ) subquery;

CREATE TABLE IF NOT EXISTS derived_table WITH ('connector' = 'kafka') AS SELECT FROM origin_table;

CREATE TABLE IF NOT EXISTS derived_table WITH ('connector' = 'kafka') AS SELECT id, FROM origin_table;
test/parser/flinksql/suggestion/fixtures/syntaxSuggestion.sql

@@ -32,7 +32,7 @@ SELECT col, FROM tb1;

 SELECT * FROM tb ORDER BY ;

-SELECT * FROM tb GROUP BY tb. ;
+SELECT * FROM tb GROUP BY ;

 INSERT INTO tb (col, tb.c );

test/parser/flinksql/suggestion/multipleStatement.test.ts

@@ -1,7 +1,7 @@
 import fs from 'fs';
 import path from 'path';
 import FlinkSQL from 'src/parser/flinksql';
-import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
+import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';

 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
@@ -18,7 +18,7 @@ describe('FlinkSQL Multiple Statements Syntax Suggestion', () => {
     };
     const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+        (syn) => syn.syntaxContextType === EntityContextType.TABLE
     );

     expect(suggestion).not.toBeUndefined();
@@ -32,7 +32,7 @@ describe('FlinkSQL Multiple Statements Syntax Suggestion', () => {
     };
     const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
+        (syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
     );

     expect(suggestion).not.toBeUndefined();
@@ -46,7 +46,7 @@ describe('FlinkSQL Multiple Statements Syntax Suggestion', () => {
     };
     const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+        (syn) => syn.syntaxContextType === EntityContextType.TABLE
     );

     expect(suggestion).not.toBeUndefined();
@@ -60,7 +60,7 @@ describe('FlinkSQL Multiple Statements Syntax Suggestion', () => {
     };
     const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+        (syn) => syn.syntaxContextType === EntityContextType.TABLE
     );

     expect(suggestion).not.toBeUndefined();
test/parser/flinksql/suggestion/suggestionWithEntity.test.ts (new file, 162 lines)

@@ -0,0 +1,162 @@
import fs from 'fs';
import path from 'path';
import FlinkSQL from 'src/parser/flinksql';
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
import { commentOtherLine } from 'test/helper';

const syntaxSql = fs.readFileSync(
    path.join(__dirname, 'fixtures', 'suggestionWithEntity.sql'),
    'utf-8'
);

describe('Flink SQL Syntax Suggestion with collect entity', () => {
    const flink = new FlinkSQL();

    test('Validate Syntax SQL', () => {
        expect(flink.validate(syntaxSql).length).not.toBe(0);
    });

    test('select with no columns', () => {
        const pos: CaretPosition = {
            lineNumber: 1,
            column: 8,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const parseTree = flink.parse(sql);

        const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = flink.getAllEntities(sql, pos);
        expect(entities.length).toBe(1);
        expect(entities[0].text).toBe('tb1');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
    });

    test('select with columns and trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 3,
            column: 20,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = flink.getAllEntities(sql, pos);
        expect(entities.length).toBe(1);
        expect(entities[0].text).toBe('tb');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
    });

    test('insert into from nested query with no column', () => {
        const pos: CaretPosition = {
            lineNumber: 5,
            column: 98,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = flink.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('insert_tb');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('inside_tb');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('insert into from nested query with columns and trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 7,
            column: 102,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = flink.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('insert_tb');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('inside_tb');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('create table as select with no column', () => {
        const pos: CaretPosition = {
            lineNumber: 9,
            column: 82,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = flink.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('derived_table');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('origin_table');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('create table as select with columns and trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 11,
            column: 86,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = flink.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('derived_table');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('origin_table');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });
});
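The point of `isContainCaret` in these tests is to let a completion engine narrow the collected entities down to the statement being edited. A rough sketch of that flow, using only the calls shown above (`getSuggestionAtCaretPosition`, `getAllEntities`); the filtering itself is an assumption about how a consumer would use the results:

    import FlinkSQL from 'src/parser/flinksql';
    import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';

    const flink = new FlinkSQL();
    const sql = 'SELECT  FROM tb1;';
    const pos: CaretPosition = { lineNumber: 1, column: 8 };

    // Does the caret expect a column here?
    const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax ?? [];
    const expectsColumn = syntaxes.some(
        (syn) => syn.syntaxContextType === EntityContextType.COLUMN
    );

    // If so, the tables of the statement under the caret tell us which
    // schemas to consult for candidate column names.
    const candidateTables = flink
        .getAllEntities(sql, pos)
        .filter(
            (entity) =>
                entity.belongStmt.isContainCaret &&
                entity.entityContextType === EntityContextType.TABLE
        )
        .map((entity) => entity.text); // e.g. ['tb1']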
test/parser/flinksql/suggestion/syntaxSuggestion.test.ts

@@ -1,7 +1,7 @@
 import fs from 'fs';
 import path from 'path';
 import FlinkSQL from 'src/parser/flinksql';
-import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
+import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
 import { commentOtherLine } from 'test/helper';

 const syntaxSql = fs.readFileSync(
@@ -28,7 +28,7 @@ describe('Flink SQL Syntax Suggestion', () => {
         pos
     )?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.CATALOG
+        (syn) => syn.syntaxContextType === EntityContextType.CATALOG
     );

     expect(suggestion).not.toBeUndefined();
@@ -45,7 +45,7 @@ describe('Flink SQL Syntax Suggestion', () => {
         pos
     )?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+        (syn) => syn.syntaxContextType === EntityContextType.TABLE
     );

     expect(suggestion).not.toBeUndefined();
@@ -62,7 +62,7 @@ describe('Flink SQL Syntax Suggestion', () => {
         pos
     )?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
+        (syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
     );

     expect(suggestion).not.toBeUndefined();
@@ -79,7 +79,7 @@ describe('Flink SQL Syntax Suggestion', () => {
         pos
     )?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
+        (syn) => syn.syntaxContextType === EntityContextType.DATABASE
     );

     expect(suggestion).not.toBeUndefined();
@@ -96,7 +96,7 @@ describe('Flink SQL Syntax Suggestion', () => {
         pos
     )?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
+        (syn) => syn.syntaxContextType === EntityContextType.DATABASE
     );

     expect(suggestion).not.toBeUndefined();
@@ -113,7 +113,7 @@ describe('Flink SQL Syntax Suggestion', () => {
         pos
     )?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
+        (syn) => syn.syntaxContextType === EntityContextType.VIEW
     );

     expect(suggestion).not.toBeUndefined();
@@ -130,7 +130,7 @@ describe('Flink SQL Syntax Suggestion', () => {
         pos
     )?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
+        (syn) => syn.syntaxContextType === EntityContextType.VIEW
     );

     expect(suggestion).not.toBeUndefined();
@@ -147,7 +147,7 @@ describe('Flink SQL Syntax Suggestion', () => {
         pos
     )?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
+        (syn) => syn.syntaxContextType === EntityContextType.VIEW_CREATE
     );

     expect(suggestion).not.toBeUndefined();
@@ -164,7 +164,7 @@ describe('Flink SQL Syntax Suggestion', () => {
         pos
     )?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
+        (syn) => syn.syntaxContextType === EntityContextType.FUNCTION
     );

     expect(suggestion).not.toBeUndefined();
@@ -181,7 +181,7 @@ describe('Flink SQL Syntax Suggestion', () => {
         pos
     )?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
+        (syn) => syn.syntaxContextType === EntityContextType.FUNCTION_CREATE
     );

     expect(suggestion).not.toBeUndefined();
@@ -198,7 +198,7 @@ describe('Flink SQL Syntax Suggestion', () => {
         pos
     )?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
+        (syn) => syn.syntaxContextType === EntityContextType.VIEW
     );

     expect(suggestion).not.toBeUndefined();
@@ -215,7 +215,7 @@ describe('Flink SQL Syntax Suggestion', () => {
         pos
     )?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+        (syn) => syn.syntaxContextType === EntityContextType.TABLE
     );

     expect(suggestion).not.toBeUndefined();
@@ -232,7 +232,7 @@ describe('Flink SQL Syntax Suggestion', () => {
         pos
     )?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
+        (syn) => syn.syntaxContextType === EntityContextType.VIEW
     );

     expect(suggestion).not.toBeUndefined();
@@ -249,7 +249,7 @@ describe('Flink SQL Syntax Suggestion', () => {
         pos
     )?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+        (syn) => syn.syntaxContextType === EntityContextType.COLUMN
     );

     expect(suggestion).not.toBeUndefined();
@@ -266,7 +266,7 @@ describe('Flink SQL Syntax Suggestion', () => {
         pos
     )?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+        (syn) => syn.syntaxContextType === EntityContextType.COLUMN
     );

     expect(suggestion).not.toBeUndefined();
@@ -283,7 +283,7 @@ describe('Flink SQL Syntax Suggestion', () => {
         pos
     )?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+        (syn) => syn.syntaxContextType === EntityContextType.COLUMN
     );

     expect(suggestion).not.toBeUndefined();
@@ -300,7 +300,7 @@ describe('Flink SQL Syntax Suggestion', () => {
         pos
     )?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+        (syn) => syn.syntaxContextType === EntityContextType.COLUMN
     );

     expect(suggestion).not.toBeUndefined();
@@ -310,18 +310,18 @@ describe('Flink SQL Syntax Suggestion', () => {
     test('Select group by column', () => {
         const pos: CaretPosition = {
             lineNumber: 35,
-            column: 30,
+            column: 27,
         };
         const syntaxes = parser.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
         );

         expect(suggestion).not.toBeUndefined();
-        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['tb', '.']);
+        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
     });

     test('Insert into spec columns', () => {
@@ -334,7 +334,7 @@ describe('Flink SQL Syntax Suggestion', () => {
         pos
     )?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+        (syn) => syn.syntaxContextType === EntityContextType.COLUMN
     );

     expect(suggestion).not.toBeUndefined();
@@ -351,7 +351,7 @@ describe('Flink SQL Syntax Suggestion', () => {
         pos
     )?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+        (syn) => syn.syntaxContextType === EntityContextType.COLUMN
     );

     expect(suggestion).not.toBeUndefined();
@@ -368,7 +368,7 @@ describe('Flink SQL Syntax Suggestion', () => {
         pos
     )?.syntax;
     const suggestion = syntaxes?.find(
-        (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
+        (syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
     );

     expect(suggestion).not.toBeUndefined();