feat: collect entity (#265)
* feat: add text and word utils * feat: add entity collector class * refactor: rename SyntaxContextType to EntityContextType * refactor: improve EntityCollector * feat: improve mysql parser grammar * feat: add mysql entity collector * test: mysql entity collector tests * feat: remove useless method * feat: improve spark grammar file * feat: add spark entity collector * test: spark entity collector unit tests * feat: remove useless code * feat: add queryStatement label * feat: add crateDatabaseStmt * feat: add trino entity collector * feat: rename trinosql to trino * test: trino collect entity unit tests * test: fix spark test * feat(impala): support impala entity collector (#256) * Feat/collect entity hive (#263) * feat(hive): support hive collect entity * feat(hive): update tableAllColumns * feat: replace antlr4ts with antlr4ng * feat(pgsql): pgsql collect entity (#268) * feat(pgsql): pgsql collect entity * feat(pgsql): optimize some name --------- Co-authored-by: zhaoge <> * feat: get word text by token.text * feat: support collect db/function and add splitListener (#270) * feat: support collect db/function and add splitListener * feat: remove SplitListener interface in baseParser to use SplitListener in root * fix(mysql): fix show create xxx not collect as createXXXEntity type * test: fix pgsql unit tests * Feat/error recover predicate (#274) * feat: optimize pgsql grammar * feat: add sql parser base * feat: apply SQLParserBase * feat: add geAllEntities method * test: test collect table when missing column * feat: compose collect and suggestion (#276) * feat: mark stmt which contain caret * test: correct name of getAllEntities * test: remove misscolumn unit tests * test: add suggestionWithEntity tests * feat: flink collect entity (#277) * feat: improve flink sql parser * feat: support flink entity collector * test: flink entity collect unit test * feat: move combine entities to parent class --------- Co-authored-by: 霜序 <976060700@qq.com> Co-authored-by: XCynthia 
<942884029@qq.com>
This commit is contained in:
395
test/parser/flinksql/contextCollect/entityCollector.test.ts
Normal file
395
test/parser/flinksql/contextCollect/entityCollector.test.ts
Normal file
@@ -0,0 +1,395 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { ParseTreeListener } from 'antlr4ng';
|
||||
import FlinkSQL, { FlinkEntityCollector, FlinkSqlSplitListener } from 'src/parser/flinksql';
|
||||
import { FlinkSqlParserListener } from 'src/lib/flinksql/FlinkSqlParserListener';
|
||||
import { EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { StmtContextType } from 'src/parser/common/entityCollector';
|
||||
|
||||
const commonSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'common.sql'), 'utf-8');
|
||||
|
||||
describe('Flink entity collector tests', () => {
|
||||
const flink = new FlinkSQL();
|
||||
const parseTree = flink.parse(commonSql);
|
||||
const splitListener = new FlinkSqlSplitListener();
|
||||
flink.listen(splitListener as FlinkSqlParserListener, parseTree);
|
||||
|
||||
test('validate common sql', () => {
|
||||
expect(flink.validate(commonSql).length).toBe(0);
|
||||
});
|
||||
|
||||
test('split results', () => {
|
||||
expect(splitListener.statementsContext.length).toBe(12);
|
||||
});
|
||||
|
||||
test('create table by columns', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[0];
|
||||
|
||||
const collectListener = new FlinkEntityCollector(commonSql);
|
||||
flink.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('MyTable');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
startIndex: 13,
|
||||
endIndex: 19,
|
||||
line: 1,
|
||||
startColumn: 14,
|
||||
endColumn: 21,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
startIndex: 0,
|
||||
endIndex: 85,
|
||||
startLine: 1,
|
||||
endLine: 1,
|
||||
startColumn: 1,
|
||||
endColumn: 87,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.relatedEntities).toBeNull();
|
||||
|
||||
expect(tableCreateEntity.columns.length).toBe(2);
|
||||
tableCreateEntity.columns.forEach((columEntity) => {
|
||||
expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
|
||||
expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||
expect(columEntity.text).toBe(
|
||||
commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
test('create table as select', () => {
|
||||
const createTableBySelectContext = splitListener.statementsContext[1];
|
||||
|
||||
const collectListener = new FlinkEntityCollector(commonSql);
|
||||
flink.listen(collectListener as ParseTreeListener, createTableBySelectContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('my_ctas_table');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
startIndex: 102,
|
||||
endIndex: 114,
|
||||
line: 3,
|
||||
startColumn: 14,
|
||||
endColumn: 27,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
startIndex: 89,
|
||||
endIndex: 228,
|
||||
startLine: 3,
|
||||
endLine: 11,
|
||||
startColumn: 1,
|
||||
endColumn: 20,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.columns).toBeNull();
|
||||
|
||||
expect(tableCreateEntity.relatedEntities.length).toBe(1);
|
||||
tableCreateEntity.relatedEntities.forEach((relatedEntity) => {
|
||||
expect(relatedEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(allEntities.some((en) => relatedEntity === en)).toBeTruthy();
|
||||
});
|
||||
|
||||
expect(allEntities[1].text).toBe('source_table');
|
||||
expect(allEntities[1].belongStmt.rootStmt).toBe(allEntities[0].belongStmt);
|
||||
expect(allEntities[1].position).toEqual({
|
||||
startIndex: 191,
|
||||
endIndex: 202,
|
||||
line: 9,
|
||||
startColumn: 5,
|
||||
endColumn: 17,
|
||||
});
|
||||
});
|
||||
|
||||
test('create table like', () => {
|
||||
const createTableLikeContext = splitListener.statementsContext[2];
|
||||
|
||||
const collectListener = new FlinkEntityCollector(commonSql);
|
||||
flink.listen(collectListener as ParseTreeListener, createTableLikeContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
const originTableEntity = allEntities[1];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('Orders_with_watermark');
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
|
||||
expect(tableCreateEntity.columns.length).toBe(1);
|
||||
expect(tableCreateEntity.columns[0].text).toBe('id');
|
||||
expect(tableCreateEntity.columns[0].entityContextType).toBe(
|
||||
EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
expect(tableCreateEntity.relatedEntities.length).toBe(1);
|
||||
expect(tableCreateEntity.relatedEntities[0]).toBe(originTableEntity);
|
||||
|
||||
expect(originTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(originTableEntity.text).toBe('Orders_in_file');
|
||||
expect(originTableEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||
});
|
||||
|
||||
test('common select from table', () => {
|
||||
const selectTableContext = splitListener.statementsContext[3];
|
||||
const collectListener = new FlinkEntityCollector(commonSql);
|
||||
flink.listen(collectListener as ParseTreeListener, selectTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableEntity = allEntities[0];
|
||||
|
||||
expect(tableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity.text).toBe('Orders');
|
||||
expect(tableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
|
||||
expect(tableEntity.columns).toBeNull();
|
||||
expect(tableEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('select from table join', () => {
|
||||
const selectTableContext = splitListener.statementsContext[4];
|
||||
const collectListener = new FlinkEntityCollector(commonSql);
|
||||
flink.listen(collectListener as ParseTreeListener, selectTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableEntity1 = allEntities[0];
|
||||
const tableEntity2 = allEntities[1];
|
||||
|
||||
expect(tableEntity1.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity1.text).toBe('Orders');
|
||||
expect(tableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
|
||||
expect(tableEntity1.columns).toBeNull();
|
||||
expect(tableEntity1.relatedEntities).toBeNull();
|
||||
|
||||
expect(tableEntity2.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity2.text).toBe('Product');
|
||||
expect(tableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
|
||||
expect(tableEntity2.columns).toBeNull();
|
||||
expect(tableEntity2.relatedEntities).toBeNull();
|
||||
|
||||
expect(tableEntity1.belongStmt).toBe(tableEntity2.belongStmt);
|
||||
});
|
||||
|
||||
test('union select', () => {
|
||||
const selectTableContext = splitListener.statementsContext[5];
|
||||
const collectListener = new FlinkEntityCollector(commonSql);
|
||||
flink.listen(collectListener as ParseTreeListener, selectTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableEntity1 = allEntities[0];
|
||||
const tableEntity2 = allEntities[1];
|
||||
|
||||
expect(tableEntity1.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity1.text).toBe('t1');
|
||||
expect(tableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
|
||||
expect(tableEntity1.columns).toBeNull();
|
||||
expect(tableEntity1.relatedEntities).toBeNull();
|
||||
|
||||
expect(tableEntity2.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity2.text).toBe('t2');
|
||||
expect(tableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
|
||||
expect(tableEntity2.columns).toBeNull();
|
||||
expect(tableEntity2.relatedEntities).toBeNull();
|
||||
|
||||
expect(tableEntity1.belongStmt.rootStmt).toBe(tableEntity2.belongStmt.rootStmt);
|
||||
});
|
||||
|
||||
test('insert into table values', () => {
|
||||
const insertTableContext = splitListener.statementsContext[6];
|
||||
const collectListener = new FlinkEntityCollector(commonSql);
|
||||
flink.listen(collectListener as ParseTreeListener, insertTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableEntity = allEntities[0];
|
||||
|
||||
expect(tableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity.text).toBe('country_page_view');
|
||||
expect(tableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||
|
||||
expect(tableEntity.columns).toBeNull();
|
||||
expect(tableEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('insert into table select', () => {
|
||||
const insertTableContext = splitListener.statementsContext[7];
|
||||
const collectListener = new FlinkEntityCollector(commonSql);
|
||||
flink.listen(collectListener as ParseTreeListener, insertTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const insertTableEntity = allEntities[0];
|
||||
const fromTableEntity1 = allEntities[1];
|
||||
|
||||
expect(insertTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(insertTableEntity.text).toBe('catalog1.db1.country_page_view');
|
||||
expect(insertTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||
|
||||
expect(fromTableEntity1.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(fromTableEntity1.text).toBe('page_view_source');
|
||||
expect(fromTableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
|
||||
expect(fromTableEntity1.belongStmt.parentStmt).toBe(insertTableEntity.belongStmt);
|
||||
expect(fromTableEntity1.belongStmt.rootStmt).toBe(insertTableEntity.belongStmt);
|
||||
});
|
||||
|
||||
test('multiple insert', () => {
|
||||
const insertTableContext = splitListener.statementsContext[8];
|
||||
const collectListener = new FlinkEntityCollector(commonSql);
|
||||
flink.listen(collectListener as ParseTreeListener, insertTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const insertTableEntity1 = allEntities[0];
|
||||
const insertTableEntity2 = allEntities[1];
|
||||
|
||||
expect(insertTableEntity1.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(insertTableEntity1.text).toBe('country_page_view1');
|
||||
expect(insertTableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||
|
||||
expect(insertTableEntity2.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(insertTableEntity2.text).toBe('country_page_view2');
|
||||
expect(insertTableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||
|
||||
expect(insertTableEntity2.belongStmt.parentStmt).toBe(
|
||||
insertTableEntity1.belongStmt.parentStmt
|
||||
);
|
||||
expect(insertTableEntity2.belongStmt.rootStmt).toBe(
|
||||
insertTableEntity1.belongStmt.parentStmt
|
||||
);
|
||||
});
|
||||
|
||||
test('create view as select table', () => {
|
||||
const insertTableContext = splitListener.statementsContext[9];
|
||||
const collectListener = new FlinkEntityCollector(commonSql);
|
||||
flink.listen(collectListener as ParseTreeListener, insertTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
expect(allEntities[0].entityContextType).toBe(EntityContextType.VIEW_CREATE);
|
||||
expect(allEntities[0].text).toBe('view1');
|
||||
expect(allEntities[0].belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
|
||||
|
||||
expect(allEntities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(allEntities[1].text).toBe('tbl');
|
||||
expect(allEntities[1].belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
});
|
||||
|
||||
test('create database', () => {
|
||||
const dbCreateContext = splitListener.statementsContext[10];
|
||||
|
||||
const collectListener = new FlinkEntityCollector(commonSql);
|
||||
flink.listen(collectListener as ParseTreeListener, dbCreateContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const dbEntity = allEntities[0];
|
||||
|
||||
expect(dbEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
|
||||
expect(dbEntity.text).toBe('db1');
|
||||
expect(dbEntity.position).toEqual({
|
||||
endColumn: 34,
|
||||
endIndex: 1160,
|
||||
line: 44,
|
||||
startColumn: 31,
|
||||
startIndex: 1158,
|
||||
});
|
||||
|
||||
expect(dbEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_DATABASE_STMT);
|
||||
expect(dbEntity.belongStmt.position).toEqual({
|
||||
endColumn: 82,
|
||||
endIndex: 1208,
|
||||
endLine: 44,
|
||||
startColumn: 1,
|
||||
startIndex: 1128,
|
||||
startLine: 44,
|
||||
});
|
||||
|
||||
expect(dbEntity.columns).toBeNull();
|
||||
expect(dbEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('create function', () => {
|
||||
const functionCreateContext = splitListener.statementsContext[11];
|
||||
|
||||
const collectListener = new FlinkEntityCollector(commonSql);
|
||||
flink.listen(collectListener as ParseTreeListener, functionCreateContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const functionEntity = allEntities[0];
|
||||
|
||||
expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
|
||||
expect(functionEntity.text).toBe('tempFunction');
|
||||
expect(functionEntity.position).toEqual({
|
||||
endColumn: 43,
|
||||
endIndex: 1253,
|
||||
line: 46,
|
||||
startColumn: 31,
|
||||
startIndex: 1242,
|
||||
});
|
||||
|
||||
expect(functionEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_FUNCTION_STMT
|
||||
);
|
||||
expect(functionEntity.belongStmt.position).toEqual({
|
||||
endColumn: 58,
|
||||
endIndex: 1268,
|
||||
endLine: 46,
|
||||
startColumn: 1,
|
||||
startIndex: 1212,
|
||||
startLine: 46,
|
||||
});
|
||||
|
||||
expect(functionEntity.columns).toBeNull();
|
||||
expect(functionEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
});
|
46
test/parser/flinksql/contextCollect/fixtures/common.sql
Normal file
46
test/parser/flinksql/contextCollect/fixtures/common.sql
Normal file
@@ -0,0 +1,46 @@
|
||||
CREATE TABLE MyTable ('user_id' BIGINT, 'name' STRING) WITH ('connector' = 'oracle-x');
|
||||
|
||||
CREATE TABLE my_ctas_table WITH ('connector' = 'kafka')
|
||||
AS SELECT
|
||||
id,
|
||||
name,
|
||||
age
|
||||
FROM
|
||||
source_table
|
||||
WHERE
|
||||
mod(id, 10) = 0;
|
||||
|
||||
CREATE TABLE Orders_with_watermark (
|
||||
id INT,
|
||||
WATERMARK FOR order_time AS order_time - INTERVAL '5' SECOND
|
||||
) WITH (
|
||||
'scan.startup.mode' = 'latest-offset'
|
||||
) LIKE Orders_in_file (
|
||||
EXCLUDING ALL
|
||||
INCLUDING GENERATED
|
||||
);
|
||||
|
||||
SELECT order_id, price + tax FROM Orders;
|
||||
|
||||
SELECT * FROM Orders LEFT JOIN Product ON Orders.product_id = Product.id;
|
||||
|
||||
(SELECT s FROM t1) UNION (SELECT s FROM t2);
|
||||
|
||||
INSERT INTO country_page_view VALUES ('Chinese', 'mumiao', 18), ('Amercian', 'georage', 22);
|
||||
|
||||
INSERT INTO catalog1.db1.country_page_view SELECT `user`, cnt FROM page_view_source;
|
||||
|
||||
EXECUTE STATEMENT SET BEGIN
|
||||
INSERT INTO country_page_view1
|
||||
VALUES ('Chinese', 'mumiao', 18),
|
||||
('Amercian', 'georage', 22);
|
||||
INSERT INTO country_page_view2
|
||||
VALUES ('Chinese', 'mumiao', 18),
|
||||
('Amercian', 'georage', 22);
|
||||
END;
|
||||
|
||||
CREATE VIEW view1(col1, col2) AS SELECT col3, col4 FROM tbl;
|
||||
|
||||
CREATE DATABASE IF NOT EXISTS db1 WITH ('key1' = 'value1', 'key2.a' = 'value2.a');
|
||||
|
||||
CREATE FUNCTION IF NOT EXISTS tempFunction AS 'SimpleUdf';
|
@@ -0,0 +1,11 @@
|
||||
SELECT FROM tb1;
|
||||
|
||||
SELECT col1, col2, FROM tb;
|
||||
|
||||
INSERT INTO insert_tb PARTITION (country, state) SELECT col1, col2, country, state FROM ( SELECT FROM inside_tb ) subquery;
|
||||
|
||||
INSERT INTO insert_tb PARTITION (country, state) SELECT col1, col2, country, state FROM ( SELECT id, FROM inside_tb ) subquery;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS derived_table WITH ('connector' = 'kafka') AS SELECT FROM origin_table;
|
||||
|
||||
CREATE TABLE IF NOT EXISTS derived_table WITH ('connector' = 'kafka') AS SELECT id, FROM origin_table;
|
@@ -32,7 +32,7 @@ SELECT col, FROM tb1;
|
||||
|
||||
SELECT * FROM tb ORDER BY ;
|
||||
|
||||
SELECT * FROM tb GROUP BY tb. ;
|
||||
SELECT * FROM tb GROUP BY ;
|
||||
|
||||
INSERT INTO tb (col, tb.c );
|
||||
|
||||
|
@@ -1,7 +1,7 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import FlinkSQL from 'src/parser/flinksql';
|
||||
import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
|
||||
@@ -18,7 +18,7 @@ describe('FlinkSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@@ -32,7 +32,7 @@ describe('FlinkSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@@ -46,7 +46,7 @@ describe('FlinkSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@@ -60,7 +60,7 @@ describe('FlinkSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
|
162
test/parser/flinksql/suggestion/suggestionWithEntity.test.ts
Normal file
162
test/parser/flinksql/suggestion/suggestionWithEntity.test.ts
Normal file
@@ -0,0 +1,162 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import FlinkSQL from 'src/parser/flinksql';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { commentOtherLine } from 'test/helper';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
path.join(__dirname, 'fixtures', 'suggestionWithEntity.sql'),
|
||||
'utf-8'
|
||||
);
|
||||
|
||||
describe('Flink SQL Syntax Suggestion with collect entity', () => {
|
||||
const flink = new FlinkSQL();
|
||||
|
||||
test('Validate Syntax SQL', () => {
|
||||
expect(flink.validate(syntaxSql).length).not.toBe(0);
|
||||
});
|
||||
|
||||
test('select with no columns', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 1,
|
||||
column: 8,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const parseTree = flink.parse(sql);
|
||||
|
||||
const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = flink.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(1);
|
||||
expect(entities[0].text).toBe('tb1');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('select with columns with columns and trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 3,
|
||||
column: 20,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = flink.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(1);
|
||||
expect(entities[0].text).toBe('tb');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('insert into from nested query with no column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 5,
|
||||
column: 98,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = flink.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('insert_tb');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('inside_tb');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('insert into from nested query with columns and trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 7,
|
||||
column: 102,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = flink.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('insert_tb');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('inside_tb');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('create table as select with no column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 9,
|
||||
column: 82,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = flink.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('derived_table');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('origin_table');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('create table as select with columns and trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 11,
|
||||
column: 86,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = flink.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('derived_table');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('origin_table');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
});
|
@@ -1,7 +1,7 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import FlinkSQL from 'src/parser/flinksql';
|
||||
import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { commentOtherLine } from 'test/helper';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
@@ -28,7 +28,7 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.CATALOG
|
||||
(syn) => syn.syntaxContextType === EntityContextType.CATALOG
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@@ -45,7 +45,7 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@@ -62,7 +62,7 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@@ -79,7 +79,7 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@@ -96,7 +96,7 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@@ -113,7 +113,7 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@@ -130,7 +130,7 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -147,7 +147,7 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -164,7 +164,7 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
|
||||
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -181,7 +181,7 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -198,7 +198,7 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -215,7 +215,7 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -232,7 +232,7 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -249,7 +249,7 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -266,7 +266,7 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -283,7 +283,7 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -300,7 +300,7 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -310,18 +310,18 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
test('Select group by column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 35,
|
||||
column: 30,
|
||||
column: 27,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['tb', '.']);
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
});
|
||||
|
||||
test('Insert into spec columns', () => {
|
||||
@ -334,7 +334,7 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -351,7 +351,7 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -368,7 +368,7 @@ describe('Flink SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
|
752
test/parser/hive/contextCollect/entityCollector.test.ts
Normal file
752
test/parser/hive/contextCollect/entityCollector.test.ts
Normal file
@ -0,0 +1,752 @@
|
||||
import { ParseTreeListener } from 'antlr4ng';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { HiveSqlParserListener } from 'src/lib/hive/HiveSqlParserListener';
|
||||
import { EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import HiveSQL, { HiveEntityCollector } from 'src/parser/hive';
|
||||
import { HiveSqlSplitListener } from 'src/parser/hive/hiveSplitListener';
|
||||
import { StmtContextType } from 'src/parser/common/entityCollector';
|
||||
|
||||
const commonSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'common.sql'), 'utf-8');
|
||||
|
||||
describe('Hive entity collector tests', () => {
|
||||
const hiveSql = new HiveSQL();
|
||||
const parseTree = hiveSql.parse(commonSql);
|
||||
const splitListener = new HiveSqlSplitListener();
|
||||
hiveSql.listen(splitListener as HiveSqlParserListener, parseTree);
|
||||
|
||||
test('validate common sql', () => {
|
||||
expect(hiveSql.validate(commonSql).length).toBe(0);
|
||||
});
|
||||
|
||||
test('split results', () => {
|
||||
expect(splitListener.statementsContext.length).toBe(18);
|
||||
});
|
||||
|
||||
test('create table by like', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[0];
|
||||
|
||||
const collectListener = new HiveEntityCollector(commonSql);
|
||||
hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
const tableLikeEntity = allEntities[1];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('copy_table');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
endColumn: 48,
|
||||
endIndex: 46,
|
||||
line: 1,
|
||||
startColumn: 38,
|
||||
startIndex: 37,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
endColumn: 66,
|
||||
endIndex: 64,
|
||||
startLine: 1,
|
||||
endLine: 1,
|
||||
startIndex: 0,
|
||||
startColumn: 1,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.relatedEntities).not.toBeNull();
|
||||
expect(tableCreateEntity.relatedEntities[0]).toEqual(tableLikeEntity);
|
||||
expect(tableCreateEntity.columns).toBeNull();
|
||||
|
||||
expect(tableLikeEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableLikeEntity.text).toBe('origin_table');
|
||||
expect(tableLikeEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||
});
|
||||
|
||||
test('create table by columns', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[1];
|
||||
|
||||
const collectListener = new HiveEntityCollector(commonSql);
|
||||
hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('list_bucket_multiple');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
endColumn: 67,
|
||||
endIndex: 133,
|
||||
line: 3,
|
||||
startColumn: 47,
|
||||
startIndex: 114,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
endColumn: 132,
|
||||
endIndex: 198,
|
||||
endLine: 3,
|
||||
startColumn: 1,
|
||||
startIndex: 68,
|
||||
startLine: 3,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.relatedEntities).toBeNull();
|
||||
expect(tableCreateEntity.columns).not.toBeNull();
|
||||
expect(tableCreateEntity.columns.length).toBe(3);
|
||||
tableCreateEntity.columns.forEach((columEntity) => {
|
||||
expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
|
||||
expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||
expect(columEntity.text).toBe(
|
||||
commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
test('create table by select', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[2];
|
||||
|
||||
const collectListener = new HiveEntityCollector(commonSql);
|
||||
hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
const tableFromEntity = allEntities[1];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('derived_table');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
endColumn: 41,
|
||||
endIndex: 241,
|
||||
line: 5,
|
||||
startColumn: 28,
|
||||
startIndex: 229,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
endColumn: 17,
|
||||
endIndex: 279,
|
||||
endLine: 9,
|
||||
startColumn: 1,
|
||||
startIndex: 202,
|
||||
startLine: 5,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.relatedEntities).not.toBeNull();
|
||||
expect(tableCreateEntity.relatedEntities[0]).toBe(tableFromEntity);
|
||||
expect(tableCreateEntity.columns).toBeNull();
|
||||
|
||||
expect(tableFromEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableFromEntity.text).toBe('origin_table');
|
||||
expect(tableFromEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
});
|
||||
|
||||
test('create view by select', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[3];
|
||||
|
||||
const collectListener = new HiveEntityCollector(commonSql);
|
||||
hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const viewCreateEntity = allEntities[0];
|
||||
const viewSelectEntity = allEntities[1];
|
||||
|
||||
expect(viewCreateEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
|
||||
expect(viewCreateEntity.text).toBe('mydb.bro_view');
|
||||
expect(viewCreateEntity.position).toEqual({
|
||||
endColumn: 26,
|
||||
endIndex: 307,
|
||||
line: 11,
|
||||
startColumn: 13,
|
||||
startIndex: 295,
|
||||
});
|
||||
|
||||
expect(viewCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
|
||||
expect(viewCreateEntity.belongStmt.position).toEqual({
|
||||
endColumn: 31,
|
||||
endIndex: 338,
|
||||
endLine: 12,
|
||||
startColumn: 1,
|
||||
startIndex: 283,
|
||||
startLine: 11,
|
||||
});
|
||||
|
||||
expect(viewCreateEntity.relatedEntities).not.toBeNull();
|
||||
expect(viewCreateEntity.relatedEntities[0]).toBe(viewSelectEntity);
|
||||
expect(viewCreateEntity.columns).toBeNull();
|
||||
|
||||
expect(viewSelectEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(viewSelectEntity.text).toBe('mydb.sale_tbl');
|
||||
expect(viewSelectEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
});
|
||||
|
||||
test('create view columns by select', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[4];
|
||||
|
||||
const collectListener = new HiveEntityCollector(commonSql);
|
||||
hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const viewCreateEntity = allEntities[0];
|
||||
const viewSelectEntity = allEntities[1];
|
||||
|
||||
expect(viewCreateEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
|
||||
expect(viewCreateEntity.text).toBe('mydb.task_view');
|
||||
expect(viewCreateEntity.position).toEqual({
|
||||
endColumn: 27,
|
||||
endIndex: 367,
|
||||
line: 14,
|
||||
startColumn: 13,
|
||||
startIndex: 354,
|
||||
});
|
||||
|
||||
expect(viewCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
|
||||
expect(viewCreateEntity.belongStmt.position).toEqual({
|
||||
endColumn: 21,
|
||||
endIndex: 596,
|
||||
endLine: 25,
|
||||
startColumn: 1,
|
||||
startIndex: 342,
|
||||
startLine: 14,
|
||||
});
|
||||
|
||||
expect(viewCreateEntity.relatedEntities).not.toBeNull();
|
||||
expect(viewCreateEntity.relatedEntities[0]).toBe(viewSelectEntity);
|
||||
expect(viewCreateEntity.columns).not.toBeNull();
|
||||
expect(viewCreateEntity.columns.length).toBe(3);
|
||||
viewCreateEntity.columns.forEach((columEntity) => {
|
||||
expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
|
||||
expect(columEntity.belongStmt).toBe(viewCreateEntity.belongStmt);
|
||||
expect(columEntity.text).toBe(
|
||||
commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
|
||||
);
|
||||
});
|
||||
|
||||
expect(viewSelectEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(viewSelectEntity.text).toBe('task_tbl');
|
||||
expect(viewSelectEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
});
|
||||
|
||||
test('create materialized view by select', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[5];
|
||||
|
||||
const collectListener = new HiveEntityCollector(commonSql);
|
||||
hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const viewCreateEntity = allEntities[0];
|
||||
const viewSelectEntity = allEntities[1];
|
||||
|
||||
expect(viewCreateEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
|
||||
expect(viewCreateEntity.text).toBe('mydb.bro_view');
|
||||
expect(viewCreateEntity.position).toEqual({
|
||||
endColumn: 53,
|
||||
endIndex: 651,
|
||||
line: 27,
|
||||
startColumn: 40,
|
||||
startIndex: 639,
|
||||
});
|
||||
|
||||
expect(viewCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
|
||||
expect(viewCreateEntity.belongStmt.position).toEqual({
|
||||
endColumn: 31,
|
||||
endIndex: 715,
|
||||
endLine: 30,
|
||||
startColumn: 1,
|
||||
startIndex: 600,
|
||||
startLine: 27,
|
||||
});
|
||||
|
||||
expect(viewCreateEntity.relatedEntities).not.toBeNull();
|
||||
expect(viewCreateEntity.relatedEntities[0]).toBe(viewSelectEntity);
|
||||
expect(viewCreateEntity.columns).toBeNull();
|
||||
|
||||
expect(viewSelectEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(viewSelectEntity.text).toBe('mydb.sale_tbl');
|
||||
expect(viewSelectEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
});
|
||||
|
||||
test('select table default', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[6];
|
||||
|
||||
const collectListener = new HiveEntityCollector(commonSql);
|
||||
hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const selectTableEntity = allEntities[0];
|
||||
|
||||
expect(selectTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(selectTableEntity.text).toBe('table_name_1');
|
||||
expect(selectTableEntity.position).toEqual({
|
||||
endColumn: 36,
|
||||
endIndex: 753,
|
||||
line: 32,
|
||||
startColumn: 24,
|
||||
startIndex: 742,
|
||||
});
|
||||
|
||||
expect(selectTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(selectTableEntity.belongStmt.position).toEqual({
|
||||
endColumn: 36,
|
||||
endIndex: 753,
|
||||
endLine: 32,
|
||||
startColumn: 1,
|
||||
startIndex: 719,
|
||||
startLine: 32,
|
||||
});
|
||||
|
||||
expect(selectTableEntity.columns).toBeNull();
|
||||
expect(selectTableEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('select table with join', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[7];
|
||||
|
||||
const collectListener = new HiveEntityCollector(commonSql);
|
||||
hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const selectTableEntity = allEntities[0];
|
||||
const joinTableEntity = allEntities[1];
|
||||
|
||||
expect(selectTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(selectTableEntity.text).toBe('a');
|
||||
expect(selectTableEntity.position).toEqual({
|
||||
endColumn: 18,
|
||||
endIndex: 773,
|
||||
line: 34,
|
||||
startColumn: 17,
|
||||
startIndex: 773,
|
||||
});
|
||||
|
||||
expect(selectTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(selectTableEntity.belongStmt.position).toEqual({
|
||||
endColumn: 74,
|
||||
endIndex: 829,
|
||||
endLine: 34,
|
||||
startColumn: 1,
|
||||
startIndex: 757,
|
||||
startLine: 34,
|
||||
});
|
||||
|
||||
expect(selectTableEntity.columns).toBeNull();
|
||||
expect(selectTableEntity.relatedEntities).toBeNull();
|
||||
|
||||
expect(selectTableEntity.belongStmt).toEqual(joinTableEntity.belongStmt);
|
||||
expect(joinTableEntity.text).toBe('b');
|
||||
expect(joinTableEntity.columns).toBeNull();
|
||||
expect(joinTableEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('from select table', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[8];
|
||||
|
||||
const collectListener = new HiveEntityCollector(commonSql);
|
||||
hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const selectTableEntity = allEntities[0];
|
||||
|
||||
expect(selectTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(selectTableEntity.text).toBe('table_name_1');
|
||||
expect(selectTableEntity.position).toEqual({
|
||||
endColumn: 18,
|
||||
endIndex: 849,
|
||||
line: 36,
|
||||
startColumn: 6,
|
||||
startIndex: 838,
|
||||
});
|
||||
|
||||
expect(selectTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(selectTableEntity.belongStmt.position).toEqual({
|
||||
endColumn: 36,
|
||||
endIndex: 867,
|
||||
endLine: 36,
|
||||
startColumn: 1,
|
||||
startIndex: 833,
|
||||
startLine: 36,
|
||||
});
|
||||
|
||||
expect(selectTableEntity.columns).toBeNull();
|
||||
expect(selectTableEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('from select table with join', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[9];
|
||||
|
||||
const collectListener = new HiveEntityCollector(commonSql);
|
||||
hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const selectTableEntity = allEntities[0];
|
||||
const joinTableEntity = allEntities[1];
|
||||
|
||||
expect(selectTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(selectTableEntity.text).toBe('a');
|
||||
expect(selectTableEntity.position).toEqual({
|
||||
endColumn: 7,
|
||||
endIndex: 876,
|
||||
line: 38,
|
||||
startColumn: 6,
|
||||
startIndex: 876,
|
||||
});
|
||||
|
||||
expect(selectTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(selectTableEntity.belongStmt.position).toEqual({
|
||||
endColumn: 74,
|
||||
endIndex: 943,
|
||||
endLine: 38,
|
||||
startColumn: 1,
|
||||
startIndex: 871,
|
||||
startLine: 38,
|
||||
});
|
||||
|
||||
expect(selectTableEntity.columns).toBeNull();
|
||||
expect(selectTableEntity.relatedEntities).toBeNull();
|
||||
|
||||
expect(selectTableEntity.belongStmt).toEqual(joinTableEntity.belongStmt);
|
||||
expect(joinTableEntity.text).toBe('b');
|
||||
expect(joinTableEntity.columns).toBeNull();
|
||||
expect(joinTableEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('insert table with values', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[10];
|
||||
|
||||
const collectListener = new HiveEntityCollector(commonSql);
|
||||
hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const insertTableEntity = allEntities[0];
|
||||
|
||||
expect(insertTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(insertTableEntity.text).toBe('students');
|
||||
expect(insertTableEntity.position).toEqual({
|
||||
endColumn: 27,
|
||||
endIndex: 972,
|
||||
line: 40,
|
||||
startColumn: 19,
|
||||
startIndex: 965,
|
||||
});
|
||||
|
||||
expect(insertTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||
expect(insertTableEntity.belongStmt.position).toEqual({
|
||||
endColumn: 66,
|
||||
endIndex: 1045,
|
||||
endLine: 41,
|
||||
startColumn: 1,
|
||||
startIndex: 947,
|
||||
startLine: 40,
|
||||
});
|
||||
|
||||
expect(insertTableEntity.columns).toBeNull();
|
||||
expect(insertTableEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('insert table use select', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[11];
|
||||
|
||||
const collectListener = new HiveEntityCollector(commonSql);
|
||||
hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const insertTableEntity = allEntities[0];
|
||||
const fromTableEntity = allEntities[1];
|
||||
|
||||
expect(insertTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(insertTableEntity.text).toBe('table_name');
|
||||
expect(insertTableEntity.position).toEqual({
|
||||
endColumn: 23,
|
||||
endIndex: 1070,
|
||||
line: 43,
|
||||
startColumn: 13,
|
||||
startIndex: 1061,
|
||||
});
|
||||
|
||||
expect(insertTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||
expect(insertTableEntity.belongStmt.position).toEqual({
|
||||
endColumn: 18,
|
||||
endIndex: 1183,
|
||||
endLine: 46,
|
||||
startColumn: 1,
|
||||
startIndex: 1049,
|
||||
startLine: 43,
|
||||
});
|
||||
|
||||
expect(insertTableEntity.columns).toBeNull();
|
||||
expect(insertTableEntity.relatedEntities).toBeNull();
|
||||
|
||||
expect(fromTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(fromTableEntity.text).toBe('source_table');
|
||||
expect(fromTableEntity.belongStmt.parentStmt).toEqual(insertTableEntity.belongStmt);
|
||||
expect(fromTableEntity.belongStmt.rootStmt).toBe(insertTableEntity.belongStmt);
|
||||
});
|
||||
|
||||
test('from insert table use select', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[12];
|
||||
|
||||
const collectListener = new HiveEntityCollector(commonSql);
|
||||
hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const fromTableEntity = allEntities[0];
|
||||
const insertTableEntity = allEntities[1];
|
||||
|
||||
expect(insertTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(insertTableEntity.text).toBe('page_view');
|
||||
expect(insertTableEntity.position).toEqual({
|
||||
endColumn: 33,
|
||||
endIndex: 1241,
|
||||
line: 49,
|
||||
startColumn: 24,
|
||||
startIndex: 1233,
|
||||
});
|
||||
|
||||
expect(insertTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||
expect(insertTableEntity.belongStmt.position).toEqual({
|
||||
endColumn: 93,
|
||||
endIndex: 1370,
|
||||
endLine: 50,
|
||||
startColumn: 1,
|
||||
startIndex: 1187,
|
||||
startLine: 48,
|
||||
});
|
||||
|
||||
expect(insertTableEntity.columns).toBeNull();
|
||||
expect(insertTableEntity.relatedEntities).toBeNull();
|
||||
|
||||
expect(fromTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||
expect(fromTableEntity.text).toBe('page_view_stg');
|
||||
expect(fromTableEntity.belongStmt).toEqual(insertTableEntity.belongStmt);
|
||||
});
|
||||
|
||||
test('create db', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[13];
|
||||
|
||||
const collectListener = new HiveEntityCollector(commonSql);
|
||||
hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const dbEntity = allEntities[0];
|
||||
|
||||
expect(dbEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
|
||||
expect(dbEntity.text).toBe('mydb');
|
||||
expect(dbEntity.position).toEqual({
|
||||
endColumn: 21,
|
||||
endIndex: 1393,
|
||||
line: 52,
|
||||
startColumn: 17,
|
||||
startIndex: 1390,
|
||||
});
|
||||
|
||||
expect(dbEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_DATABASE_STMT);
|
||||
expect(dbEntity.belongStmt.position).toEqual({
|
||||
endColumn: 21,
|
||||
endIndex: 1393,
|
||||
endLine: 52,
|
||||
startColumn: 1,
|
||||
startIndex: 1374,
|
||||
startLine: 52,
|
||||
});
|
||||
|
||||
expect(dbEntity.columns).toBeNull();
|
||||
expect(dbEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('create remote db', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[14];
|
||||
|
||||
const collectListener = new HiveEntityCollector(commonSql);
|
||||
hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const dbEntity = allEntities[0];
|
||||
|
||||
expect(dbEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
|
||||
expect(dbEntity.text).toBe('mydb');
|
||||
expect(dbEntity.position).toEqual({
|
||||
endColumn: 28,
|
||||
endIndex: 1423,
|
||||
line: 54,
|
||||
startColumn: 24,
|
||||
startIndex: 1420,
|
||||
});
|
||||
|
||||
expect(dbEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_DATABASE_STMT);
|
||||
expect(dbEntity.belongStmt.position).toEqual({
|
||||
endColumn: 28,
|
||||
endIndex: 1423,
|
||||
endLine: 54,
|
||||
startColumn: 1,
|
||||
startIndex: 1397,
|
||||
startLine: 54,
|
||||
});
|
||||
|
||||
expect(dbEntity.columns).toBeNull();
|
||||
expect(dbEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('show locks db', () => {
|
||||
const dbContext = splitListener.statementsContext[15];
|
||||
|
||||
const collectListener = new HiveEntityCollector(commonSql);
|
||||
hiveSql.listen(collectListener as ParseTreeListener, dbContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const dbEntity = allEntities[0];
|
||||
|
||||
expect(dbEntity.entityContextType).toBe(EntityContextType.DATABASE);
|
||||
expect(dbEntity.text).toBe('db1');
|
||||
expect(dbEntity.position).toEqual({
|
||||
endColumn: 24,
|
||||
endIndex: 1449,
|
||||
line: 56,
|
||||
startColumn: 21,
|
||||
startIndex: 1447,
|
||||
});
|
||||
|
||||
expect(dbEntity.belongStmt.stmtContextType).toBe(StmtContextType.COMMON_STMT);
|
||||
expect(dbEntity.belongStmt.position).toEqual({
|
||||
endColumn: 25,
|
||||
endIndex: 1450,
|
||||
endLine: 56,
|
||||
startColumn: 1,
|
||||
startIndex: 1427,
|
||||
startLine: 56,
|
||||
});
|
||||
|
||||
expect(dbEntity.columns).toBeNull();
|
||||
expect(dbEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('create function', () => {
|
||||
const functionCreateContext = splitListener.statementsContext[16];
|
||||
|
||||
const collectListener = new HiveEntityCollector(commonSql);
|
||||
hiveSql.listen(collectListener as ParseTreeListener, functionCreateContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const functionEntity = allEntities[0];
|
||||
|
||||
expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
|
||||
expect(functionEntity.text).toBe('base_analizer');
|
||||
expect(functionEntity.position).toEqual({
|
||||
endColumn: 30,
|
||||
endIndex: 1481,
|
||||
line: 58,
|
||||
startColumn: 17,
|
||||
startIndex: 1469,
|
||||
});
|
||||
|
||||
expect(functionEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_FUNCTION_STMT
|
||||
);
|
||||
expect(functionEntity.belongStmt.position).toEqual({
|
||||
endColumn: 30,
|
||||
endIndex: 1481,
|
||||
endLine: 58,
|
||||
startColumn: 17,
|
||||
startIndex: 1469,
|
||||
startLine: 58,
|
||||
});
|
||||
|
||||
expect(functionEntity.columns).toBeNull();
|
||||
expect(functionEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('create temporary function', () => {
|
||||
const functionCreateContext = splitListener.statementsContext[17];
|
||||
|
||||
const collectListener = new HiveEntityCollector(commonSql);
|
||||
hiveSql.listen(collectListener as ParseTreeListener, functionCreateContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const functionEntity = allEntities[0];
|
||||
|
||||
expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
|
||||
expect(functionEntity.text).toBe('flat_analizer');
|
||||
expect(functionEntity.position).toEqual({
|
||||
endColumn: 40,
|
||||
endIndex: 1549,
|
||||
line: 60,
|
||||
startColumn: 27,
|
||||
startIndex: 1537,
|
||||
});
|
||||
|
||||
expect(functionEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_FUNCTION_STMT
|
||||
);
|
||||
expect(functionEntity.belongStmt.position).toEqual({
|
||||
endColumn: 40,
|
||||
endIndex: 1549,
|
||||
endLine: 60,
|
||||
startColumn: 27,
|
||||
startIndex: 1537,
|
||||
startLine: 60,
|
||||
});
|
||||
|
||||
expect(functionEntity.columns).toBeNull();
|
||||
expect(functionEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
});
|
60
test/parser/hive/contextCollect/fixtures/common.sql
Normal file
60
test/parser/hive/contextCollect/fixtures/common.sql
Normal file
@ -0,0 +1,60 @@
|
||||
CREATE TEMPORARY TABLE IF NOT EXISTS copy_table LIKE origin_table;
|
||||
|
||||
CREATE TEMPORARY EXTERNAL TABLE IF NOT EXISTS list_bucket_multiple (col1 STRING, col2 INT, col3 STRING) COMMENT 'this is a comment';
|
||||
|
||||
CREATE TABLE IF NOT EXISTS derived_table AS
|
||||
SELECT
|
||||
*
|
||||
FROM
|
||||
origin_table;
|
||||
|
||||
CREATE VIEW mydb.bro_view
|
||||
AS SELECT * FROM mydb.sale_tbl;
|
||||
|
||||
CREATE VIEW mydb.task_view (
|
||||
taskId COMMENT '任务id',
|
||||
taskName COMMENT '任务名称',
|
||||
taskRunTime COMMENT '任务运行时长'
|
||||
)
|
||||
COMMENT '一个任务信息视图'
|
||||
TBLPROPERTIES(
|
||||
'author'='hayden'
|
||||
)
|
||||
AS SELECT DISTINCT id, `name`, runtime
|
||||
FROM task_tbl
|
||||
WHERE type='day';
|
||||
|
||||
CREATE MATERIALIZED VIEW IF NOT EXISTS mydb.bro_view
|
||||
DISABLE REWRITE
|
||||
COMMENT '一个测试视图'
|
||||
AS SELECT * FROM mydb.sale_tbl;
|
||||
|
||||
SELECT col1, col2 FROM table_name_1;
|
||||
|
||||
SELECT a.* FROM a JOIN b ON (a.id = b.id AND a.department = b.department);
|
||||
|
||||
FROM table_name_1 SELECT col1, col2;
|
||||
|
||||
FROM a JOIN b ON (a.id = b.id AND a.department = b.department) SELECT a.*;
|
||||
|
||||
INSERT INTO TABLE students(a,b,c)
|
||||
VALUES ('fred flintstone', 35, 1.28), ('barney rubble', 32, 2.32);
|
||||
|
||||
INSERT INTO table_name PARTITION (country, state)
|
||||
SELECT col1, col2,
|
||||
CONCAT(country, '_', state) AS country_state
|
||||
FROM source_table;
|
||||
|
||||
FROM page_view_stg pvs
|
||||
INSERT OVERWRITE TABLE page_view PARTITION(dt='2008-06-08', country)
|
||||
SELECT pvs.viewTime, pvs.userid, pvs.page_url, pvs.referrer_url, null, null, pvs.ip, pvs.cnt;
|
||||
|
||||
CREATE DATABASE mydb;
|
||||
|
||||
CREATE REMOTE DATABASE mydb;
|
||||
|
||||
SHOW LOCKS DATABASE db1;
|
||||
|
||||
CREATE FUNCTION base_analizer AS 'com.udf.BaseFieldUDF';
|
||||
|
||||
CREATE TEMPORARY FUNCTION flat_analizer AS 'com.udtf.EventJsonUDTF';
|
@ -0,0 +1,23 @@
|
||||
SELECT FROM tb1
|
||||
|
||||
SELECT col1, col2, FROM tb
|
||||
|
||||
FROM table_name_1 SELECT ; -- TODO: request semicolon
|
||||
|
||||
FROM table_name_1 SELECT col1, col2, ; -- TODO: request semicolon
|
||||
|
||||
FROM a JOIN b ON (a.id = b.id AND a.department = b.department) SELECT ; -- TODO: request semicolon
|
||||
|
||||
FROM a JOIN b ON (a.id = b.id AND a.department = b.department) SELECT a.*, ; -- TODO: request semicolon
|
||||
|
||||
FROM page_view_stg pvs INSERT OVERWRITE TABLE page_view PARTITION(dt='2008-06-08', country) SELECT ; -- TODO: request semicolon
|
||||
|
||||
FROM page_view_stg pvs INSERT OVERWRITE TABLE page_view PARTITION(dt='2008-06-08', country) SELECT id, ; -- TODO: request semicolon
|
||||
|
||||
INSERT INTO insert_tb PARTITION (country, state) SELECT col1, col2, country, state FROM ( SELECT FROM inside_tb ) subquery
|
||||
|
||||
INSERT INTO insert_tb PARTITION (country, state) SELECT col1, col2, country, state FROM ( SELECT id, FROM inside_tb ) subquery
|
||||
|
||||
CREATE TABLE IF NOT EXISTS derived_table AS SELECT FROM origin_table
|
||||
|
||||
CREATE TABLE IF NOT EXISTS derived_table AS SELECT id, FROM origin_table
|
@ -32,4 +32,10 @@ MERGE INTO tablename USING tablename2 ON (tablename.id = tablename2.id) WHEN MAT
|
||||
|
||||
ALTER TABLE tbl CHANGE COLUMN ;
|
||||
|
||||
ALTER TABLE tbl CHANGE COLUMN tbl.oldcol new ;
|
||||
ALTER TABLE tbl CHANGE COLUMN tbl.oldcol new ;
|
||||
|
||||
FROM table_name_1 SELECT col1, col2;
|
||||
|
||||
FROM a JOIN b ON (a.id = b.id AND a.department = b.department) SELECT a.*;
|
||||
|
||||
FROM page_view_stg INSERT;
|
@ -1,7 +1,7 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
|
||||
@ -18,7 +18,7 @@ describe('HiveSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -32,7 +32,7 @@ describe('HiveSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -46,7 +46,7 @@ describe('HiveSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -60,7 +60,7 @@ describe('HiveSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
|
310
test/parser/hive/suggestion/suggestionWithEntity.test.ts
Normal file
310
test/parser/hive/suggestion/suggestionWithEntity.test.ts
Normal file
@ -0,0 +1,310 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { commentOtherLine } from 'test/helper';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
path.join(__dirname, 'fixtures', 'suggestionWithEntity.sql'),
|
||||
'utf-8'
|
||||
);
|
||||
|
||||
describe('Hive SQL Syntax Suggestion with collect entity', () => {
|
||||
const hive = new HiveSQL();
|
||||
|
||||
test('Validate Syntax SQL', () => {
|
||||
expect(hive.validate(syntaxSql).length).not.toBe(0);
|
||||
});
|
||||
|
||||
test('select with no columns', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 1,
|
||||
column: 8,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = hive.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(1);
|
||||
expect(entities[0].text).toBe('tb1');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('select with columns with columns and trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 3,
|
||||
column: 20,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = hive.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(1);
|
||||
expect(entities[0].text).toBe('tb');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('from table select with no column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 5,
|
||||
column: 26,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = hive.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(1);
|
||||
expect(entities[0].text).toBe('table_name_1');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.rootStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('from table select with with columns and trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 7,
|
||||
column: 38,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = hive.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(1);
|
||||
expect(entities[0].text).toBe('table_name_1');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.rootStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('from joined table select with no column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 9,
|
||||
column: 71,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = hive.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('a');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeFalsy();
|
||||
expect(entities[0].belongStmt.rootStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('b');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeFalsy();
|
||||
expect(entities[1].belongStmt.rootStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('from joined table select with columns and trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 11,
|
||||
column: 76,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = hive.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('a');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeFalsy();
|
||||
expect(entities[0].belongStmt.rootStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('b');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeFalsy();
|
||||
expect(entities[1].belongStmt.rootStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('from table insert into table select no columns', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 13,
|
||||
column: 100,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = hive.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('page_view_stg');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeFalsy();
|
||||
expect(entities[0].belongStmt.rootStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('page_view');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeFalsy();
|
||||
expect(entities[1].belongStmt.rootStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('from table insert into table select with column and trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 15,
|
||||
column: 104,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = hive.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('page_view_stg');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeFalsy();
|
||||
expect(entities[0].belongStmt.rootStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('page_view');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeFalsy();
|
||||
expect(entities[1].belongStmt.rootStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('insert into from nested query with no column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 17,
|
||||
column: 98,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = hive.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('insert_tb');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('inside_tb');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('insert into from nested query with columns and trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 19,
|
||||
column: 102,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = hive.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('insert_tb');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('inside_tb');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('create table as select with no column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 21,
|
||||
column: 52,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = hive.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('derived_table');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('origin_table');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('create table as select with columns and trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 23,
|
||||
column: 56,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = hive.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('derived_table');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('origin_table');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
});
|
@ -1,7 +1,7 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { commentOtherLine } from 'test/helper';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
@ -28,7 +28,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -45,7 +45,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -62,7 +62,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -79,7 +79,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -96,7 +96,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -113,7 +113,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -130,7 +130,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -147,7 +147,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
|
||||
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -164,7 +164,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -181,7 +181,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -198,7 +198,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -215,7 +215,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -232,7 +232,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -249,7 +249,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -266,7 +266,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -287,7 +287,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -304,10 +304,61 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['new']);
|
||||
});
|
||||
|
||||
test('From Table Select', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 37,
|
||||
column: 18,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['table_name_1']);
|
||||
});
|
||||
|
||||
test('From Table Select join', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 39,
|
||||
column: 14,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['b']);
|
||||
});
|
||||
|
||||
test('From Table Insert', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 41,
|
||||
column: 19,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['page_view_stg']);
|
||||
});
|
||||
});
|
||||
|
@ -239,3 +239,8 @@ EXPLAIN LOCKS UPDATE target SET b = 1 WHERE p IN (SELECT t.q1 FROM source t WHER
|
||||
|
||||
-- LanguageManual Explain -- User-level Explain Output
|
||||
EXPLAIN select sum(hash(key)), sum(hash(value)) from src_orc_merge_test_part where ds='2012-01-03' and ts='2012-01-03+14:46:31';
|
||||
|
||||
-- FROM xx SELECT
|
||||
FROM table_name_1 SELECT col1, col2;
|
||||
|
||||
FROM a JOIN b ON (a.id = b.id AND a.department = b.department) SELECT a.*;
|
600
test/parser/impala/contextCollect/entityCollector.test.ts
Normal file
600
test/parser/impala/contextCollect/entityCollector.test.ts
Normal file
@ -0,0 +1,600 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { ImpalaSqlSplitListener } from 'src/parser/impala';
|
||||
import { EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { StmtContextType } from 'src/parser/common/entityCollector';
|
||||
import ImpalaSQL from 'src/parser/impala';
|
||||
import { ImpalaSqlParserListener } from 'src/lib/impala/ImpalaSqlParserListener';
|
||||
import ImpalaEntityCollector from 'src/parser/impala/impalaEntityCollector';
|
||||
import { ParseTreeListener } from 'antlr4ng';
|
||||
|
||||
const commonSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'common.sql'), 'utf-8');
|
||||
|
||||
describe('ImpalaSQL entity collector tests', () => {
|
||||
const impalaSql = new ImpalaSQL();
|
||||
const parseTree = impalaSql.parse(commonSql);
|
||||
const splitListener = new ImpalaSqlSplitListener();
|
||||
impalaSql.listen(splitListener as ImpalaSqlParserListener, parseTree);
|
||||
|
||||
test('validate common sql', () => {
|
||||
expect(impalaSql.validate(commonSql).length).toBe(0);
|
||||
});
|
||||
|
||||
test('split results', () => {
|
||||
expect(splitListener.statementsContext.length).toBe(14);
|
||||
});
|
||||
|
||||
test('create table by like', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[0];
|
||||
|
||||
const collectListener = new ImpalaEntityCollector(commonSql);
|
||||
impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
const likeTableEntity = allEntities[1];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('new_Table');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
endColumn: 23,
|
||||
endIndex: 21,
|
||||
line: 1,
|
||||
startColumn: 14,
|
||||
startIndex: 13,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
endColumn: 38,
|
||||
endIndex: 36,
|
||||
endLine: 1,
|
||||
startColumn: 1,
|
||||
startIndex: 0,
|
||||
startLine: 1,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.relatedEntities.length).toBe(1);
|
||||
expect(tableCreateEntity.columns).toBeNull();
|
||||
expect(tableCreateEntity.relatedEntities[0]).toBe(likeTableEntity);
|
||||
|
||||
expect(likeTableEntity.text).toBe('old_table');
|
||||
expect(likeTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(likeTableEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||
});
|
||||
|
||||
test('create table by columns', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[1];
|
||||
|
||||
const collectListener = new ImpalaEntityCollector(commonSql);
|
||||
impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('census');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
endColumn: 20,
|
||||
endIndex: 58,
|
||||
line: 3,
|
||||
startColumn: 14,
|
||||
startIndex: 53,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
endColumn: 77,
|
||||
endIndex: 115,
|
||||
endLine: 3,
|
||||
startColumn: 1,
|
||||
startIndex: 40,
|
||||
startLine: 3,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.relatedEntities).toBeNull();
|
||||
expect(tableCreateEntity.columns.length).toBe(2);
|
||||
tableCreateEntity.columns.forEach((columEntity) => {
|
||||
expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
|
||||
expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||
expect(columEntity.text).toBe(
|
||||
commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
test('create table by select', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[2];
|
||||
|
||||
const collectListener = new ImpalaEntityCollector(commonSql);
|
||||
impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
const fromCreateEntity = allEntities[1];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('sorted_census_data');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
endColumn: 32,
|
||||
endIndex: 149,
|
||||
line: 5,
|
||||
startColumn: 14,
|
||||
startIndex: 132,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
endColumn: 30,
|
||||
endIndex: 278,
|
||||
endLine: 9,
|
||||
startColumn: 1,
|
||||
startIndex: 119,
|
||||
startLine: 5,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.relatedEntities.length).toBe(1);
|
||||
expect(tableCreateEntity.columns).toBeNull();
|
||||
|
||||
expect(fromCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(fromCreateEntity.text).toBe('unsorted_census_data');
|
||||
expect(tableCreateEntity.relatedEntities[0]).toBe(fromCreateEntity);
|
||||
});
|
||||
|
||||
test('create kudu table by select', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[3];
|
||||
|
||||
const collectListener = new ImpalaEntityCollector(commonSql);
|
||||
impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
const fromCreateEntity = allEntities[1];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('ctas_t1');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
endColumn: 21,
|
||||
endIndex: 301,
|
||||
line: 11,
|
||||
startColumn: 14,
|
||||
startIndex: 295,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
endColumn: 31,
|
||||
endIndex: 405,
|
||||
endLine: 14,
|
||||
startColumn: 1,
|
||||
startIndex: 282,
|
||||
startLine: 11,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.relatedEntities.length).toBe(1);
|
||||
expect(tableCreateEntity.columns).toBeNull();
|
||||
|
||||
expect(fromCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(fromCreateEntity.text).toBe('kudu_t1');
|
||||
expect(tableCreateEntity.relatedEntities[0]).toBe(fromCreateEntity);
|
||||
});
|
||||
|
||||
test('create kudu table by columns', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[4];
|
||||
|
||||
const collectListener = new ImpalaEntityCollector(commonSql);
|
||||
impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('kudu_t3');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
endColumn: 21,
|
||||
endIndex: 428,
|
||||
line: 16,
|
||||
startColumn: 14,
|
||||
startIndex: 422,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
endColumn: 17,
|
||||
endIndex: 705,
|
||||
endLine: 23,
|
||||
startColumn: 1,
|
||||
startIndex: 409,
|
||||
startLine: 16,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.relatedEntities).toBeNull();
|
||||
expect(tableCreateEntity.columns.length).toBe(4);
|
||||
tableCreateEntity.columns.forEach((columEntity) => {
|
||||
expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
|
||||
expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||
expect(columEntity.text).toBe(
|
||||
commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
test('create view', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[5];
|
||||
|
||||
const collectListener = new ImpalaEntityCollector(commonSql);
|
||||
impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const viewCreateEntity = allEntities[0];
|
||||
const fromCreateEntity = allEntities[1];
|
||||
|
||||
expect(viewCreateEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
|
||||
expect(viewCreateEntity.text).toBe('my_view');
|
||||
expect(viewCreateEntity.position).toEqual({
|
||||
endColumn: 20,
|
||||
endIndex: 727,
|
||||
line: 25,
|
||||
startColumn: 13,
|
||||
startIndex: 721,
|
||||
});
|
||||
|
||||
expect(viewCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
|
||||
expect(viewCreateEntity.belongStmt.position).toEqual({
|
||||
endColumn: 146,
|
||||
endIndex: 853,
|
||||
endLine: 25,
|
||||
startColumn: 1,
|
||||
startIndex: 709,
|
||||
startLine: 25,
|
||||
});
|
||||
|
||||
expect(viewCreateEntity.relatedEntities[0]).toBe(fromCreateEntity);
|
||||
expect(viewCreateEntity.columns.length).toBe(2);
|
||||
viewCreateEntity.columns.forEach((columEntity) => {
|
||||
expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
|
||||
expect(columEntity.belongStmt).toBe(viewCreateEntity.belongStmt);
|
||||
expect(columEntity.text).toBe(
|
||||
commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
|
||||
);
|
||||
});
|
||||
|
||||
expect(fromCreateEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(fromCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(fromCreateEntity.text).toBe('my_table');
|
||||
});
|
||||
|
||||
test('insert table select', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[6];
|
||||
|
||||
const collectListener = new ImpalaEntityCollector(commonSql);
|
||||
impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableInsertEntity = allEntities[0];
|
||||
const fromTableEntity = allEntities[1];
|
||||
|
||||
expect(tableInsertEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableInsertEntity.text).toBe('t2');
|
||||
expect(tableInsertEntity.position).toEqual({
|
||||
endColumn: 15,
|
||||
endIndex: 870,
|
||||
line: 27,
|
||||
startColumn: 13,
|
||||
startIndex: 869,
|
||||
});
|
||||
|
||||
expect(tableInsertEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||
expect(tableInsertEntity.belongStmt.position).toEqual({
|
||||
endColumn: 40,
|
||||
endIndex: 895,
|
||||
endLine: 27,
|
||||
startColumn: 1,
|
||||
startIndex: 857,
|
||||
startLine: 27,
|
||||
});
|
||||
|
||||
expect(tableInsertEntity.columns).toBeNull();
|
||||
|
||||
expect(fromTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(fromTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(fromTableEntity.text).toBe('t1');
|
||||
expect(fromTableEntity.belongStmt.parentStmt).toBe(tableInsertEntity.belongStmt);
|
||||
});
|
||||
|
||||
test('select table', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[7];
|
||||
|
||||
const collectListener = new ImpalaEntityCollector(commonSql);
|
||||
impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableEntity1 = allEntities[0];
|
||||
|
||||
expect(tableEntity1.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity1.text).toBe('a');
|
||||
expect(tableEntity1.position).toEqual({
|
||||
endColumn: 16,
|
||||
endIndex: 913,
|
||||
line: 29,
|
||||
startColumn: 15,
|
||||
startIndex: 913,
|
||||
});
|
||||
|
||||
expect(tableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(tableEntity1.belongStmt.position).toEqual({
|
||||
endColumn: 16,
|
||||
endIndex: 913,
|
||||
endLine: 29,
|
||||
startColumn: 1,
|
||||
startIndex: 899,
|
||||
startLine: 29,
|
||||
});
|
||||
|
||||
expect(tableEntity1.columns).toBeNull();
|
||||
expect(tableEntity1.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('select table join', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[8];
|
||||
|
||||
const collectListener = new ImpalaEntityCollector(commonSql);
|
||||
impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableEntity1 = allEntities[0];
|
||||
const tableEntity2 = allEntities[1];
|
||||
|
||||
expect(tableEntity1.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity1.text).toBe('t1');
|
||||
expect(tableEntity1.position).toEqual({
|
||||
endColumn: 28,
|
||||
endIndex: 943,
|
||||
line: 31,
|
||||
startColumn: 26,
|
||||
startIndex: 942,
|
||||
});
|
||||
|
||||
expect(tableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(tableEntity1.belongStmt.position).toEqual({
|
||||
endColumn: 20,
|
||||
endIndex: 1022,
|
||||
endLine: 33,
|
||||
startColumn: 1,
|
||||
startIndex: 917,
|
||||
startLine: 31,
|
||||
});
|
||||
|
||||
expect(tableEntity1.columns).toBeNull();
|
||||
expect(tableEntity1.relatedEntities).toBeNull();
|
||||
|
||||
expect(tableEntity2.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(tableEntity2.text).toBe('t2');
|
||||
expect(tableEntity2.columns).toBeNull();
|
||||
expect(tableEntity2.relatedEntities).toBeNull();
|
||||
expect(tableEntity2.belongStmt).toBe(tableEntity1.belongStmt);
|
||||
});
|
||||
|
||||
test('create db', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[9];
|
||||
|
||||
const collectListener = new ImpalaEntityCollector(commonSql);
|
||||
impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const dbEntity = allEntities[0];
|
||||
|
||||
expect(dbEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
|
||||
expect(dbEntity.text).toBe('my_db');
|
||||
expect(dbEntity.position).toEqual({
|
||||
endColumn: 22,
|
||||
endIndex: 1046,
|
||||
line: 35,
|
||||
startColumn: 17,
|
||||
startIndex: 1042,
|
||||
});
|
||||
|
||||
expect(dbEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_DATABASE_STMT);
|
||||
expect(dbEntity.belongStmt.position).toEqual({
|
||||
endColumn: 52,
|
||||
endIndex: 1076,
|
||||
endLine: 35,
|
||||
startColumn: 1,
|
||||
startIndex: 1026,
|
||||
startLine: 35,
|
||||
});
|
||||
|
||||
expect(dbEntity.columns).toBeNull();
|
||||
expect(dbEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('create schema', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[10];
|
||||
|
||||
const collectListener = new ImpalaEntityCollector(commonSql);
|
||||
impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const schemaEntity = allEntities[0];
|
||||
|
||||
expect(schemaEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
|
||||
expect(schemaEntity.text).toBe('my_schema');
|
||||
expect(schemaEntity.position).toEqual({
|
||||
endColumn: 38,
|
||||
endIndex: 1116,
|
||||
line: 37,
|
||||
startColumn: 29,
|
||||
startIndex: 1108,
|
||||
});
|
||||
|
||||
expect(schemaEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_DATABASE_STMT);
|
||||
expect(schemaEntity.belongStmt.position).toEqual({
|
||||
endColumn: 94,
|
||||
endIndex: 1172,
|
||||
endLine: 37,
|
||||
startColumn: 1,
|
||||
startIndex: 1080,
|
||||
startLine: 37,
|
||||
});
|
||||
|
||||
expect(schemaEntity.columns).toBeNull();
|
||||
expect(schemaEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('comment dbName', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[11];
|
||||
|
||||
const collectListener = new ImpalaEntityCollector(commonSql);
|
||||
impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const dbEntity = allEntities[0];
|
||||
|
||||
expect(dbEntity.entityContextType).toBe(EntityContextType.DATABASE);
|
||||
expect(dbEntity.text).toBe('my_database');
|
||||
expect(dbEntity.position).toEqual({
|
||||
endColumn: 32,
|
||||
endIndex: 1206,
|
||||
line: 39,
|
||||
startColumn: 21,
|
||||
startIndex: 1196,
|
||||
});
|
||||
|
||||
// 由于没有处理 comment 语句,所以当前是处于 COMMON_STMT
|
||||
expect(dbEntity.belongStmt.stmtContextType).toBe(StmtContextType.COMMON_STMT);
|
||||
expect(dbEntity.belongStmt.position).toEqual({
|
||||
endColumn: 59,
|
||||
endIndex: 1233,
|
||||
endLine: 39,
|
||||
startColumn: 1,
|
||||
startIndex: 1176,
|
||||
startLine: 39,
|
||||
});
|
||||
|
||||
expect(dbEntity.columns).toBeNull();
|
||||
expect(dbEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('create aggregate function', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[12];
|
||||
|
||||
const collectListener = new ImpalaEntityCollector(commonSql);
|
||||
impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const functionEntity = allEntities[0];
|
||||
|
||||
expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
|
||||
expect(functionEntity.text).toBe('function_name');
|
||||
expect(functionEntity.position).toEqual({
|
||||
endColumn: 40,
|
||||
endIndex: 1274,
|
||||
line: 41,
|
||||
startColumn: 27,
|
||||
startIndex: 1262,
|
||||
});
|
||||
|
||||
expect(functionEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_FUNCTION_STMT
|
||||
);
|
||||
expect(functionEntity.belongStmt.position).toEqual({
|
||||
endColumn: 26,
|
||||
endIndex: 1391,
|
||||
endLine: 45,
|
||||
startColumn: 1,
|
||||
startIndex: 1236,
|
||||
startLine: 41,
|
||||
});
|
||||
|
||||
expect(functionEntity.columns).toBeNull();
|
||||
expect(functionEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('create function', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[13];
|
||||
|
||||
const collectListener = new ImpalaEntityCollector(commonSql);
|
||||
impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const functionEntity = allEntities[0];
|
||||
|
||||
expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
|
||||
expect(functionEntity.text).toBe('function_name');
|
||||
expect(functionEntity.position).toEqual({
|
||||
endColumn: 30,
|
||||
endIndex: 1423,
|
||||
line: 47,
|
||||
startColumn: 17,
|
||||
startIndex: 1411,
|
||||
});
|
||||
|
||||
expect(functionEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_FUNCTION_STMT
|
||||
);
|
||||
expect(functionEntity.belongStmt.position).toEqual({
|
||||
endColumn: 21,
|
||||
endIndex: 1517,
|
||||
endLine: 50,
|
||||
startColumn: 1,
|
||||
startIndex: 1395,
|
||||
startLine: 47,
|
||||
});
|
||||
|
||||
expect(functionEntity.columns).toBeNull();
|
||||
expect(functionEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
});
|
50
test/parser/impala/contextCollect/fixtures/common.sql
Normal file
50
test/parser/impala/contextCollect/fixtures/common.sql
Normal file
@ -0,0 +1,50 @@
|
||||
CREATE TABLE new_Table like old_table;
|
||||
|
||||
create table census (name string, census_year int) partitioned by (year int);
|
||||
|
||||
CREATE TABLE sorted_census_data
|
||||
SORT BY (last_name, state)
|
||||
STORED AS PARQUET
|
||||
AS SELECT last_name, first_name, state, address
|
||||
FROM unsorted_census_data;
|
||||
|
||||
CREATE TABLE ctas_t1
|
||||
PRIMARY KEY (id) PARTITION BY HASH (id) PARTITIONS 10
|
||||
STORED AS KUDU
|
||||
AS SELECT id, s FROM kudu_t1;
|
||||
|
||||
CREATE TABLE kudu_t3 (id BIGINT, year INT, s STRING,
|
||||
b BOOLEAN, PRIMARY KEY (id,year))
|
||||
PARTITION BY HASH (id) PARTITIONS 20,
|
||||
RANGE (year) (PARTITION 1980 <= VALUES < 1990,
|
||||
PARTITION 1990 <= VALUES < 2000,
|
||||
PARTITION VALUE = 2001,
|
||||
PARTITION 2001 < VALUES < 2003)
|
||||
STORED AS KUDU;
|
||||
|
||||
CREATE VIEW my_view (age COMMENT 'this is number col', age1 COMMENT 'this is number col') TBLPROPERTIES ('tblp1' = '1') AS SELECT * FROM my_table;
|
||||
|
||||
insert into t2 (y, x) select c1 from t1;
|
||||
|
||||
SELECT * from a;
|
||||
|
||||
SELECT t1.c1, t2.c2 FROM t1 JOIN t2
|
||||
ON t1.id = t2.id and t1.type_flag = t2.type_flag
|
||||
WHERE t1.c1 > 100;
|
||||
|
||||
CREATE DATABASE my_db LOCATION '/path/to/partition';
|
||||
|
||||
CREATE SCHEMA IF NOT EXISTS my_schema COMMENT 'my first schema' LOCATION '/path/to/partition';
|
||||
|
||||
COMMENT ON DATABASE my_database IS 'This is my database.';
|
||||
|
||||
CREATE AGGREGATE FUNCTION function_name(arg_type1, arg_type2)
|
||||
RETURNS return_type
|
||||
LOCATION 'hdfs_path'
|
||||
UPDATE_FN='update_function'
|
||||
MERGE_FN='merge_function';
|
||||
|
||||
CREATE FUNCTION function_name(arg_type1, arg_type2)
|
||||
RETURNS return_type
|
||||
LOCATION 'hdfs_path_to_dot_so'
|
||||
SYMBOL='symbol_name';
|
@ -0,0 +1,11 @@
|
||||
SELECT FROM tab;
|
||||
|
||||
SELECT name, calculate_age(birthdate) AS age, FROM students;
|
||||
|
||||
INSERT INTO insert_tb SELECT FROM from_tb;
|
||||
|
||||
INSERT INTO insert_tb SELECT id, FROM from_tb;
|
||||
|
||||
CREATE TABLE sorted_census_data AS SELECT FROM unsorted_census_data;
|
||||
|
||||
CREATE TABLE sorted_census_data AS SELECT id, FROM unsorted_census_data;
|
@ -1,7 +1,7 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import ImpalaSQL from 'src/parser/impala';
|
||||
import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
|
||||
@ -18,7 +18,7 @@ describe('ImpalaSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -32,7 +32,7 @@ describe('ImpalaSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -46,7 +46,7 @@ describe('ImpalaSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -60,7 +60,7 @@ describe('ImpalaSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
|
158
test/parser/impala/suggestion/suggestionWithEntity.test.ts
Normal file
158
test/parser/impala/suggestion/suggestionWithEntity.test.ts
Normal file
@ -0,0 +1,158 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import ImpalaSQL from 'src/parser/impala';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { commentOtherLine } from 'test/helper';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
path.join(__dirname, 'fixtures', 'suggestionWithEntity.sql'),
|
||||
'utf-8'
|
||||
);
|
||||
|
||||
describe('Impala SQL Syntax Suggestion with collect entity', () => {
|
||||
const impala = new ImpalaSQL();
|
||||
|
||||
test('select with no column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 1,
|
||||
column: 8,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = impala.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
// TODO:
|
||||
// const entities = impala.getAllEntities(sql, pos);
|
||||
// expect(entities.length).toBe(1);
|
||||
// expect(entities[0].text).toBe('my_db.tb');
|
||||
// expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
// expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('select with columns with trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 3,
|
||||
column: 47,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = impala.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = impala.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(1);
|
||||
expect(entities[0].text).toBe('students');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('insert into table as select with no column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 5,
|
||||
column: 30,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = impala.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = impala.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(1);
|
||||
expect(entities[0].text).toBe('insert_tb');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
// TODO:
|
||||
// expect(entities[1].text).toBe('from_tb');
|
||||
// expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
// expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('insert into table as select with trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 7,
|
||||
column: 34,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = impala.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = impala.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('insert_tb');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('from_tb');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('create table as select with no column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 9,
|
||||
column: 43,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = impala.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = impala.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(1);
|
||||
expect(entities[0].text).toBe('sorted_census_data');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
// TODO:
|
||||
// expect(entities[1].text).toBe('unsorted_census_data');
|
||||
// expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
// expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('create table as select with trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 11,
|
||||
column: 47,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = impala.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = impala.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('sorted_census_data');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('unsorted_census_data');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
});
|
@ -1,7 +1,7 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import ImpalaSQL from 'src/parser/impala';
|
||||
import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { commentOtherLine } from 'test/helper';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
@ -22,7 +22,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -39,7 +39,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
|
||||
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -56,7 +56,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -73,7 +73,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -90,7 +90,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -107,7 +107,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -124,7 +124,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -141,7 +141,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -158,7 +158,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -175,7 +175,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
|
||||
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -192,7 +192,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -209,7 +209,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -226,7 +226,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -243,7 +243,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -260,7 +260,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -277,7 +277,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -294,7 +294,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -311,7 +311,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -328,7 +328,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -345,7 +345,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -362,7 +362,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -379,7 +379,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -396,7 +396,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -413,7 +413,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -430,7 +430,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
|
@ -178,4 +178,8 @@ CREATE TABLE fk(id INT, col1 INT, col2 STRING, PRIMARY KEY(id),
|
||||
CREATE TABLE pk(id INT, PRIMARY KEY(id) DISABLE, NOVALIDATE, RELY);
|
||||
|
||||
CREATE TABLE fk(id INT, col1 INT, col2 STRING, PRIMARY KEY(id),
|
||||
FOREIGN KEY(col1, col2) REFERENCES pk(col1, col2));
|
||||
FOREIGN KEY(col1, col2) REFERENCES pk(col1, col2));
|
||||
|
||||
CREATE TABLE new_Table like old_table;
|
||||
|
||||
CREATE TABLE new_Table like old_table partitioned by (year int) SORT BY (last_name, state);
|
||||
|
495
test/parser/mysql/contextCollect/entityCollector.test.ts
Normal file
495
test/parser/mysql/contextCollect/entityCollector.test.ts
Normal file
@ -0,0 +1,495 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import MySQL from 'src/parser/mysql';
|
||||
import { MySqlEntityCollector, MysqlSplitListener } from 'src/parser/mysql';
|
||||
import { ParseTreeListener } from 'antlr4ng';
|
||||
import { MySqlParserListener } from 'src/lib/mysql/MySqlParserListener';
|
||||
import { EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { StmtContextType } from 'src/parser/common/entityCollector';
|
||||
|
||||
const commonSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'common.sql'), 'utf-8');
|
||||
|
||||
describe('MySQL entity collector tests', () => {
|
||||
const mysql = new MySQL();
|
||||
const parseTree = mysql.parse(commonSql);
|
||||
const splitListener = new MysqlSplitListener();
|
||||
mysql.listen(splitListener as MySqlParserListener, parseTree);
|
||||
|
||||
test('validate common sql', () => {
|
||||
expect(mysql.validate(commonSql).length).toBe(0);
|
||||
});
|
||||
|
||||
test('split results', () => {
|
||||
expect(splitListener.statementsContext.length).toBe(15);
|
||||
});
|
||||
|
||||
test('create table by columns', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[0];
|
||||
|
||||
const collectListener = new MySqlEntityCollector(commonSql);
|
||||
mysql.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('new_tb_with_col');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
startIndex: 13,
|
||||
endIndex: 27,
|
||||
line: 1,
|
||||
startColumn: 14,
|
||||
endColumn: 29,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
startIndex: 0,
|
||||
endIndex: 45,
|
||||
startLine: 1,
|
||||
endLine: 1,
|
||||
startColumn: 1,
|
||||
endColumn: 47,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.relatedEntities).toBeNull();
|
||||
|
||||
expect(tableCreateEntity.columns.length).toBe(2);
|
||||
tableCreateEntity.columns.forEach((columEntity) => {
|
||||
expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
|
||||
expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||
expect(columEntity.text).toBe(
|
||||
commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
test('create table by select', () => {
|
||||
const createTableBySelectContext = splitListener.statementsContext[1];
|
||||
|
||||
const collectListener = new MySqlEntityCollector(commonSql);
|
||||
mysql.listen(collectListener as ParseTreeListener, createTableBySelectContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(3);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('new_tb_from_old');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
startIndex: 62,
|
||||
endIndex: 76,
|
||||
line: 3,
|
||||
startColumn: 14,
|
||||
endColumn: 29,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
startIndex: 49,
|
||||
endIndex: 265,
|
||||
startLine: 3,
|
||||
endLine: 12,
|
||||
startColumn: 1,
|
||||
endColumn: 34,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.columns).toBeNull();
|
||||
|
||||
expect(tableCreateEntity.relatedEntities.length).toBe(2);
|
||||
tableCreateEntity.relatedEntities.forEach((relatedEntity) => {
|
||||
expect(relatedEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(allEntities.some((en) => relatedEntity === en)).toBeTruthy();
|
||||
});
|
||||
|
||||
expect(allEntities[1].text).toBe('old_tb1');
|
||||
expect(allEntities[1].belongStmt.rootStmt).toBe(allEntities[0].belongStmt);
|
||||
expect(allEntities[1].position).toEqual({
|
||||
startIndex: 161,
|
||||
endIndex: 167,
|
||||
line: 8,
|
||||
startColumn: 9,
|
||||
endColumn: 16,
|
||||
});
|
||||
|
||||
expect(allEntities[2].text).toBe('old_tb2');
|
||||
expect(allEntities[2].belongStmt.rootStmt).toBe(allEntities[0].belongStmt);
|
||||
expect(allEntities[2].position).toEqual({
|
||||
startIndex: 187,
|
||||
endIndex: 193,
|
||||
line: 10,
|
||||
startColumn: 9,
|
||||
endColumn: 16,
|
||||
});
|
||||
});
|
||||
|
||||
test('create table like', () => {
|
||||
const createTableLikeContext = splitListener.statementsContext[2];
|
||||
|
||||
const collectListener = new MySqlEntityCollector(commonSql);
|
||||
mysql.listen(collectListener as ParseTreeListener, createTableLikeContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
const originTableEntity = allEntities[1];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('new_tb_like_old');
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
|
||||
expect(tableCreateEntity.columns).toBeNull();
|
||||
expect(tableCreateEntity.relatedEntities.length).toBe(1);
|
||||
expect(tableCreateEntity.relatedEntities[0]).toBe(originTableEntity);
|
||||
|
||||
expect(originTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(originTableEntity.text).toBe('old_tb');
|
||||
expect(originTableEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||
});
|
||||
|
||||
test('common select from table', () => {
|
||||
const selectTableContext = splitListener.statementsContext[3];
|
||||
const collectListener = new MySqlEntityCollector(commonSql);
|
||||
mysql.listen(collectListener as ParseTreeListener, selectTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableEntity = allEntities[0];
|
||||
|
||||
expect(tableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity.text).toBe('select_tb');
|
||||
expect(tableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
|
||||
expect(tableEntity.columns).toBeNull();
|
||||
expect(tableEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('select into from table', () => {
|
||||
const selectTableContext = splitListener.statementsContext[4];
|
||||
const collectListener = new MySqlEntityCollector(commonSql);
|
||||
mysql.listen(collectListener as ParseTreeListener, selectTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableEntity = allEntities[0];
|
||||
|
||||
expect(tableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity.text).toBe('into_select_tb');
|
||||
expect(tableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
|
||||
expect(tableEntity.columns).toBeNull();
|
||||
expect(tableEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('select from table join', () => {
|
||||
const selectTableContext = splitListener.statementsContext[5];
|
||||
const collectListener = new MySqlEntityCollector(commonSql);
|
||||
mysql.listen(collectListener as ParseTreeListener, selectTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableEntity1 = allEntities[0];
|
||||
const tableEntity2 = allEntities[1];
|
||||
|
||||
expect(tableEntity1.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity1.text).toBe('from_tb');
|
||||
expect(tableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
|
||||
expect(tableEntity1.columns).toBeNull();
|
||||
expect(tableEntity1.relatedEntities).toBeNull();
|
||||
|
||||
expect(tableEntity2.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity2.text).toBe('join_tb');
|
||||
expect(tableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
|
||||
expect(tableEntity2.columns).toBeNull();
|
||||
expect(tableEntity2.relatedEntities).toBeNull();
|
||||
|
||||
expect(tableEntity1.belongStmt).toBe(tableEntity2.belongStmt);
|
||||
});
|
||||
|
||||
test('insert into table values', () => {
|
||||
const insertTableContext = splitListener.statementsContext[6];
|
||||
const collectListener = new MySqlEntityCollector(commonSql);
|
||||
mysql.listen(collectListener as ParseTreeListener, insertTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableEntity = allEntities[0];
|
||||
|
||||
expect(tableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity.text).toBe('insert_tb');
|
||||
expect(tableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||
|
||||
expect(tableEntity.columns).toBeNull();
|
||||
expect(tableEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('insert into table select', () => {
|
||||
const insertTableContext = splitListener.statementsContext[7];
|
||||
const collectListener = new MySqlEntityCollector(commonSql);
|
||||
mysql.listen(collectListener as ParseTreeListener, insertTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(3);
|
||||
|
||||
const insertTableEntity = allEntities[0];
|
||||
const fromTableEntity1 = allEntities[1];
|
||||
const fromTableEntity2 = allEntities[2];
|
||||
|
||||
expect(insertTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(insertTableEntity.text).toBe('insert_from_tb');
|
||||
expect(insertTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||
|
||||
expect(fromTableEntity1.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(fromTableEntity1.text).toBe('from_tb1');
|
||||
expect(fromTableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
|
||||
expect(fromTableEntity2.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(fromTableEntity2.text).toBe('from_tb2');
|
||||
expect(fromTableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
|
||||
expect(fromTableEntity1.belongStmt.parentStmt).toBe(insertTableEntity.belongStmt);
|
||||
expect(fromTableEntity2.belongStmt.parentStmt).toBe(insertTableEntity.belongStmt);
|
||||
expect(fromTableEntity1.belongStmt.rootStmt).toBe(insertTableEntity.belongStmt);
|
||||
expect(fromTableEntity2.belongStmt.rootStmt).toBe(insertTableEntity.belongStmt);
|
||||
});
|
||||
|
||||
test('create view with col', () => {
|
||||
const insertTableContext = splitListener.statementsContext[8];
|
||||
const collectListener = new MySqlEntityCollector(commonSql);
|
||||
mysql.listen(collectListener as ParseTreeListener, insertTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
expect(allEntities[0].entityContextType).toBe(EntityContextType.VIEW_CREATE);
|
||||
expect(allEntities[0].text).toBe('new_view');
|
||||
expect(allEntities[0].belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
|
||||
|
||||
expect(allEntities[0].columns.length).toBe(2);
|
||||
expect(allEntities[0].columns[0].text).toBe('col1');
|
||||
expect(allEntities[0].columns[1].text).toBe('col2');
|
||||
expect(allEntities[0].columns[0].entityContextType).toBe(EntityContextType.COLUMN_CREATE);
|
||||
expect(allEntities[0].columns[0].belongStmt).toBe(allEntities[0].belongStmt);
|
||||
});
|
||||
|
||||
test('create view as select table', () => {
|
||||
const insertTableContext = splitListener.statementsContext[9];
|
||||
const collectListener = new MySqlEntityCollector(commonSql);
|
||||
mysql.listen(collectListener as ParseTreeListener, insertTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
expect(allEntities[0].entityContextType).toBe(EntityContextType.VIEW_CREATE);
|
||||
expect(allEntities[0].text).toBe('db.new_view');
|
||||
expect(allEntities[0].belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
|
||||
});
|
||||
|
||||
test('create database', () => {
|
||||
const dbCreateContext = splitListener.statementsContext[10];
|
||||
|
||||
const collectListener = new MySqlEntityCollector(commonSql);
|
||||
mysql.listen(collectListener as ParseTreeListener, dbCreateContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const dbEntity = allEntities[0];
|
||||
|
||||
expect(dbEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
|
||||
expect(dbEntity.text).toBe('db_name');
|
||||
expect(dbEntity.position).toEqual({
|
||||
endColumn: 24,
|
||||
endIndex: 778,
|
||||
line: 31,
|
||||
startColumn: 17,
|
||||
startIndex: 772,
|
||||
});
|
||||
|
||||
expect(dbEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_DATABASE_STMT);
|
||||
expect(dbEntity.belongStmt.position).toEqual({
|
||||
endColumn: 47,
|
||||
endIndex: 801,
|
||||
endLine: 31,
|
||||
startColumn: 1,
|
||||
startIndex: 756,
|
||||
startLine: 31,
|
||||
});
|
||||
|
||||
expect(dbEntity.columns).toBeNull();
|
||||
expect(dbEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('create schema', () => {
|
||||
const schemaCreateContext = splitListener.statementsContext[11];
|
||||
|
||||
const collectListener = new MySqlEntityCollector(commonSql);
|
||||
mysql.listen(collectListener as ParseTreeListener, schemaCreateContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const schemaEntity = allEntities[0];
|
||||
|
||||
expect(schemaEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
|
||||
expect(schemaEntity.text).toBe('db_name');
|
||||
expect(schemaEntity.position).toEqual({
|
||||
endColumn: 36,
|
||||
endIndex: 839,
|
||||
line: 33,
|
||||
startColumn: 29,
|
||||
startIndex: 833,
|
||||
});
|
||||
|
||||
expect(schemaEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_DATABASE_STMT);
|
||||
expect(schemaEntity.belongStmt.position).toEqual({
|
||||
endColumn: 59,
|
||||
endIndex: 862,
|
||||
endLine: 33,
|
||||
startColumn: 1,
|
||||
startIndex: 805,
|
||||
startLine: 33,
|
||||
});
|
||||
|
||||
expect(schemaEntity.columns).toBeNull();
|
||||
expect(schemaEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('show create database', () => {
|
||||
const dbCreateContext = splitListener.statementsContext[12];
|
||||
|
||||
const collectListener = new MySqlEntityCollector(commonSql);
|
||||
mysql.listen(collectListener as ParseTreeListener, dbCreateContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const dbEntity = allEntities[0];
|
||||
|
||||
expect(dbEntity.entityContextType).toBe(EntityContextType.DATABASE);
|
||||
expect(dbEntity.text).toBe('db_name');
|
||||
expect(dbEntity.position).toEqual({
|
||||
endColumn: 41,
|
||||
endIndex: 905,
|
||||
line: 35,
|
||||
startColumn: 34,
|
||||
startIndex: 899,
|
||||
});
|
||||
|
||||
expect(dbEntity.belongStmt.stmtContextType).toBe(StmtContextType.COMMON_STMT);
|
||||
expect(dbEntity.belongStmt.position).toEqual({
|
||||
endColumn: 42,
|
||||
endIndex: 906,
|
||||
endLine: 35,
|
||||
startColumn: 1,
|
||||
startIndex: 866,
|
||||
startLine: 35,
|
||||
});
|
||||
|
||||
expect(dbEntity.columns).toBeNull();
|
||||
expect(dbEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('drop database', () => {
|
||||
const dbDropContext = splitListener.statementsContext[13];
|
||||
|
||||
const collectListener = new MySqlEntityCollector(commonSql);
|
||||
mysql.listen(collectListener as ParseTreeListener, dbDropContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const dbEntity = allEntities[0];
|
||||
|
||||
expect(dbEntity.entityContextType).toBe(EntityContextType.DATABASE);
|
||||
expect(dbEntity.text).toBe('db_name');
|
||||
expect(dbEntity.position).toEqual({
|
||||
endColumn: 30,
|
||||
endIndex: 937,
|
||||
line: 37,
|
||||
startColumn: 23,
|
||||
startIndex: 931,
|
||||
});
|
||||
|
||||
expect(dbEntity.belongStmt.stmtContextType).toBe(StmtContextType.COMMON_STMT);
|
||||
expect(dbEntity.belongStmt.position).toEqual({
|
||||
endColumn: 31,
|
||||
endIndex: 938,
|
||||
endLine: 37,
|
||||
startColumn: 1,
|
||||
startIndex: 909,
|
||||
startLine: 37,
|
||||
});
|
||||
|
||||
expect(dbEntity.columns).toBeNull();
|
||||
expect(dbEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('create function', () => {
|
||||
const functionCreateContext = splitListener.statementsContext[14];
|
||||
|
||||
const collectListener = new MySqlEntityCollector(commonSql);
|
||||
mysql.listen(collectListener as ParseTreeListener, functionCreateContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const functionEntity = allEntities[0];
|
||||
|
||||
expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
|
||||
expect(functionEntity.text).toBe('my_concat_ws');
|
||||
expect(functionEntity.position).toEqual({
|
||||
endColumn: 43,
|
||||
endIndex: 982,
|
||||
line: 39,
|
||||
startColumn: 31,
|
||||
startIndex: 971,
|
||||
});
|
||||
|
||||
expect(functionEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_FUNCTION_STMT
|
||||
);
|
||||
expect(functionEntity.belongStmt.position).toEqual({
|
||||
endColumn: 87,
|
||||
endIndex: 1026,
|
||||
endLine: 39,
|
||||
startColumn: 1,
|
||||
startIndex: 941,
|
||||
startLine: 39,
|
||||
});
|
||||
|
||||
expect(functionEntity.columns).toBeNull();
|
||||
expect(functionEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
});
|
39
test/parser/mysql/contextCollect/fixtures/common.sql
Normal file
39
test/parser/mysql/contextCollect/fixtures/common.sql
Normal file
@ -0,0 +1,39 @@
|
||||
CREATE TABLE new_tb_with_col (id int, age int);
|
||||
|
||||
CREATE TABLE new_tb_from_old AS
|
||||
SELECT
|
||||
old_tb1.column1,
|
||||
old_tb2.column2
|
||||
FROM
|
||||
old_tb1
|
||||
JOIN
|
||||
old_tb2 ON old_tb1.id = old_tb2.id
|
||||
WHERE
|
||||
old_tb1.column1 = 'value';
|
||||
|
||||
|
||||
CREATE TABLE new_tb_like_old LIKE old_tb;
|
||||
|
||||
SELECT * FROM select_tb LIMIT 5,10;
|
||||
|
||||
SELECT * INTO @my_var FROM into_select_tb;
|
||||
|
||||
SELECT * FROM from_tb LEFT JOIN join_tb ON (t1.a = t2.a);
|
||||
|
||||
INSERT INTO insert_tb (a,b,c) VALUES(1,2,3), (4,5,6), (7,8,9);
|
||||
|
||||
INSERT INTO insert_from_tb (a, b) SELECT c, d FROM from_tb1 UNION SELECT e, f FROM from_tb2 ON DUPLICATE KEY UPDATE b = b + c;
|
||||
|
||||
CREATE VIEW new_view (col1, col2) AS SELECT CURRENT_DATE;
|
||||
|
||||
CREATE VIEW db.new_view AS SELECT * FROM from_tb;
|
||||
|
||||
CREATE DATABASE db_name DEFAULT ENCRYPTION 'N';
|
||||
|
||||
CREATE SCHEMA IF NOT EXISTS db_name DEFAULT ENCRYPTION 'Y';
|
||||
|
||||
SHOW CREATE SCHEMA IF NOT EXISTS db_name;
|
||||
|
||||
DROP SCHEMA IF EXISTS db_name;
|
||||
|
||||
CREATE FUNCTION IF NOT EXISTS my_concat_ws RETURNS STRING SONAME 'udf_my_concat_ws.so';
|
@ -0,0 +1,11 @@
|
||||
SELECT FROM my_db.tb;
|
||||
|
||||
SELECT name, calculate_age(birthdate) AS age, FROM students;
|
||||
|
||||
INSERT INTO insert_tb SELECT FROM from_tb;
|
||||
|
||||
INSERT INTO insert_tb SELECT id, age, FROM from_tb;
|
||||
|
||||
CREATE TABLE sorted_census_data AS SELECT FROM unsorted_census_data;
|
||||
|
||||
CREATE TABLE sorted_census_data AS SELECT id, age, FROM unsorted_census_data;
|
@ -47,3 +47,11 @@ SELECT id, n FROM tbl GROUP BY ;
|
||||
SELECT id, n FROM tbl ORDER BY name, i ;
|
||||
|
||||
SELECT id FROM tb1 GROUP BY ROLLUP( );
|
||||
|
||||
SHOW CREATE FUNCTION func_name;
|
||||
|
||||
SHOW CREATE TABLE tbl_name;
|
||||
|
||||
SHOW CREATE DATABASE IF NOT EXISTS db_name;
|
||||
|
||||
SHOW CREATE VIEW test.v;
|
@ -1,7 +1,7 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import MySQL from 'src/parser/mysql';
|
||||
import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
|
||||
@ -18,7 +18,7 @@ describe('MySQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -32,7 +32,7 @@ describe('MySQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -46,7 +46,7 @@ describe('MySQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -60,7 +60,7 @@ describe('MySQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
|
156
test/parser/mysql/suggestion/suggestionWithEntity.test.ts
Normal file
156
test/parser/mysql/suggestion/suggestionWithEntity.test.ts
Normal file
@ -0,0 +1,156 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import MySQL from 'src/parser/mysql';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { commentOtherLine } from 'test/helper';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
path.join(__dirname, 'fixtures', 'suggestionWithEntity.sql'),
|
||||
'utf-8'
|
||||
);
|
||||
|
||||
describe('MySQL Syntax Suggestion with collect entity', () => {
|
||||
const mysql = new MySQL();
|
||||
|
||||
test('select with no columns', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 1,
|
||||
column: 8,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = mysql.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = mysql.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(1);
|
||||
expect(entities[0].text).toBe('my_db.tb');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('select with columns with trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 3,
|
||||
column: 47,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = mysql.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = mysql.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(1);
|
||||
expect(entities[0].text).toBe('students');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('insert into table as select with no column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 5,
|
||||
column: 30,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = mysql.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = mysql.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('insert_tb');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('from_tb');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('insert into table as select with trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 7,
|
||||
column: 39,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = mysql.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = mysql.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('insert_tb');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('from_tb');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('create table as select with no column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 9,
|
||||
column: 43,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = mysql.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = mysql.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('sorted_census_data');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('unsorted_census_data');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('create table as select with trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 11,
|
||||
column: 52,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = mysql.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = mysql.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('sorted_census_data');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('unsorted_census_data');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
});
|
@ -1,7 +1,7 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import MySQL from 'src/parser/mysql';
|
||||
import { SyntaxContextType, CaretPosition } from 'src/parser/common/basic-parser-types';
|
||||
import { EntityContextType, CaretPosition } from 'src/parser/common/basic-parser-types';
|
||||
import { commentOtherLine } from 'test/helper';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
@ -26,7 +26,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -43,7 +43,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -60,7 +60,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -77,7 +77,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -94,7 +94,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -111,7 +111,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -128,7 +128,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -145,7 +145,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
|
||||
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -162,7 +162,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -179,7 +179,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -196,7 +196,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -213,7 +213,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -230,7 +230,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -247,7 +247,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -264,7 +264,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -281,7 +281,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -298,7 +298,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -315,7 +315,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -332,7 +332,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -349,7 +349,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -366,7 +366,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -383,7 +383,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -400,7 +400,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -417,7 +417,7 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -434,10 +434,78 @@ describe('MySQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
});
|
||||
|
||||
test('show create function', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 51,
|
||||
column: 31,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['func_name']);
|
||||
});
|
||||
|
||||
test('show create table', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 53,
|
||||
column: 27,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['tbl_name']);
|
||||
});
|
||||
|
||||
test('show create dbName', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 55,
|
||||
column: 43,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db_name']);
|
||||
});
|
||||
|
||||
test('show create view', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 57,
|
||||
column: 24,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['test', '.', 'v']);
|
||||
});
|
||||
});
|
||||
|
444
test/parser/pgsql/contextCollect/entityCollector.test.ts
Normal file
444
test/parser/pgsql/contextCollect/entityCollector.test.ts
Normal file
@ -0,0 +1,444 @@
|
||||
import { ParseTreeListener } from 'antlr4ng';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { PostgreSQLParserListener } from 'src/lib/pgsql/PostgreSQLParserListener';
|
||||
import { EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { StmtContextType } from 'src/parser/common/entityCollector';
|
||||
import PostgreSQL, { PostgreSQLEntityCollector, PostgreSqlSplitListener } from 'src/parser/pgsql';
|
||||
|
||||
const commonSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'common.sql'), 'utf-8');
|
||||
|
||||
describe('PostgreSQL entity collector tests', () => {
|
||||
const postgreSql = new PostgreSQL();
|
||||
const parseTree = postgreSql.parse(commonSql);
|
||||
const splitListener = new PostgreSqlSplitListener();
|
||||
postgreSql.listen(splitListener as PostgreSQLParserListener, parseTree);
|
||||
|
||||
test('validate common sql', () => {
|
||||
expect(postgreSql.validate(commonSql).length).toBe(0);
|
||||
});
|
||||
|
||||
test('split results', () => {
|
||||
expect(splitListener.statementsContext.length).toBe(10);
|
||||
});
|
||||
|
||||
test('create database', () => {
|
||||
const testingContext = splitListener.statementsContext[0];
|
||||
|
||||
const collectListener = new PostgreSQLEntityCollector(commonSql);
|
||||
postgreSql.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const sourceTableEntity = allEntities[0];
|
||||
|
||||
expect(sourceTableEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
|
||||
expect(sourceTableEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_DATABASE_STMT
|
||||
);
|
||||
expect(sourceTableEntity.text).toBe('music2');
|
||||
});
|
||||
|
||||
test('create table by select', () => {
|
||||
const testingContext = splitListener.statementsContext[1];
|
||||
|
||||
const collectListener = new PostgreSQLEntityCollector(commonSql);
|
||||
postgreSql.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(3);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('films_recent');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
startIndex: 110,
|
||||
endIndex: 121,
|
||||
line: 6,
|
||||
startColumn: 14,
|
||||
endColumn: 26,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
startIndex: 97,
|
||||
endIndex: 246,
|
||||
startLine: 6,
|
||||
endLine: 7,
|
||||
startColumn: 1,
|
||||
endColumn: 122,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.columns).toBeNull();
|
||||
|
||||
expect(tableCreateEntity.relatedEntities.length).toBe(2);
|
||||
tableCreateEntity.relatedEntities.forEach((relatedEntity) => {
|
||||
expect(relatedEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(allEntities.some((en) => relatedEntity === en)).toBeTruthy();
|
||||
});
|
||||
|
||||
expect(allEntities[1].text).toBe('films');
|
||||
expect(allEntities[1].belongStmt.rootStmt).toBe(allEntities[0].belongStmt);
|
||||
expect(allEntities[1].position).toEqual({
|
||||
startIndex: 168,
|
||||
endIndex: 172,
|
||||
line: 7,
|
||||
startColumn: 43,
|
||||
endColumn: 48,
|
||||
});
|
||||
|
||||
expect(allEntities[2].text).toBe('films2');
|
||||
expect(allEntities[2].belongStmt.rootStmt).toBe(allEntities[0].belongStmt);
|
||||
expect(allEntities[2].position).toEqual({
|
||||
startIndex: 179,
|
||||
endIndex: 184,
|
||||
line: 7,
|
||||
startColumn: 54,
|
||||
endColumn: 60,
|
||||
});
|
||||
});
|
||||
|
||||
test('create table of columns', () => {
|
||||
const testingContext = splitListener.statementsContext[2];
|
||||
|
||||
const collectListener = new PostgreSQLEntityCollector(commonSql);
|
||||
postgreSql.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('distributors');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
startIndex: 263,
|
||||
endIndex: 274,
|
||||
line: 9,
|
||||
startColumn: 14,
|
||||
endColumn: 26,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
startIndex: 250,
|
||||
endIndex: 377,
|
||||
startLine: 9,
|
||||
endLine: 13,
|
||||
startColumn: 1,
|
||||
endColumn: 2,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.columns.length).toBe(3);
|
||||
tableCreateEntity.columns.forEach((columEntity) => {
|
||||
expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
|
||||
expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||
expect(columEntity.text).toBe(
|
||||
commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
test('create foreign table by columns', () => {
|
||||
const testingContext = splitListener.statementsContext[3];
|
||||
|
||||
const collectListener = new PostgreSQLEntityCollector(commonSql);
|
||||
postgreSql.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('films');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
startIndex: 402,
|
||||
endIndex: 406,
|
||||
line: 15,
|
||||
startColumn: 22,
|
||||
endColumn: 27,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
startIndex: 381,
|
||||
endIndex: 626,
|
||||
startLine: 15,
|
||||
endLine: 23,
|
||||
startColumn: 1,
|
||||
endColumn: 19,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.columns.length).toBe(6);
|
||||
tableCreateEntity.columns.forEach((columEntity) => {
|
||||
expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
|
||||
expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||
expect(columEntity.text).toBe(
|
||||
commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
test('create foreign table of partition', () => {
|
||||
const testingContext = splitListener.statementsContext[4];
|
||||
|
||||
const collectListener = new PostgreSQLEntityCollector(commonSql);
|
||||
postgreSql.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('measurement_y2016m07');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
startIndex: 651,
|
||||
endIndex: 670,
|
||||
line: 25,
|
||||
startColumn: 22,
|
||||
endColumn: 42,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
startIndex: 630,
|
||||
endIndex: 769,
|
||||
startLine: 25,
|
||||
endLine: 27,
|
||||
startColumn: 1,
|
||||
endColumn: 21,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.columns).toBeNull();
|
||||
|
||||
expect(tableCreateEntity.relatedEntities.length).toBe(1);
|
||||
|
||||
const relatedEntity = tableCreateEntity.relatedEntities[0];
|
||||
expect(relatedEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(allEntities.some((en) => relatedEntity === en)).toBeTruthy();
|
||||
expect(relatedEntity.text).toBe('measurement');
|
||||
});
|
||||
|
||||
test('create view by select', () => {
|
||||
const testingContext = splitListener.statementsContext[5];
|
||||
|
||||
const collectListener = new PostgreSQLEntityCollector(commonSql);
|
||||
postgreSql.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('comedies');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
startIndex: 785,
|
||||
endIndex: 792,
|
||||
line: 29,
|
||||
startColumn: 13,
|
||||
endColumn: 21,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
startIndex: 773,
|
||||
endIndex: 849,
|
||||
startLine: 29,
|
||||
endLine: 32,
|
||||
startColumn: 1,
|
||||
endColumn: 26,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.columns).toBeNull();
|
||||
|
||||
expect(tableCreateEntity.relatedEntities.length).toBe(1);
|
||||
|
||||
const relatedEntity = tableCreateEntity.relatedEntities[0];
|
||||
expect(relatedEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(allEntities.some((en) => relatedEntity === en)).toBeTruthy();
|
||||
expect(relatedEntity.text).toBe('films');
|
||||
});
|
||||
|
||||
test('create materialized view by columns', () => {
|
||||
const testingContext = splitListener.statementsContext[6];
|
||||
|
||||
const collectListener = new PostgreSQLEntityCollector(commonSql);
|
||||
postgreSql.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('comedies_mate');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
startIndex: 878,
|
||||
endIndex: 890,
|
||||
line: 34,
|
||||
startColumn: 26,
|
||||
endColumn: 39,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
startIndex: 853,
|
||||
endIndex: 1055,
|
||||
startLine: 34,
|
||||
endLine: 39,
|
||||
startColumn: 1,
|
||||
endColumn: 17,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.columns.length).toBe(2);
|
||||
|
||||
tableCreateEntity.columns.forEach((columEntity) => {
|
||||
expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
|
||||
expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||
expect(columEntity.text).toBe(
|
||||
commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
test('select with clause', () => {
|
||||
const testingContext = splitListener.statementsContext[7];
|
||||
|
||||
const collectListener = new PostgreSQLEntityCollector(commonSql);
|
||||
postgreSql.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableEntity1 = allEntities[0];
|
||||
const tableEntity2 = allEntities[1];
|
||||
expect(tableEntity1.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity1.text).toBe('table_expression');
|
||||
expect(tableEntity1.position).toEqual({
|
||||
startIndex: 1109,
|
||||
endIndex: 1124,
|
||||
line: 41,
|
||||
startColumn: 51,
|
||||
endColumn: 67,
|
||||
});
|
||||
|
||||
expect(tableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(tableEntity1.belongStmt.position).toEqual({
|
||||
startIndex: 1094,
|
||||
endIndex: 1124,
|
||||
startLine: 41,
|
||||
endLine: 41,
|
||||
startColumn: 36,
|
||||
endColumn: 67,
|
||||
});
|
||||
expect(tableEntity1.columns).toBeNull();
|
||||
expect(tableEntity1.relatedEntities).toBeNull();
|
||||
|
||||
expect(tableEntity2.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity2.text).toBe('table_expression1');
|
||||
expect(tableEntity2.position).toEqual({
|
||||
startIndex: 1182,
|
||||
endIndex: 1198,
|
||||
line: 42,
|
||||
startColumn: 55,
|
||||
endColumn: 72,
|
||||
});
|
||||
|
||||
expect(tableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(tableEntity2.belongStmt.position).toEqual({
|
||||
startIndex: 1059,
|
||||
endIndex: 1237,
|
||||
startLine: 41,
|
||||
endLine: 42,
|
||||
startColumn: 1,
|
||||
endColumn: 111,
|
||||
});
|
||||
expect(tableEntity2.columns).toBeNull();
|
||||
expect(tableEntity2.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('insert into table', () => {
|
||||
const testingContext = splitListener.statementsContext[8];
|
||||
|
||||
const collectListener = new PostgreSQLEntityCollector(commonSql);
|
||||
postgreSql.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableInsertEntity = allEntities[0];
|
||||
|
||||
expect(tableInsertEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableInsertEntity.text).toBe('insert_films');
|
||||
expect(tableInsertEntity.position).toEqual({
|
||||
startIndex: 1253,
|
||||
endIndex: 1264,
|
||||
line: 44,
|
||||
startColumn: 13,
|
||||
endColumn: 25,
|
||||
});
|
||||
|
||||
expect(tableInsertEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||
expect(tableInsertEntity.belongStmt.position).toEqual({
|
||||
startIndex: 1241,
|
||||
endIndex: 1355,
|
||||
startLine: 44,
|
||||
endLine: 45,
|
||||
startColumn: 1,
|
||||
endColumn: 55,
|
||||
});
|
||||
|
||||
expect(tableInsertEntity.columns).toBeNull();
|
||||
expect(tableInsertEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('create function', () => {
|
||||
const testingContext = splitListener.statementsContext[9];
|
||||
|
||||
const collectListener = new PostgreSQLEntityCollector(commonSql);
|
||||
postgreSql.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const functionEntity = allEntities[0];
|
||||
|
||||
expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
|
||||
expect(functionEntity.text).toBe('get_color_note');
|
||||
expect(functionEntity.position).toEqual({
|
||||
endColumn: 31,
|
||||
endIndex: 1388,
|
||||
line: 47,
|
||||
startColumn: 17,
|
||||
startIndex: 1375,
|
||||
});
|
||||
|
||||
expect(functionEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_FUNCTION_STMT
|
||||
);
|
||||
expect(functionEntity.belongStmt.position).toEqual({
|
||||
endColumn: 15,
|
||||
endIndex: 1477,
|
||||
endLine: 49,
|
||||
startColumn: 1,
|
||||
startIndex: 1359,
|
||||
startLine: 47,
|
||||
});
|
||||
|
||||
expect(functionEntity.columns).toBeNull();
|
||||
expect(functionEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
});
|
49
test/parser/pgsql/contextCollect/fixtures/common.sql
Normal file
49
test/parser/pgsql/contextCollect/fixtures/common.sql
Normal file
@ -0,0 +1,49 @@
|
||||
CREATE DATABASE music2
|
||||
LOCALE 'sv_SE.iso885915'
|
||||
ENCODING LATIN9
|
||||
TEMPLATE template0;
|
||||
|
||||
CREATE TABLE films_recent AS
|
||||
SELECT films.name, films.startTime FROM films JOIN films2 ON films2.id = films.id WHERE films.startTime >= '2002-01-01';
|
||||
|
||||
CREATE TABLE distributors (
|
||||
did integer,
|
||||
name varchar(40),
|
||||
CONSTRAINT con1 CHECK (did > 100 AND name <> '')
|
||||
);
|
||||
|
||||
CREATE FOREIGN TABLE films (
|
||||
code char(5) NOT NULL,
|
||||
title varchar(40) NOT NULL,
|
||||
did integer NOT NULL,
|
||||
date_prod date,
|
||||
kind varchar(10),
|
||||
len interval hour to minute
|
||||
)
|
||||
SERVER film_server;
|
||||
|
||||
CREATE FOREIGN TABLE measurement_y2016m07
|
||||
PARTITION OF measurement FOR VALUES FROM ('2016-07-01') TO ('2016-08-01')
|
||||
SERVER server_07;
|
||||
|
||||
CREATE VIEW comedies AS
|
||||
SELECT *
|
||||
FROM films
|
||||
WHERE kind = 'Comedy';
|
||||
|
||||
CREATE MATERIALIZED VIEW comedies_mate
|
||||
(column_name1, column_name2)
|
||||
WITH ( storage_parameter=3, storage_parameter1=4)
|
||||
TABLESPACE tablespace_name
|
||||
AS SELECT * FROM product
|
||||
WITH NO DATA;
|
||||
|
||||
WITH RECURSIVE query_name (id) AS (SELECT id FROM table_expression)
|
||||
SELECT DISTINCT ON (col1,col2) random() AS name1 FROM table_expression1 WHERE name1=name1 GROUP BY DISTINCT id;
|
||||
|
||||
INSERT INTO insert_films (code, title, did, date_prod, kind)
|
||||
VALUES ('T_601', 'Yojimbo', 106, DEFAULT, 'Drama');
|
||||
|
||||
CREATE FUNCTION get_color_note (rainbow) RETURNS text AS
|
||||
'SELECT note FROM my_colors WHERE color = $1'
|
||||
LANGUAGE SQL;
|
@ -1,4 +1,4 @@
|
||||
import PostgresSQL, { PgSqlSplitListener } from 'src/parser/pgsql';
|
||||
import PostgresSQL, { PostgreSqlSplitListener } from 'src/parser/pgsql';
|
||||
import { PostgreSQLParserListener } from 'src/lib/pgsql/PostgreSQLParserListener';
|
||||
|
||||
const validSQL1 = `INSERT INTO country_page_view
|
||||
@ -15,7 +15,7 @@ describe('PgSQL ErrorStrategy test', () => {
|
||||
// const sql = [inValidSQL, validSQL1, validSQL2].join('\n');
|
||||
// // parse with empty errorListener
|
||||
// const parseTree = pgSQL.parse(sql, () => {});
|
||||
// const splitListener = new PgSqlSplitListener();
|
||||
// const splitListener = new PostgreSqlSplitListener();
|
||||
// pgSQL.listen(splitListener as PostgreSQLParserListener, parseTree);
|
||||
|
||||
// const statementCount = splitListener.statementsContext.length;
|
||||
@ -32,7 +32,7 @@ describe('PgSQL ErrorStrategy test', () => {
|
||||
const sql = [validSQL1, inValidSQL, validSQL2].join('\n');
|
||||
// parse with empty errorListener
|
||||
const parseTree = pgSQL.parse(sql, () => {});
|
||||
const splitListener = new PgSqlSplitListener();
|
||||
const splitListener = new PostgreSqlSplitListener();
|
||||
pgSQL.listen(splitListener as PostgreSQLParserListener, parseTree);
|
||||
|
||||
const statementCount = splitListener.statementsContext.length;
|
||||
@ -49,7 +49,7 @@ describe('PgSQL ErrorStrategy test', () => {
|
||||
const sql = [validSQL1, validSQL2, inValidSQL].join('\n');
|
||||
// parse with empty errorListener
|
||||
const parseTree = pgSQL.parse(sql, () => {});
|
||||
const splitListener = new PgSqlSplitListener();
|
||||
const splitListener = new PostgreSqlSplitListener();
|
||||
pgSQL.listen(splitListener as PostgreSQLParserListener, parseTree);
|
||||
|
||||
splitListener.statementsContext.map((item, index) => {
|
||||
|
@ -0,0 +1,11 @@
|
||||
SELECT FROM my_db.tb;
|
||||
|
||||
SELECT name, calculate_age(birthdate) AS age, FROM students;
|
||||
|
||||
INSERT INTO insert_tb SELECT FROM from_tb;
|
||||
|
||||
INSERT INTO insert_tb SELECT id, age, FROM from_tb;
|
||||
|
||||
CREATE TABLE sorted_census_data AS SELECT FROM unsorted_census_data;
|
||||
|
||||
CREATE TABLE sorted_census_data AS SELECT id, age, FROM unsorted_census_data;
|
@ -1,7 +1,7 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import PostgresSQL from 'src/parser/pgsql';
|
||||
import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
|
||||
@ -18,7 +18,7 @@ describe('PgSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -32,10 +32,10 @@ describe('PgSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
const suggestionVw = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -51,7 +51,7 @@ describe('PgSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -65,7 +65,7 @@ describe('PgSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
|
156
test/parser/pgsql/suggestion/suggestionWithEntity.test.ts
Normal file
156
test/parser/pgsql/suggestion/suggestionWithEntity.test.ts
Normal file
@ -0,0 +1,156 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import PostgreSQL from 'src/parser/pgsql';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { commentOtherLine } from 'test/helper';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
path.join(__dirname, 'fixtures', 'suggestionWithEntity.sql'),
|
||||
'utf-8'
|
||||
);
|
||||
|
||||
describe('PostgreSQL Syntax Suggestion with collect entity', () => {
|
||||
const postgre = new PostgreSQL();
|
||||
|
||||
test('select with no column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 1,
|
||||
column: 8,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = postgre.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = postgre.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(1);
|
||||
expect(entities[0].text).toBe('my_db.tb');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('select with columns with trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 3,
|
||||
column: 47,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = postgre.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = postgre.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(1);
|
||||
expect(entities[0].text).toBe('students');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('insert into table as select with no column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 5,
|
||||
column: 30,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = postgre.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = postgre.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('insert_tb');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('from_tb');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('insert into table as select with trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 7,
|
||||
column: 39,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = postgre.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = postgre.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('insert_tb');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('from_tb');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('create table as select with no column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 9,
|
||||
column: 43,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = postgre.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = postgre.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('sorted_census_data');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('unsorted_census_data');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('create table as select with trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 11,
|
||||
column: 52,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = postgre.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = postgre.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('sorted_census_data');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('unsorted_census_data');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
});
|
@ -1,7 +1,7 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import PostgresSQL from 'src/parser/pgsql';
|
||||
import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { commentOtherLine } from 'test/helper';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
@ -28,7 +28,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -45,7 +45,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -62,7 +62,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -95,13 +95,13 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos2
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
|
||||
);
|
||||
const suggestion1 = syntaxes1?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
const suggestion2 = syntaxes2?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -122,7 +122,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -139,7 +139,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -156,7 +156,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -173,7 +173,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -190,7 +190,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
|
||||
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -207,7 +207,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -224,7 +224,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -241,7 +241,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -258,7 +258,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -275,7 +275,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -292,7 +292,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -317,10 +317,10 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
posCreate
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestionCreate = syntaxesCreate?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -357,13 +357,13 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
posAction
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestionCreate = syntaxesCreate?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
const suggestionAction = syntaxesAction?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -386,7 +386,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
@ -402,7 +402,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([
|
||||
@ -438,13 +438,13 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
posAction
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestionCreate = syntaxesCreate?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestionAction = syntaxesAction?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -467,7 +467,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
@ -483,7 +483,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
@ -515,13 +515,13 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos2
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion1 = syntaxes1?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion2 = syntaxes2?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['col_name']);
|
||||
@ -541,7 +541,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
@ -557,7 +557,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
@ -589,13 +589,13 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos2
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion1 = syntaxes1?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion2 = syntaxes2?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
@ -615,7 +615,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['col_name']);
|
||||
@ -631,7 +631,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
@ -655,10 +655,10 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos1
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion1 = syntaxes1?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
|
||||
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
@ -676,7 +676,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
@ -708,13 +708,13 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos2
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion1 = syntaxes1?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion2 = syntaxes2?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
@ -734,7 +734,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['col_name']);
|
||||
@ -749,7 +749,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['co_name']);
|
||||
@ -765,7 +765,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([
|
||||
@ -825,23 +825,23 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos5
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion1 = syntaxes1?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion2 = syntaxes2?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion3 = syntaxes3?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion4 = syntaxes4?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
const suggestion5 = syntaxes5?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -884,13 +884,13 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos2
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion1 = syntaxes1?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion2 = syntaxes2?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['columnname']);
|
||||
@ -918,10 +918,10 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos1
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion1 = syntaxes1?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['col1']);
|
||||
@ -954,13 +954,13 @@ describe('Postgre SQL Syntax Suggestion', () => {
|
||||
pos3
|
||||
)?.syntax;
|
||||
const suggestion1 = syntaxes1?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion2 = syntaxes2?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion3 = syntaxes3?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion1).not.toBeUndefined();
|
||||
|
362
test/parser/spark/contextCollect/entityCollector.test.ts
Normal file
362
test/parser/spark/contextCollect/entityCollector.test.ts
Normal file
@ -0,0 +1,362 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import SparkSQL from 'src/parser/spark';
|
||||
import { SparkSqlSplitListener, SparkEntityCollector } from 'src/parser/spark';
|
||||
import { ParseTreeListener } from 'antlr4ng';
|
||||
import { SparkSqlParserListener } from 'src/lib/spark/SparkSqlParserListener';
|
||||
import { EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { StmtContextType } from 'src/parser/common/entityCollector';
|
||||
|
||||
const commonSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'common.sql'), 'utf-8');
|
||||
|
||||
describe('SparkSQL entity collector tests', () => {
|
||||
const spark = new SparkSQL();
|
||||
const parseTree = spark.parse(commonSql);
|
||||
const splitListener = new SparkSqlSplitListener();
|
||||
spark.listen(splitListener as SparkSqlParserListener, parseTree);
|
||||
|
||||
test('validate common sql', () => {
|
||||
expect(spark.validate(commonSql).length).toBe(0);
|
||||
});
|
||||
|
||||
test('split results', () => {
|
||||
expect(splitListener.statementsContext.length).toBe(12);
|
||||
});
|
||||
|
||||
test('create table like', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[0];
|
||||
|
||||
const collectListener = new SparkEntityCollector(commonSql);
|
||||
spark.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('new_tb1');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
startIndex: 27,
|
||||
endIndex: 33,
|
||||
line: 1,
|
||||
startColumn: 28,
|
||||
endColumn: 35,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
startIndex: 0,
|
||||
endIndex: 50,
|
||||
startLine: 1,
|
||||
endLine: 1,
|
||||
startColumn: 1,
|
||||
endColumn: 52,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.relatedEntities.length).toBe(1);
|
||||
|
||||
const beLikedEntity = allEntities[1];
|
||||
|
||||
expect(tableCreateEntity.relatedEntities[0]).toBe(beLikedEntity);
|
||||
expect(beLikedEntity.text).toBe('like_old_tb');
|
||||
expect(beLikedEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(beLikedEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||
});
|
||||
|
||||
test('create hive format table', () => {
|
||||
const columnCreateTableContext = splitListener.statementsContext[1];
|
||||
|
||||
const collectListener = new SparkEntityCollector(commonSql);
|
||||
spark.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('new_tb2');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
startIndex: 67,
|
||||
endIndex: 73,
|
||||
line: 3,
|
||||
startColumn: 14,
|
||||
endColumn: 21,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
startIndex: 54,
|
||||
endIndex: 242,
|
||||
startLine: 3,
|
||||
endLine: 8,
|
||||
startColumn: 1,
|
||||
endColumn: 22,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.relatedEntities).toBeNull();
|
||||
expect(tableCreateEntity.columns.length).toBe(2);
|
||||
|
||||
tableCreateEntity.columns.forEach((columEntity) => {
|
||||
expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
|
||||
expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||
expect(columEntity.text).toBe(
|
||||
commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
test('create data source table', () => {
|
||||
const testingContext = splitListener.statementsContext[2];
|
||||
|
||||
const collectListener = new SparkEntityCollector(commonSql);
|
||||
spark.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
const originTableEntity = allEntities[1];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('student_copy');
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
|
||||
expect(tableCreateEntity.columns).toBeNull();
|
||||
expect(tableCreateEntity.relatedEntities.length).toBe(1);
|
||||
expect(tableCreateEntity.relatedEntities[0]).toBe(originTableEntity);
|
||||
|
||||
expect(originTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(originTableEntity.text).toBe('student');
|
||||
expect(originTableEntity.belongStmt.rootStmt).toBe(tableCreateEntity.belongStmt);
|
||||
});
|
||||
|
||||
test('create view', () => {
|
||||
const testingContext = splitListener.statementsContext[3];
|
||||
|
||||
const collectListener = new SparkEntityCollector(commonSql);
|
||||
spark.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const viewEntity = allEntities[0];
|
||||
const tableEntity = allEntities[1];
|
||||
|
||||
expect(viewEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
|
||||
expect(viewEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
|
||||
expect(viewEntity.text).toBe('new_view1');
|
||||
expect(viewEntity.columns.length).toBe(2);
|
||||
viewEntity.columns.forEach((columEntity) => {
|
||||
expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
|
||||
expect(columEntity.belongStmt).toBe(viewEntity.belongStmt);
|
||||
expect(columEntity.text).toBe(
|
||||
commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
|
||||
);
|
||||
});
|
||||
|
||||
expect(tableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(tableEntity.belongStmt.rootStmt).toBe(viewEntity.belongStmt);
|
||||
expect(tableEntity.text).toBe('old_tb_1');
|
||||
});
|
||||
|
||||
test('select from table', () => {
|
||||
const testingContext = splitListener.statementsContext[4];
|
||||
|
||||
const collectListener = new SparkEntityCollector(commonSql);
|
||||
spark.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableEntity1 = allEntities[0];
|
||||
const tableEntity2 = allEntities[1];
|
||||
|
||||
expect(tableEntity1.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(tableEntity1.text).toBe('employee');
|
||||
|
||||
expect(tableEntity2.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(tableEntity2.text).toBe('department');
|
||||
});
|
||||
|
||||
test('insert into table values', () => {
|
||||
const testingContext = splitListener.statementsContext[5];
|
||||
|
||||
const collectListener = new SparkEntityCollector(commonSql);
|
||||
spark.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableEntity = allEntities[0];
|
||||
|
||||
expect(tableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||
expect(tableEntity.text).toBe('insert_tb');
|
||||
});
|
||||
|
||||
test('insert overwrite table', () => {
|
||||
const testingContext = splitListener.statementsContext[6];
|
||||
|
||||
const collectListener = new SparkEntityCollector(commonSql);
|
||||
spark.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const targetTableEntity = allEntities[0];
|
||||
const sourceTableEntity = allEntities[1];
|
||||
|
||||
expect(targetTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(targetTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||
expect(targetTableEntity.text).toBe('target_tb');
|
||||
|
||||
expect(sourceTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(sourceTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(sourceTableEntity.belongStmt.rootStmt).toBe(targetTableEntity.belongStmt);
|
||||
expect(sourceTableEntity.text).toBe('source_tb');
|
||||
});
|
||||
|
||||
test('insert overwrite dir', () => {
|
||||
const testingContext = splitListener.statementsContext[7];
|
||||
|
||||
const collectListener = new SparkEntityCollector(commonSql);
|
||||
spark.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const sourceTableEntity = allEntities[0];
|
||||
|
||||
expect(sourceTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(sourceTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(sourceTableEntity.text).toBe('from_tb');
|
||||
});
|
||||
|
||||
test('create database', () => {
|
||||
const testingContext = splitListener.statementsContext[8];
|
||||
|
||||
const collectListener = new SparkEntityCollector(commonSql);
|
||||
spark.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const sourceTableEntity = allEntities[0];
|
||||
|
||||
expect(sourceTableEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
|
||||
expect(sourceTableEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_DATABASE_STMT
|
||||
);
|
||||
expect(sourceTableEntity.text).toBe('customer_db');
|
||||
});
|
||||
|
||||
test('use namespace', () => {
|
||||
const testingContext = splitListener.statementsContext[9];
|
||||
|
||||
const collectListener = new SparkEntityCollector(commonSql);
|
||||
spark.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const sourceTableEntity = allEntities[0];
|
||||
|
||||
expect(sourceTableEntity.entityContextType).toBe(EntityContextType.DATABASE);
|
||||
expect(sourceTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.COMMON_STMT);
|
||||
expect(sourceTableEntity.text).toBe('ns1');
|
||||
});
|
||||
|
||||
test('create function', () => {
|
||||
const functionContext = splitListener.statementsContext[10];
|
||||
|
||||
const collectListener = new SparkEntityCollector(commonSql);
|
||||
spark.listen(collectListener as ParseTreeListener, functionContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const functionEntity = allEntities[0];
|
||||
|
||||
expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
|
||||
expect(functionEntity.text).toBe('simple_udf');
|
||||
expect(functionEntity.position).toEqual({
|
||||
endColumn: 38,
|
||||
endIndex: 905,
|
||||
line: 28,
|
||||
startColumn: 28,
|
||||
startIndex: 896,
|
||||
});
|
||||
|
||||
expect(functionEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_FUNCTION_STMT
|
||||
);
|
||||
expect(functionEntity.belongStmt.position).toEqual({
|
||||
endColumn: 54,
|
||||
endIndex: 921,
|
||||
endLine: 28,
|
||||
startColumn: 1,
|
||||
startIndex: 869,
|
||||
startLine: 28,
|
||||
});
|
||||
|
||||
expect(functionEntity.columns).toBeNull();
|
||||
expect(functionEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('create xxx function', () => {
|
||||
const functionContext = splitListener.statementsContext[11];
|
||||
|
||||
const collectListener = new SparkEntityCollector(commonSql);
|
||||
spark.listen(collectListener as ParseTreeListener, functionContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const functionEntity = allEntities[0];
|
||||
|
||||
expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
|
||||
expect(functionEntity.text).toBe('simple_udf');
|
||||
expect(functionEntity.position).toEqual({
|
||||
endColumn: 27,
|
||||
endIndex: 950,
|
||||
line: 30,
|
||||
startColumn: 17,
|
||||
startIndex: 941,
|
||||
});
|
||||
|
||||
expect(functionEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_FUNCTION_STMT
|
||||
);
|
||||
expect(functionEntity.belongStmt.position).toEqual({
|
||||
endColumn: 43,
|
||||
endIndex: 966,
|
||||
endLine: 30,
|
||||
startColumn: 1,
|
||||
startIndex: 925,
|
||||
startLine: 30,
|
||||
});
|
||||
|
||||
expect(functionEntity.columns).toBeNull();
|
||||
expect(functionEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
});
|
30
test/parser/spark/contextCollect/fixtures/common.sql
Normal file
30
test/parser/spark/contextCollect/fixtures/common.sql
Normal file
@ -0,0 +1,30 @@
|
||||
CREATE TABLE IF NOT EXISTS new_tb1 like like_old_tb;
|
||||
|
||||
CREATE TABLE new_tb2 (new_col1 INT, new_col2 STRING)
|
||||
PARTITIONED BY (YEAR STRING)
|
||||
CLUSTERED BY (new_col1, NAME)
|
||||
SORTED BY (new_col1 ASC)
|
||||
INTO 3 BUCKETS
|
||||
STORED AS PARQUET;
|
||||
|
||||
CREATE TABLE student_copy USING CSV AS SELECT * FROM student;
|
||||
|
||||
CREATE VIEW new_view1 (ID COMMENT 'Unique identification number', Name)
|
||||
COMMENT 'View for experienced employees'
|
||||
AS SELECT id, name FROM old_tb_1 WHERE working_years > 5;
|
||||
|
||||
SELECT id, name, employee.deptno, deptname FROM employee CROSS JOIN department;
|
||||
|
||||
INSERT INTO insert_tb (address, name, student_id) VALUES ('Hangzhou, China', 'Kent Yao', 11215016);
|
||||
|
||||
INSERT OVERWRITE target_tb TABLE source_tb;
|
||||
|
||||
INSERT OVERWRITE DIRECTORY '/path/to/output/directory' SELECT * FROM from_tb WHERE condition;
|
||||
|
||||
CREATE DATABASE IF NOT EXISTS customer_db;
|
||||
|
||||
USE NAMESPACE ns1;
|
||||
|
||||
CREATE OR REPLACE FUNCTION simple_udf AS 'SimpleUdfR';
|
||||
|
||||
CREATE FUNCTION simple_udf AS 'SimpleUdfR';
|
@ -0,0 +1,11 @@
|
||||
SELECT FROM my_db.tb;
|
||||
|
||||
SELECT name, calculate_age(birthdate) AS age, FROM students;
|
||||
|
||||
INSERT INTO insert_tb SELECT FROM from_tb;
|
||||
|
||||
INSERT INTO insert_tb SELECT id, age, FROM from_tb;
|
||||
|
||||
CREATE TABLE sorted_census_data AS SELECT FROM unsorted_census_data;
|
||||
|
||||
CREATE TABLE sorted_census_data AS SELECT id, age, FROM unsorted_census_data;
|
@ -1,7 +1,7 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import SparkSQL from 'src/parser/spark';
|
||||
import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
|
||||
@ -18,7 +18,7 @@ describe('SparkSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -32,7 +32,7 @@ describe('SparkSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -46,7 +46,7 @@ describe('SparkSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -60,7 +60,7 @@ describe('SparkSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
|
156
test/parser/spark/suggestion/suggestionWithEntity.test.ts
Normal file
156
test/parser/spark/suggestion/suggestionWithEntity.test.ts
Normal file
@ -0,0 +1,156 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import SparkSQL from 'src/parser/spark';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { commentOtherLine } from 'test/helper';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
path.join(__dirname, 'fixtures', 'suggestionWithEntity.sql'),
|
||||
'utf-8'
|
||||
);
|
||||
|
||||
describe('PostgreSQL Syntax Suggestion with collect entity', () => {
|
||||
const spark = new SparkSQL();
|
||||
|
||||
test('select with no column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 1,
|
||||
column: 8,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = spark.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = spark.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(1);
|
||||
expect(entities[0].text).toBe('my_db.tb');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('select with columns with trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 3,
|
||||
column: 47,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = spark.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = spark.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(1);
|
||||
expect(entities[0].text).toBe('students');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('insert into table as select with no column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 5,
|
||||
column: 30,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = spark.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = spark.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('insert_tb');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('from_tb');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('insert into table as select with trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 7,
|
||||
column: 39,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = spark.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = spark.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('insert_tb');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('from_tb');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('create table as select with no column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 9,
|
||||
column: 43,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = spark.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = spark.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('sorted_census_data');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('unsorted_census_data');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('create table as select with trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 11,
|
||||
column: 52,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = spark.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = spark.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('sorted_census_data');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('unsorted_census_data');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
});
|
@ -1,7 +1,7 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import SparkSQL from 'src/parser/spark';
|
||||
import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { commentOtherLine } from 'test/helper';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
@ -28,7 +28,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -45,7 +45,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -62,7 +62,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -79,7 +79,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -96,7 +96,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -113,7 +113,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -130,7 +130,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -147,7 +147,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
|
||||
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -164,7 +164,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -181,7 +181,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -198,7 +198,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -215,7 +215,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -232,7 +232,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -249,7 +249,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -266,7 +266,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -283,7 +283,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -300,7 +300,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -317,7 +317,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -334,7 +334,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -351,7 +351,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -368,7 +368,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -385,7 +385,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -402,7 +402,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -419,7 +419,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -436,7 +436,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -453,7 +453,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -470,7 +470,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -487,7 +487,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -504,7 +504,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -521,7 +521,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -538,7 +538,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -555,7 +555,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -572,7 +572,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -589,7 +589,7 @@ describe('Spark SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
|
250
test/parser/trino/contextCollect/entityCollector.test.ts
Normal file
250
test/parser/trino/contextCollect/entityCollector.test.ts
Normal file
@ -0,0 +1,250 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import TrinoSQL from 'src/parser/trino';
|
||||
import { TrinoSqlSplitListener, TrinoEntityCollector } from 'src/parser/trino';
|
||||
import { ParseTreeListener } from 'antlr4ng';
|
||||
import { TrinoSqlListener } from 'src/lib/trinosql/TrinoSqlListener';
|
||||
import { EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { StmtContextType } from 'src/parser/common/entityCollector';
|
||||
|
||||
const commonSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'common.sql'), 'utf-8');
|
||||
|
||||
describe('Trino entity collector tests', () => {
|
||||
const trino = new TrinoSQL();
|
||||
const parseTree = trino.parse(commonSql);
|
||||
const splitListener = new TrinoSqlSplitListener();
|
||||
trino.listen(splitListener as TrinoSqlListener, parseTree);
|
||||
|
||||
test('validate common sql', () => {
|
||||
expect(trino.validate(commonSql).length).toBe(0);
|
||||
});
|
||||
|
||||
test('split results', () => {
|
||||
expect(splitListener.statementsContext.length).toBe(9);
|
||||
});
|
||||
|
||||
test('create table like', () => {
|
||||
const testingContext = splitListener.statementsContext[0];
|
||||
|
||||
const collectListener = new TrinoEntityCollector(commonSql);
|
||||
trino.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('bar');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
startIndex: 27,
|
||||
endIndex: 29,
|
||||
line: 1,
|
||||
startColumn: 28,
|
||||
endColumn: 31,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
startIndex: 0,
|
||||
endIndex: 68,
|
||||
startLine: 1,
|
||||
endLine: 1,
|
||||
startColumn: 1,
|
||||
endColumn: 70,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.relatedEntities.length).toBe(1);
|
||||
|
||||
const beLikedEntity = allEntities[1];
|
||||
|
||||
expect(tableCreateEntity.relatedEntities[0]).toBe(beLikedEntity);
|
||||
expect(beLikedEntity.text).toBe('like_table');
|
||||
expect(beLikedEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(beLikedEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||
});
|
||||
|
||||
test('create table as select', () => {
|
||||
const testingContext = splitListener.statementsContext[1];
|
||||
|
||||
const collectListener = new TrinoEntityCollector(commonSql);
|
||||
trino.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
const originTableEntity = allEntities[1];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('foo');
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
|
||||
expect(tableCreateEntity.columns.length).toBe(2);
|
||||
tableCreateEntity.columns.forEach((columEntity) => {
|
||||
expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
|
||||
expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||
expect(columEntity.text).toBe(
|
||||
commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
|
||||
);
|
||||
});
|
||||
expect(tableCreateEntity.relatedEntities.length).toBe(1);
|
||||
expect(tableCreateEntity.relatedEntities[0]).toBe(originTableEntity);
|
||||
|
||||
expect(originTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(originTableEntity.text).toBe('t');
|
||||
expect(originTableEntity.belongStmt.rootStmt).toBe(tableCreateEntity.belongStmt);
|
||||
});
|
||||
|
||||
test('create view as select', () => {
|
||||
const testingContext = splitListener.statementsContext[2];
|
||||
|
||||
const collectListener = new TrinoEntityCollector(commonSql);
|
||||
trino.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
const originTableEntity = allEntities[1];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('a');
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
|
||||
|
||||
expect(tableCreateEntity.columns).toBeNull();
|
||||
expect(tableCreateEntity.relatedEntities.length).toBe(1);
|
||||
expect(tableCreateEntity.relatedEntities[0]).toBe(originTableEntity);
|
||||
|
||||
expect(originTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(originTableEntity.text).toBe('t');
|
||||
expect(originTableEntity.belongStmt.rootStmt).toBe(tableCreateEntity.belongStmt);
|
||||
});
|
||||
|
||||
test('create materialize view as select', () => {
|
||||
const testingContext = splitListener.statementsContext[3];
|
||||
|
||||
const collectListener = new TrinoEntityCollector(commonSql);
|
||||
trino.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
const originTableEntity = allEntities[1];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('a');
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
|
||||
|
||||
expect(tableCreateEntity.columns).toBeNull();
|
||||
expect(tableCreateEntity.relatedEntities.length).toBe(1);
|
||||
expect(tableCreateEntity.relatedEntities[0]).toBe(originTableEntity);
|
||||
|
||||
expect(originTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(originTableEntity.text).toBe('t');
|
||||
expect(originTableEntity.belongStmt.rootStmt).toBe(tableCreateEntity.belongStmt);
|
||||
});
|
||||
|
||||
test('select from table', () => {
|
||||
const testingContext = splitListener.statementsContext[4];
|
||||
|
||||
const collectListener = new TrinoEntityCollector(commonSql);
|
||||
trino.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableCreateEntity.text).toBe('table1');
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
|
||||
expect(tableCreateEntity.columns).toBeNull();
|
||||
expect(tableCreateEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('insert into table as select', () => {
|
||||
const testingContext = splitListener.statementsContext[5];
|
||||
|
||||
const collectListener = new TrinoEntityCollector(commonSql);
|
||||
trino.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const insertTableEntity = allEntities[0];
|
||||
const fromTableEntity = allEntities[1];
|
||||
|
||||
expect(insertTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(insertTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||
expect(insertTableEntity.text).toBe('orders');
|
||||
|
||||
expect(fromTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(fromTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(fromTableEntity.text).toBe('new_orders');
|
||||
expect(fromTableEntity.belongStmt.parentStmt).toBe(insertTableEntity.belongStmt);
|
||||
});
|
||||
|
||||
test('insert into table values', () => {
|
||||
const testingContext = splitListener.statementsContext[6];
|
||||
|
||||
const collectListener = new TrinoEntityCollector(commonSql);
|
||||
trino.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableEntity = allEntities[0];
|
||||
|
||||
expect(tableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||
expect(tableEntity.text).toBe('cities');
|
||||
});
|
||||
|
||||
test('create schema', () => {
|
||||
const testingContext = splitListener.statementsContext[7];
|
||||
|
||||
const collectListener = new TrinoEntityCollector(commonSql);
|
||||
trino.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const schemaEntity = allEntities[0];
|
||||
|
||||
expect(schemaEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
|
||||
expect(schemaEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_DATABASE_STMT);
|
||||
expect(schemaEntity.text).toBe('test');
|
||||
});
|
||||
|
||||
test('use schema', () => {
|
||||
const testingContext = splitListener.statementsContext[8];
|
||||
|
||||
const collectListener = new TrinoEntityCollector(commonSql);
|
||||
trino.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const schemaEntity = allEntities[0];
|
||||
|
||||
expect(schemaEntity.entityContextType).toBe(EntityContextType.DATABASE);
|
||||
expect(schemaEntity.belongStmt.stmtContextType).toBe(StmtContextType.COMMON_STMT);
|
||||
expect(schemaEntity.text).toBe('information_schema');
|
||||
});
|
||||
});
|
17
test/parser/trino/contextCollect/fixtures/common.sql
Normal file
17
test/parser/trino/contextCollect/fixtures/common.sql
Normal file
@ -0,0 +1,17 @@
|
||||
CREATE TABLE IF NOT EXISTS bar (LIKE like_table INCLUDING PROPERTIES);
|
||||
|
||||
CREATE TABLE foo(x,y) AS SELECT a,b FROM t;
|
||||
|
||||
CREATE VIEW a AS SELECT * FROM t;
|
||||
|
||||
CREATE MATERIALIZED VIEW a AS SELECT * FROM t;
|
||||
|
||||
SELECT * FROM table1 GROUP BY a;
|
||||
|
||||
INSERT INTO orders SELECT * FROM new_orders;
|
||||
|
||||
INSERT INTO cities VALUES (1, 'San Francisco');
|
||||
|
||||
CREATE SCHEMA IF NOT EXISTS test;
|
||||
|
||||
USE information_schema;
|
@ -1,4 +1,4 @@
|
||||
import TrinoSQL, { TrinoSqlSplitListener } from 'src/parser/trinosql';
|
||||
import TrinoSQL, { TrinoSqlSplitListener } from 'src/parser/trino';
|
||||
import { TrinoSqlListener } from 'src/lib/trinosql/TrinoSqlListener';
|
||||
|
||||
const validSQL1 = `INSERT INTO country_page_view
|
@ -1,4 +1,4 @@
|
||||
import TrinoSQL from 'src/parser/trinosql';
|
||||
import TrinoSQL from 'src/parser/trino';
|
||||
|
||||
describe('trinoSQL Lexer tests', () => {
|
||||
const parser = new TrinoSQL();
|
@ -1,4 +1,4 @@
|
||||
import TrinoSQL from 'src/parser/trinosql';
|
||||
import TrinoSQL from 'src/parser/trino';
|
||||
import { TrinoSqlListener } from 'src/lib/trinosql/TrinoSqlListener';
|
||||
import { ParseTreeListener } from 'antlr4ng';
|
||||
|
@ -0,0 +1,11 @@
|
||||
SELECT FROM my_db.tb;
|
||||
|
||||
SELECT name, calculate_age(birthdate) AS age, FROM students;
|
||||
|
||||
INSERT INTO insert_tb SELECT FROM from_tb;
|
||||
|
||||
INSERT INTO insert_tb SELECT id, age, FROM from_tb;
|
||||
|
||||
CREATE TABLE sorted_census_data AS SELECT FROM unsorted_census_data;
|
||||
|
||||
CREATE TABLE sorted_census_data AS SELECT id, age, FROM unsorted_census_data;
|
@ -1,7 +1,7 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import TrinoSQL from 'src/parser/trinosql';
|
||||
import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
|
||||
import TrinoSQL from 'src/parser/trino';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
|
||||
@ -18,7 +18,7 @@ describe('TrinoSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -32,7 +32,7 @@ describe('TrinoSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -46,7 +46,7 @@ describe('TrinoSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -60,7 +60,7 @@ describe('TrinoSQL Multiple Statements Syntax Suggestion', () => {
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
159
test/parser/trino/suggestion/suggestionWithEntity.test.ts
Normal file
159
test/parser/trino/suggestion/suggestionWithEntity.test.ts
Normal file
@ -0,0 +1,159 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import TrinoSQL from 'src/parser/trino';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { commentOtherLine } from 'test/helper';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
path.join(__dirname, 'fixtures', 'suggestionWithEntity.sql'),
|
||||
'utf-8'
|
||||
);
|
||||
|
||||
describe('PostgreSQL Syntax Suggestion with collect entity', () => {
|
||||
const trino = new TrinoSQL();
|
||||
|
||||
test('select with no column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 1,
|
||||
column: 8,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = trino.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
// TODO:
|
||||
// const entities = trino.getAllEntities(sql, pos);
|
||||
// expect(entities.length).toBe(1);
|
||||
// expect(entities[0].text).toBe('my_db.tb');
|
||||
// expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
// expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('select with columns with trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 3,
|
||||
column: 47,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = trino.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = trino.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(1);
|
||||
expect(entities[0].text).toBe('students');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('insert into table as select with no column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 5,
|
||||
column: 30,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = trino.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = trino.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(1);
|
||||
expect(entities[0].text).toBe('insert_tb');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
// TODO:
|
||||
// expect(entities[1].text).toBe('from_tb');
|
||||
// expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
// expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('insert into table as select with trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 7,
|
||||
column: 39,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = trino.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = trino.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('insert_tb');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('from_tb');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('create table as select with no column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 9,
|
||||
column: 43,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = trino.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = trino.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(1);
|
||||
expect(entities[0].text).toBe('sorted_census_data');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
// TODO:
|
||||
// expect(entities[1].text).toBe('unsorted_census_data');
|
||||
// expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
// expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
|
||||
test('create table as select with trailing comma', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 11,
|
||||
column: 52,
|
||||
};
|
||||
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||
|
||||
const syntaxes = trino.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
|
||||
const entities = trino.getAllEntities(sql, pos);
|
||||
expect(entities.length).toBe(2);
|
||||
expect(entities[0].text).toBe('sorted_census_data');
|
||||
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||
|
||||
expect(entities[1].text).toBe('unsorted_census_data');
|
||||
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||
});
|
||||
});
|
@ -1,7 +1,7 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import TrinoSQL from 'src/parser/trinosql';
|
||||
import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
|
||||
import TrinoSQL from 'src/parser/trino';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { commentOtherLine } from 'test/helper';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
@ -29,7 +29,7 @@ describe('Trino SQL Syntax Suggestion', () => {
|
||||
)?.syntax;
|
||||
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'tb']);
|
||||
@ -45,11 +45,11 @@ describe('Trino SQL Syntax Suggestion', () => {
|
||||
?.syntax ?? [];
|
||||
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(
|
||||
syntaxes.some((item) => item.syntaxContextType === SyntaxContextType.VIEW)
|
||||
syntaxes.some((item) => item.syntaxContextType === EntityContextType.VIEW)
|
||||
).toBeTruthy();
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
|
||||
@ -65,7 +65,7 @@ describe('Trino SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -82,7 +82,7 @@ describe('Trino SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -99,7 +99,7 @@ describe('Trino SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -116,7 +116,7 @@ describe('Trino SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -133,7 +133,7 @@ describe('Trino SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
|
||||
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -150,7 +150,7 @@ describe('Trino SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -167,7 +167,7 @@ describe('Trino SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -184,10 +184,10 @@ describe('Trino SQL Syntax Suggestion', () => {
|
||||
?.syntax ?? [];
|
||||
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
expect(
|
||||
syntaxes.some((item) => item.syntaxContextType === SyntaxContextType.VIEW)
|
||||
syntaxes.some((item) => item.syntaxContextType === EntityContextType.VIEW)
|
||||
).toBeTruthy();
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['tb']);
|
||||
@ -203,7 +203,7 @@ describe('Trino SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -221,7 +221,7 @@ describe('Trino SQL Syntax Suggestion', () => {
|
||||
)?.syntax;
|
||||
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -239,7 +239,7 @@ describe('Trino SQL Syntax Suggestion', () => {
|
||||
)?.syntax;
|
||||
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -257,7 +257,7 @@ describe('Trino SQL Syntax Suggestion', () => {
|
||||
)?.syntax;
|
||||
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -275,7 +275,7 @@ describe('Trino SQL Syntax Suggestion', () => {
|
||||
)?.syntax;
|
||||
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -292,7 +292,7 @@ describe('Trino SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -309,7 +309,7 @@ describe('Trino SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -326,7 +326,7 @@ describe('Trino SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
@ -343,7 +343,7 @@ describe('Trino SQL Syntax Suggestion', () => {
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
@ -1,6 +1,6 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import TrinoSQL from 'src/parser/trinosql';
|
||||
import TrinoSQL from 'src/parser/trino';
|
||||
import { CaretPosition } from 'src/parser/common/basic-parser-types';
|
||||
import { commentOtherLine } from 'test/helper';
|
||||
|
@ -1,4 +1,4 @@
|
||||
import TrinoSQL from 'src/parser/trinosql';
|
||||
import TrinoSQL from 'src/parser/trino';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const features = {
|
@ -1,4 +1,4 @@
|
||||
import TrinoSQL from 'src/parser/trinosql';
|
||||
import TrinoSQL from 'src/parser/trino';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const features = {
|
@ -1,4 +1,4 @@
|
||||
import TrinoSQL from 'src/parser/trinosql';
|
||||
import TrinoSQL from 'src/parser/trino';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const features = {
|
@ -1,4 +1,4 @@
|
||||
import TrinoSQL from 'src/parser/trinosql';
|
||||
import TrinoSQL from 'src/parser/trino';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const features = {
|
@ -1,4 +1,4 @@
|
||||
import TrinoSQL from 'src/parser/trinosql';
|
||||
import TrinoSQL from 'src/parser/trino';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const features = {
|
@ -1,4 +1,4 @@
|
||||
import TrinoSQL from 'src/parser/trinosql';
|
||||
import TrinoSQL from 'src/parser/trino';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const features = {
|
@ -1,4 +1,4 @@
|
||||
import TrinoSQL from 'src/parser/trinosql';
|
||||
import TrinoSQL from 'src/parser/trino';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const features = {
|
@ -1,4 +1,4 @@
|
||||
import TrinoSQL from 'src/parser/trinosql';
|
||||
import TrinoSQL from 'src/parser/trino';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const features = {
|
@ -1,4 +1,4 @@
|
||||
import TrinoSQL from 'src/parser/trinosql';
|
||||
import TrinoSQL from 'src/parser/trino';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const features = {
|
@ -1,4 +1,4 @@
|
||||
import TrinoSQL from 'src/parser/trinosql';
|
||||
import TrinoSQL from 'src/parser/trino';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const features = {
|
@ -1,4 +1,4 @@
|
||||
import TrinoSQL from 'src/parser/trinosql';
|
||||
import TrinoSQL from 'src/parser/trino';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const features = {
|
@ -1,4 +1,4 @@
|
||||
import TrinoSQL from 'src/parser/trinosql';
|
||||
import TrinoSQL from 'src/parser/trino';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const features = {
|
@ -1,4 +1,4 @@
|
||||
import TrinoSQL from 'src/parser/trinosql';
|
||||
import TrinoSQL from 'src/parser/trino';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const features = {
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user