feat: collect entity (#265)
* feat: add text and word utils
* feat: add entity collector class
* refactor: rename SyntaxContextType to EntityContextType
* refactor: improve EntityCollector
* feat: improve mysql parser grammar
* feat: add mysql entity collector
* test: mysql entity collector tests
* feat: remove useless method
* feat: improve spark grammar file
* feat: add spark entity collector
* test: spark entity collector unit tests
* feat: remove useless code
* feat: add queryStatement label
* feat: add createDatabaseStmt
* feat: add trino entity collector
* feat: rename trinosql to trino
* test: trino collect entity unit tests
* test: fix spark test
* feat(impala): support impala entity collector (#256)
* Feat/collect entity hive (#263)
* feat(hive): support hive collect entity
* feat(hive): update tableAllColumns
* feat: replace antlr4ts with antlr4ng
* feat(pgsql): pgsql collect entity (#268)
* feat(pgsql): pgsql collect entity
* feat(pgsql): optimize some names
---------
Co-authored-by: zhaoge <>
* feat: get word text by token.text
* feat: support collecting db/function and add splitListener (#270)
* feat: support collecting db/function and add splitListener
* feat: remove SplitListener interface in baseParser to use SplitListener in root
* fix(mysql): fix show create xxx not collected as createXXXEntity type
* test: fix pgsql unit tests
* Feat/error recover predicate (#274)
* feat: optimize pgsql grammar
* feat: add sql parser base
* feat: apply SQLParserBase
* feat: add getAllEntities method
* test: test collect table when missing column
* feat: compose collect and suggestion (#276)
* feat: mark stmt which contains caret
* test: correct name of getAllEntities
* test: remove misscolumn unit tests
* test: add suggestionWithEntity tests
* feat: flink collect entity (#277)
* feat: improve flink sql parser
* feat: support flink entity collector
* test: flink entity collect unit test
* feat: move combine entities to parent class
---------
Co-authored-by: 霜序 <976060700@qq.com>
Co-authored-by: XCynthia <942884029@qq.com>
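As a quick orientation for the diff below, here is a minimal sketch of how the new PostgreSQL entity collector is driven. It is assembled from the API calls exercised by the test file added in this commit; the SQL string is only illustrative and error handling is omitted.

import { ParseTreeListener } from 'antlr4ng';
import { PostgreSQLParserListener } from 'src/lib/pgsql/PostgreSQLParserListener';
import PostgreSQL, { PostgreSQLEntityCollector, PostgreSqlSplitListener } from 'src/parser/pgsql';

const sql = 'CREATE DATABASE music2;'; // illustrative input
const postgreSql = new PostgreSQL();

// Parse once, then split the input into per-statement contexts.
const parseTree = postgreSql.parse(sql);
const splitListener = new PostgreSqlSplitListener();
postgreSql.listen(splitListener as PostgreSQLParserListener, parseTree);

// Walk a single statement with the entity collector and read back its entities.
const collectListener = new PostgreSQLEntityCollector(sql);
postgreSql.listen(collectListener as ParseTreeListener, splitListener.statementsContext[0]);
const entities = collectListener.getEntities();
// Each entity exposes text, entityContextType, position, and its owning belongStmt.

// Collection and suggestion are also composed behind a single call:
// postgreSql.getAllEntities(sql, caretPosition) returns the entities of the
// statement containing the caret (see suggestionWithEntity.test.ts below).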
test/parser/pgsql/contextCollect/entityCollector.test.ts (new file, 444 lines)
@@ -0,0 +1,444 @@
import { ParseTreeListener } from 'antlr4ng';
import fs from 'fs';
import path from 'path';
import { PostgreSQLParserListener } from 'src/lib/pgsql/PostgreSQLParserListener';
import { EntityContextType } from 'src/parser/common/basic-parser-types';
import { StmtContextType } from 'src/parser/common/entityCollector';
import PostgreSQL, { PostgreSQLEntityCollector, PostgreSqlSplitListener } from 'src/parser/pgsql';

const commonSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'common.sql'), 'utf-8');

describe('PostgreSQL entity collector tests', () => {
    const postgreSql = new PostgreSQL();
    const parseTree = postgreSql.parse(commonSql);
    const splitListener = new PostgreSqlSplitListener();
    postgreSql.listen(splitListener as PostgreSQLParserListener, parseTree);

    test('validate common sql', () => {
        expect(postgreSql.validate(commonSql).length).toBe(0);
    });

    test('split results', () => {
        expect(splitListener.statementsContext.length).toBe(10);
    });

    test('create database', () => {
        const testingContext = splitListener.statementsContext[0];

        const collectListener = new PostgreSQLEntityCollector(commonSql);
        postgreSql.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(1);

        const sourceTableEntity = allEntities[0];

        expect(sourceTableEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
        expect(sourceTableEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_DATABASE_STMT
        );
        expect(sourceTableEntity.text).toBe('music2');
    });

    test('create table by select', () => {
        const testingContext = splitListener.statementsContext[1];

        const collectListener = new PostgreSQLEntityCollector(commonSql);
        postgreSql.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(3);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('films_recent');
        expect(tableCreateEntity.position).toEqual({
            startIndex: 110,
            endIndex: 121,
            line: 6,
            startColumn: 14,
            endColumn: 26,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            startIndex: 97,
            endIndex: 246,
            startLine: 6,
            endLine: 7,
            startColumn: 1,
            endColumn: 122,
        });

        expect(tableCreateEntity.columns).toBeNull();

        expect(tableCreateEntity.relatedEntities.length).toBe(2);
        tableCreateEntity.relatedEntities.forEach((relatedEntity) => {
            expect(relatedEntity.entityContextType).toBe(EntityContextType.TABLE);
            expect(allEntities.some((en) => relatedEntity === en)).toBeTruthy();
        });

        expect(allEntities[1].text).toBe('films');
        expect(allEntities[1].belongStmt.rootStmt).toBe(allEntities[0].belongStmt);
        expect(allEntities[1].position).toEqual({
            startIndex: 168,
            endIndex: 172,
            line: 7,
            startColumn: 43,
            endColumn: 48,
        });

        expect(allEntities[2].text).toBe('films2');
        expect(allEntities[2].belongStmt.rootStmt).toBe(allEntities[0].belongStmt);
        expect(allEntities[2].position).toEqual({
            startIndex: 179,
            endIndex: 184,
            line: 7,
            startColumn: 54,
            endColumn: 60,
        });
    });

    test('create table of columns', () => {
        const testingContext = splitListener.statementsContext[2];

        const collectListener = new PostgreSQLEntityCollector(commonSql);
        postgreSql.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(1);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('distributors');
        expect(tableCreateEntity.position).toEqual({
            startIndex: 263,
            endIndex: 274,
            line: 9,
            startColumn: 14,
            endColumn: 26,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            startIndex: 250,
            endIndex: 377,
            startLine: 9,
            endLine: 13,
            startColumn: 1,
            endColumn: 2,
        });

        expect(tableCreateEntity.columns.length).toBe(3);
        tableCreateEntity.columns.forEach((columEntity) => {
            expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
            expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
            expect(columEntity.text).toBe(
                commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
            );
        });
    });

    test('create foreign table by columns', () => {
        const testingContext = splitListener.statementsContext[3];

        const collectListener = new PostgreSQLEntityCollector(commonSql);
        postgreSql.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(1);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('films');
        expect(tableCreateEntity.position).toEqual({
            startIndex: 402,
            endIndex: 406,
            line: 15,
            startColumn: 22,
            endColumn: 27,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            startIndex: 381,
            endIndex: 626,
            startLine: 15,
            endLine: 23,
            startColumn: 1,
            endColumn: 19,
        });

        expect(tableCreateEntity.columns.length).toBe(6);
        tableCreateEntity.columns.forEach((columEntity) => {
            expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
            expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
            expect(columEntity.text).toBe(
                commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
            );
        });
    });

    test('create foreign table of partition', () => {
        const testingContext = splitListener.statementsContext[4];

        const collectListener = new PostgreSQLEntityCollector(commonSql);
        postgreSql.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(2);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('measurement_y2016m07');
        expect(tableCreateEntity.position).toEqual({
            startIndex: 651,
            endIndex: 670,
            line: 25,
            startColumn: 22,
            endColumn: 42,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            startIndex: 630,
            endIndex: 769,
            startLine: 25,
            endLine: 27,
            startColumn: 1,
            endColumn: 21,
        });

        expect(tableCreateEntity.columns).toBeNull();

        expect(tableCreateEntity.relatedEntities.length).toBe(1);

        const relatedEntity = tableCreateEntity.relatedEntities[0];
        expect(relatedEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(allEntities.some((en) => relatedEntity === en)).toBeTruthy();
        expect(relatedEntity.text).toBe('measurement');
    });

    test('create view by select', () => {
        const testingContext = splitListener.statementsContext[5];

        const collectListener = new PostgreSQLEntityCollector(commonSql);
        postgreSql.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(2);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
        expect(tableCreateEntity.text).toBe('comedies');
        expect(tableCreateEntity.position).toEqual({
            startIndex: 785,
            endIndex: 792,
            line: 29,
            startColumn: 13,
            endColumn: 21,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
        expect(tableCreateEntity.belongStmt.position).toEqual({
            startIndex: 773,
            endIndex: 849,
            startLine: 29,
            endLine: 32,
            startColumn: 1,
            endColumn: 26,
        });

        expect(tableCreateEntity.columns).toBeNull();

        expect(tableCreateEntity.relatedEntities.length).toBe(1);

        const relatedEntity = tableCreateEntity.relatedEntities[0];
        expect(relatedEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(allEntities.some((en) => relatedEntity === en)).toBeTruthy();
        expect(relatedEntity.text).toBe('films');
    });

    test('create materialized view by columns', () => {
        const testingContext = splitListener.statementsContext[6];

        const collectListener = new PostgreSQLEntityCollector(commonSql);
        postgreSql.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(2);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
        expect(tableCreateEntity.text).toBe('comedies_mate');
        expect(tableCreateEntity.position).toEqual({
            startIndex: 878,
            endIndex: 890,
            line: 34,
            startColumn: 26,
            endColumn: 39,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
        expect(tableCreateEntity.belongStmt.position).toEqual({
            startIndex: 853,
            endIndex: 1055,
            startLine: 34,
            endLine: 39,
            startColumn: 1,
            endColumn: 17,
        });

        expect(tableCreateEntity.columns.length).toBe(2);

        tableCreateEntity.columns.forEach((columEntity) => {
            expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
            expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
            expect(columEntity.text).toBe(
                commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
            );
        });
    });

    test('select with clause', () => {
        const testingContext = splitListener.statementsContext[7];

        const collectListener = new PostgreSQLEntityCollector(commonSql);
        postgreSql.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(2);

        const tableEntity1 = allEntities[0];
        const tableEntity2 = allEntities[1];
        expect(tableEntity1.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity1.text).toBe('table_expression');
        expect(tableEntity1.position).toEqual({
            startIndex: 1109,
            endIndex: 1124,
            line: 41,
            startColumn: 51,
            endColumn: 67,
        });

        expect(tableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(tableEntity1.belongStmt.position).toEqual({
            startIndex: 1094,
            endIndex: 1124,
            startLine: 41,
            endLine: 41,
            startColumn: 36,
            endColumn: 67,
        });
        expect(tableEntity1.columns).toBeNull();
        expect(tableEntity1.relatedEntities).toBeNull();

        expect(tableEntity2.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity2.text).toBe('table_expression1');
        expect(tableEntity2.position).toEqual({
            startIndex: 1182,
            endIndex: 1198,
            line: 42,
            startColumn: 55,
            endColumn: 72,
        });

        expect(tableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(tableEntity2.belongStmt.position).toEqual({
            startIndex: 1059,
            endIndex: 1237,
            startLine: 41,
            endLine: 42,
            startColumn: 1,
            endColumn: 111,
        });
        expect(tableEntity2.columns).toBeNull();
        expect(tableEntity2.relatedEntities).toBeNull();
    });

    test('insert into table', () => {
        const testingContext = splitListener.statementsContext[8];

        const collectListener = new PostgreSQLEntityCollector(commonSql);
        postgreSql.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(1);

        const tableInsertEntity = allEntities[0];

        expect(tableInsertEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableInsertEntity.text).toBe('insert_films');
        expect(tableInsertEntity.position).toEqual({
            startIndex: 1253,
            endIndex: 1264,
            line: 44,
            startColumn: 13,
            endColumn: 25,
        });

        expect(tableInsertEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
        expect(tableInsertEntity.belongStmt.position).toEqual({
            startIndex: 1241,
            endIndex: 1355,
            startLine: 44,
            endLine: 45,
            startColumn: 1,
            endColumn: 55,
        });

        expect(tableInsertEntity.columns).toBeNull();
        expect(tableInsertEntity.relatedEntities).toBeNull();
    });

    test('create function', () => {
        const testingContext = splitListener.statementsContext[9];

        const collectListener = new PostgreSQLEntityCollector(commonSql);
        postgreSql.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(1);

        const functionEntity = allEntities[0];

        expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
        expect(functionEntity.text).toBe('get_color_note');
        expect(functionEntity.position).toEqual({
            endColumn: 31,
            endIndex: 1388,
            line: 47,
            startColumn: 17,
            startIndex: 1375,
        });

        expect(functionEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_FUNCTION_STMT
        );
        expect(functionEntity.belongStmt.position).toEqual({
            endColumn: 15,
            endIndex: 1477,
            endLine: 49,
            startColumn: 1,
            startIndex: 1359,
            startLine: 47,
        });

        expect(functionEntity.columns).toBeNull();
        expect(functionEntity.relatedEntities).toBeNull();
    });
});
test/parser/pgsql/contextCollect/fixtures/common.sql (new file, 49 lines)
@@ -0,0 +1,49 @@
CREATE DATABASE music2
LOCALE 'sv_SE.iso885915'
ENCODING LATIN9
TEMPLATE template0;

CREATE TABLE films_recent AS
SELECT films.name, films.startTime FROM films JOIN films2 ON films2.id = films.id WHERE films.startTime >= '2002-01-01';

CREATE TABLE distributors (
did integer,
name varchar(40),
CONSTRAINT con1 CHECK (did > 100 AND name <> '')
);

CREATE FOREIGN TABLE films (
code char(5) NOT NULL,
title varchar(40) NOT NULL,
did integer NOT NULL,
date_prod date,
kind varchar(10),
len interval hour to minute
)
SERVER film_server;

CREATE FOREIGN TABLE measurement_y2016m07
PARTITION OF measurement FOR VALUES FROM ('2016-07-01') TO ('2016-08-01')
SERVER server_07;

CREATE VIEW comedies AS
SELECT *
FROM films
WHERE kind = 'Comedy';

CREATE MATERIALIZED VIEW comedies_mate
(column_name1, column_name2)
WITH ( storage_parameter=3, storage_parameter1=4)
TABLESPACE tablespace_name
AS SELECT * FROM product
WITH NO DATA;

WITH RECURSIVE query_name (id) AS (SELECT id FROM table_expression)
SELECT DISTINCT ON (col1,col2) random() AS name1 FROM table_expression1 WHERE name1=name1 GROUP BY DISTINCT id;

INSERT INTO insert_films (code, title, did, date_prod, kind)
VALUES ('T_601', 'Yojimbo', 106, DEFAULT, 'Drama');

CREATE FUNCTION get_color_note (rainbow) RETURNS text AS
'SELECT note FROM my_colors WHERE color = $1'
LANGUAGE SQL;
@@ -1,4 +1,4 @@
import PostgresSQL, { PgSqlSplitListener } from 'src/parser/pgsql';
import PostgresSQL, { PostgreSqlSplitListener } from 'src/parser/pgsql';
import { PostgreSQLParserListener } from 'src/lib/pgsql/PostgreSQLParserListener';

const validSQL1 = `INSERT INTO country_page_view
@@ -15,7 +15,7 @@ describe('PgSQL ErrorStrategy test', () => {
// const sql = [inValidSQL, validSQL1, validSQL2].join('\n');
// // parse with empty errorListener
// const parseTree = pgSQL.parse(sql, () => {});
// const splitListener = new PgSqlSplitListener();
// const splitListener = new PostgreSqlSplitListener();
// pgSQL.listen(splitListener as PostgreSQLParserListener, parseTree);

// const statementCount = splitListener.statementsContext.length;
@@ -32,7 +32,7 @@ describe('PgSQL ErrorStrategy test', () => {
const sql = [validSQL1, inValidSQL, validSQL2].join('\n');
// parse with empty errorListener
const parseTree = pgSQL.parse(sql, () => {});
const splitListener = new PgSqlSplitListener();
const splitListener = new PostgreSqlSplitListener();
pgSQL.listen(splitListener as PostgreSQLParserListener, parseTree);

const statementCount = splitListener.statementsContext.length;
@@ -49,7 +49,7 @@ describe('PgSQL ErrorStrategy test', () => {
const sql = [validSQL1, validSQL2, inValidSQL].join('\n');
// parse with empty errorListener
const parseTree = pgSQL.parse(sql, () => {});
const splitListener = new PgSqlSplitListener();
const splitListener = new PostgreSqlSplitListener();
pgSQL.listen(splitListener as PostgreSQLParserListener, parseTree);

splitListener.statementsContext.map((item, index) => {
@@ -0,0 +1,11 @@
SELECT FROM my_db.tb;

SELECT name, calculate_age(birthdate) AS age, FROM students;

INSERT INTO insert_tb SELECT FROM from_tb;

INSERT INTO insert_tb SELECT id, age, FROM from_tb;

CREATE TABLE sorted_census_data AS SELECT FROM unsorted_census_data;

CREATE TABLE sorted_census_data AS SELECT id, age, FROM unsorted_census_data;
@@ -1,7 +1,7 @@
import fs from 'fs';
import path from 'path';
import PostgresSQL from 'src/parser/pgsql';
import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';

const syntaxSql = fs.readFileSync(
path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
@@ -18,7 +18,7 @@ describe('PgSQL Multiple Statements Syntax Suggestion', () => {
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
);

expect(suggestion).not.toBeUndefined();
@@ -32,10 +32,10 @@ describe('PgSQL Multiple Statements Syntax Suggestion', () => {
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
(syn) => syn.syntaxContextType === EntityContextType.TABLE
);
const suggestionVw = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
(syn) => syn.syntaxContextType === EntityContextType.VIEW
);

expect(suggestion).not.toBeUndefined();
@@ -51,7 +51,7 @@ describe('PgSQL Multiple Statements Syntax Suggestion', () => {
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
(syn) => syn.syntaxContextType === EntityContextType.TABLE
);

expect(suggestion).not.toBeUndefined();
@@ -65,7 +65,7 @@ describe('PgSQL Multiple Statements Syntax Suggestion', () => {
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
(syn) => syn.syntaxContextType === EntityContextType.TABLE
);

expect(suggestion).not.toBeUndefined();
test/parser/pgsql/suggestion/suggestionWithEntity.test.ts (new file, 156 lines)
@@ -0,0 +1,156 @@
import fs from 'fs';
import path from 'path';
import PostgreSQL from 'src/parser/pgsql';
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
import { commentOtherLine } from 'test/helper';

const syntaxSql = fs.readFileSync(
    path.join(__dirname, 'fixtures', 'suggestionWithEntity.sql'),
    'utf-8'
);

describe('PostgreSQL Syntax Suggestion with collect entity', () => {
    const postgre = new PostgreSQL();

    test('select with no column', () => {
        const pos: CaretPosition = {
            lineNumber: 1,
            column: 8,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = postgre.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = postgre.getAllEntities(sql, pos);
        expect(entities.length).toBe(1);
        expect(entities[0].text).toBe('my_db.tb');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
    });

    test('select with columns with trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 3,
            column: 47,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = postgre.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = postgre.getAllEntities(sql, pos);
        expect(entities.length).toBe(1);
        expect(entities[0].text).toBe('students');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
    });

    test('insert into table as select with no column', () => {
        const pos: CaretPosition = {
            lineNumber: 5,
            column: 30,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = postgre.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = postgre.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('insert_tb');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('from_tb');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('insert into table as select with trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 7,
            column: 39,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = postgre.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = postgre.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('insert_tb');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('from_tb');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('create table as select with no column', () => {
        const pos: CaretPosition = {
            lineNumber: 9,
            column: 43,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = postgre.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = postgre.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('sorted_census_data');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('unsorted_census_data');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('create table as select with trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 11,
            column: 52,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = postgre.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = postgre.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('sorted_census_data');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('unsorted_census_data');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });
});
@@ -1,7 +1,7 @@
import fs from 'fs';
import path from 'path';
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
import PostgresSQL from 'src/parser/pgsql';
import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
import { commentOtherLine } from 'test/helper';

const syntaxSql = fs.readFileSync(
@@ -28,7 +28,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
(syn) => syn.syntaxContextType === EntityContextType.TABLE
);

expect(suggestion).not.toBeUndefined();
@@ -45,7 +45,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
(syn) => syn.syntaxContextType === EntityContextType.TABLE
);

expect(suggestion).not.toBeUndefined();
@@ -62,7 +62,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
(syn) => syn.syntaxContextType === EntityContextType.TABLE
);

expect(suggestion).not.toBeUndefined();
@@ -95,13 +95,13 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos2
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
);
const suggestion1 = syntaxes1?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
);
const suggestion2 = syntaxes2?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);

expect(suggestion).not.toBeUndefined();
@@ -122,7 +122,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
(syn) => syn.syntaxContextType === EntityContextType.VIEW_CREATE
);

expect(suggestion).not.toBeUndefined();
@@ -139,7 +139,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
(syn) => syn.syntaxContextType === EntityContextType.VIEW
);

expect(suggestion).not.toBeUndefined();
@@ -156,7 +156,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
(syn) => syn.syntaxContextType === EntityContextType.VIEW
);

expect(suggestion).not.toBeUndefined();
@@ -173,7 +173,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION_CREATE
);

expect(suggestion).not.toBeUndefined();
@@ -190,7 +190,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION
);

expect(suggestion).not.toBeUndefined();
@@ -207,7 +207,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE
(syn) => syn.syntaxContextType === EntityContextType.DATABASE_CREATE
);

expect(suggestion).not.toBeUndefined();
@@ -224,7 +224,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
);

expect(suggestion).not.toBeUndefined();
@@ -241,7 +241,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
);

expect(suggestion).not.toBeUndefined();
@@ -258,7 +258,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE
(syn) => syn.syntaxContextType === EntityContextType.DATABASE_CREATE
);

expect(suggestion).not.toBeUndefined();
@@ -275,7 +275,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
);

expect(suggestion).not.toBeUndefined();
@@ -292,7 +292,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
);

expect(suggestion).not.toBeUndefined();
@@ -317,10 +317,10 @@ describe('Postgre SQL Syntax Suggestion', () => {
posCreate
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
const suggestionCreate = syntaxesCreate?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
);

expect(suggestion).not.toBeUndefined();
@@ -357,13 +357,13 @@ describe('Postgre SQL Syntax Suggestion', () => {
posAction
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
const suggestionCreate = syntaxesCreate?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
);
const suggestionAction = syntaxesAction?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);

expect(suggestion).not.toBeUndefined();
@@ -386,7 +386,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
@@ -402,7 +402,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([
@@ -438,13 +438,13 @@ describe('Postgre SQL Syntax Suggestion', () => {
posAction
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
const suggestionCreate = syntaxesCreate?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
const suggestionAction = syntaxesAction?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
);

expect(suggestion).not.toBeUndefined();
@@ -467,7 +467,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
@@ -483,7 +483,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
@@ -515,13 +515,13 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos2
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
const suggestion1 = syntaxes1?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
const suggestion2 = syntaxes2?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['col_name']);
@@ -541,7 +541,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
@@ -557,7 +557,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
@@ -589,13 +589,13 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos2
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
const suggestion1 = syntaxes1?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
const suggestion2 = syntaxes2?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
@@ -615,7 +615,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['col_name']);
@@ -631,7 +631,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
@@ -655,10 +655,10 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos1
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
const suggestion1 = syntaxes1?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
@@ -676,7 +676,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
@@ -708,13 +708,13 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos2
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
const suggestion1 = syntaxes1?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
const suggestion2 = syntaxes2?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
@@ -734,7 +734,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['col_name']);
@@ -749,7 +749,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['co_name']);
@@ -765,7 +765,7 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([
@@ -825,23 +825,23 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos5
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
const suggestion1 = syntaxes1?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
const suggestion2 = syntaxes2?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
const suggestion3 = syntaxes3?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
const suggestion4 = syntaxes4?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);

const suggestion5 = syntaxes5?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);

expect(suggestion).not.toBeUndefined();
@@ -884,13 +884,13 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos2
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
const suggestion1 = syntaxes1?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
const suggestion2 = syntaxes2?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['columnname']);
@@ -918,10 +918,10 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos1
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
const suggestion1 = syntaxes1?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['col1']);
@@ -954,13 +954,13 @@ describe('Postgre SQL Syntax Suggestion', () => {
pos3
)?.syntax;
const suggestion1 = syntaxes1?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
const suggestion2 = syntaxes2?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
const suggestion3 = syntaxes3?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);

expect(suggestion1).not.toBeUndefined();