feat: collect entity (#265)

* feat: add text and word utils

* feat: add entity collector class

* refactor: rename SyntaxContextType to EntityContextType

* refactor: improve EntityCollector

* feat: improve mysql parser grammar

* feat: add mysql entity collector

* test: mysql entity collector tests

* feat: remove useless method

* feat: improve spark grammar file

* feat: add spark entity collector

* test: spark entity collector unit tests

* feat: remove useless code

* feat: add queryStatement label

* feat: add crateDatabaseStmt

* feat: add trino entity collector

* feat: rename trinosql to trino

* test: trino collect entity unit tests

* test: fix spark test

* feat(impala): support impala entity collector (#256)

* Feat/collect entity hive (#263)

* feat(hive): support hive collect entity

* feat(hive): update tableAllColumns

* feat: replace antlr4ts with antlr4ng

* feat(pgsql): pgsql collect entity (#268)

* feat(pgsql): pgsql collect entity

* feat(pgsql): optimize some name

---------

Co-authored-by: zhaoge <>

* feat: get word text by token.text

* feat: support collecting db/function and add splitListener (#270)

* feat: support collecting db/function and add splitListener

* feat: remove SplitListener interface in baseParser to use SplitListener in root

* fix(mysql): fix show create xxx not collected as createXXXEntity type

* test: fix pgsql unit tests

* Feat/error recover predicate (#274)

* feat: optimize pgsql grammar

* feat: add sql parser base

* feat: apply SQLParserBase

* feat: add geAllEntities method

* test: test collect table when missing column

* feat: compose collect and suggestion (#276)

* feat: mark stmt which contains caret

* test: correct name of getAllEntities

* test: remove misscolumn unit tests

* test: add suggestionWithEntity tests

* feat: flink collect entity (#277)

* feat: improve flink sql parser

* feat: support flink entity collector

* test: flink entity collect unit test

* feat: move combine entities to parent class

---------

Co-authored-by: 霜序 <976060700@qq.com>
Co-authored-by: XCynthia <942884029@qq.com>
Author: Hayden
Date: 2024-03-26 14:28:27 +08:00
Committed by: GitHub
Parent: 3f62ad0d32
Commit: a99721162b
230 changed files with 56908 additions and 46672 deletions
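
For orientation, the tests added below exercise the new entity-collection and suggestion APIs end to end. A minimal usage sketch follows, mirroring those tests; the sample SQL text, the caret position, and the commented expected results are illustrative assumptions, while the imports and method calls are the ones the tests themselves use:

import TrinoSQL, { TrinoSqlSplitListener, TrinoEntityCollector } from 'src/parser/trino';
import { TrinoSqlListener } from 'src/lib/trinosql/TrinoSqlListener';
import { ParseTreeListener } from 'antlr4ng';
import { CaretPosition } from 'src/parser/common/basic-parser-types';

const sql = `CREATE TABLE foo(x, y) AS SELECT a, b FROM t;`; // sample input (assumption)
const trino = new TrinoSQL();

// 1. Parse once and split the input into per-statement contexts.
const parseTree = trino.parse(sql);
const splitListener = new TrinoSqlSplitListener();
trino.listen(splitListener as TrinoSqlListener, parseTree);

// 2. Collect entities from a single statement context.
const collectListener = new TrinoEntityCollector(sql);
trino.listen(collectListener as ParseTreeListener, splitListener.statementsContext[0]);
const entities = collectListener.getEntities(); // e.g. TABLE_CREATE 'foo' and TABLE 't'

// 3. Or collect entities relative to a caret position and combine them
//    with syntax suggestions at the same position.
const pos: CaretPosition = { lineNumber: 1, column: 30 }; // sample caret (assumption)
const allEntities = trino.getAllEntities(sql, pos);
const suggestions = trino.getSuggestionAtCaretPosition(sql, pos)?.syntax;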

View File

@ -0,0 +1,250 @@
import fs from 'fs';
import path from 'path';
import TrinoSQL from 'src/parser/trino';
import { TrinoSqlSplitListener, TrinoEntityCollector } from 'src/parser/trino';
import { ParseTreeListener } from 'antlr4ng';
import { TrinoSqlListener } from 'src/lib/trinosql/TrinoSqlListener';
import { EntityContextType } from 'src/parser/common/basic-parser-types';
import { StmtContextType } from 'src/parser/common/entityCollector';
const commonSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'common.sql'), 'utf-8');
describe('Trino entity collector tests', () => {
const trino = new TrinoSQL();
const parseTree = trino.parse(commonSql);
const splitListener = new TrinoSqlSplitListener();
trino.listen(splitListener as TrinoSqlListener, parseTree);
test('validate common sql', () => {
expect(trino.validate(commonSql).length).toBe(0);
});
test('split results', () => {
expect(splitListener.statementsContext.length).toBe(9);
});
test('create table like', () => {
const testingContext = splitListener.statementsContext[0];
const collectListener = new TrinoEntityCollector(commonSql);
trino.listen(collectListener as ParseTreeListener, testingContext);
const allEntities = collectListener.getEntities();
expect(allEntities.length).toBe(2);
const tableCreateEntity = allEntities[0];
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
expect(tableCreateEntity.text).toBe('bar');
expect(tableCreateEntity.position).toEqual({
startIndex: 27,
endIndex: 29,
line: 1,
startColumn: 28,
endColumn: 31,
});
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
StmtContextType.CREATE_TABLE_STMT
);
expect(tableCreateEntity.belongStmt.position).toEqual({
startIndex: 0,
endIndex: 68,
startLine: 1,
endLine: 1,
startColumn: 1,
endColumn: 70,
});
expect(tableCreateEntity.relatedEntities.length).toBe(1);
const beLikedEntity = allEntities[1];
expect(tableCreateEntity.relatedEntities[0]).toBe(beLikedEntity);
expect(beLikedEntity.text).toBe('like_table');
expect(beLikedEntity.entityContextType).toBe(EntityContextType.TABLE);
expect(beLikedEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
});
test('create table as select', () => {
const testingContext = splitListener.statementsContext[1];
const collectListener = new TrinoEntityCollector(commonSql);
trino.listen(collectListener as ParseTreeListener, testingContext);
const allEntities = collectListener.getEntities();
expect(allEntities.length).toBe(2);
const tableCreateEntity = allEntities[0];
const originTableEntity = allEntities[1];
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
expect(tableCreateEntity.text).toBe('foo');
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
StmtContextType.CREATE_TABLE_STMT
);
expect(tableCreateEntity.columns.length).toBe(2);
tableCreateEntity.columns.forEach((columnEntity) => {
expect(columnEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
expect(columnEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
expect(columnEntity.text).toBe(
commonSql.slice(columnEntity.position.startIndex, columnEntity.position.endIndex + 1)
);
});
expect(tableCreateEntity.relatedEntities.length).toBe(1);
expect(tableCreateEntity.relatedEntities[0]).toBe(originTableEntity);
expect(originTableEntity.entityContextType).toBe(EntityContextType.TABLE);
expect(originTableEntity.text).toBe('t');
expect(originTableEntity.belongStmt.rootStmt).toBe(tableCreateEntity.belongStmt);
});
test('create view as select', () => {
const testingContext = splitListener.statementsContext[2];
const collectListener = new TrinoEntityCollector(commonSql);
trino.listen(collectListener as ParseTreeListener, testingContext);
const allEntities = collectListener.getEntities();
expect(allEntities.length).toBe(2);
const tableCreateEntity = allEntities[0];
const originTableEntity = allEntities[1];
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
expect(tableCreateEntity.text).toBe('a');
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
expect(tableCreateEntity.columns).toBeNull();
expect(tableCreateEntity.relatedEntities.length).toBe(1);
expect(tableCreateEntity.relatedEntities[0]).toBe(originTableEntity);
expect(originTableEntity.entityContextType).toBe(EntityContextType.TABLE);
expect(originTableEntity.text).toBe('t');
expect(originTableEntity.belongStmt.rootStmt).toBe(tableCreateEntity.belongStmt);
});
test('create materialized view as select', () => {
const testingContext = splitListener.statementsContext[3];
const collectListener = new TrinoEntityCollector(commonSql);
trino.listen(collectListener as ParseTreeListener, testingContext);
const allEntities = collectListener.getEntities();
expect(allEntities.length).toBe(2);
const tableCreateEntity = allEntities[0];
const originTableEntity = allEntities[1];
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
expect(tableCreateEntity.text).toBe('a');
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
expect(tableCreateEntity.columns).toBeNull();
expect(tableCreateEntity.relatedEntities.length).toBe(1);
expect(tableCreateEntity.relatedEntities[0]).toBe(originTableEntity);
expect(originTableEntity.entityContextType).toBe(EntityContextType.TABLE);
expect(originTableEntity.text).toBe('t');
expect(originTableEntity.belongStmt.rootStmt).toBe(tableCreateEntity.belongStmt);
});
test('select from table', () => {
const testingContext = splitListener.statementsContext[4];
const collectListener = new TrinoEntityCollector(commonSql);
trino.listen(collectListener as ParseTreeListener, testingContext);
const allEntities = collectListener.getEntities();
expect(allEntities.length).toBe(1);
const tableCreateEntity = allEntities[0];
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE);
expect(tableCreateEntity.text).toBe('table1');
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
expect(tableCreateEntity.columns).toBeNull();
expect(tableCreateEntity.relatedEntities).toBeNull();
});
test('insert into table as select', () => {
const testingContext = splitListener.statementsContext[5];
const collectListener = new TrinoEntityCollector(commonSql);
trino.listen(collectListener as ParseTreeListener, testingContext);
const allEntities = collectListener.getEntities();
expect(allEntities.length).toBe(2);
const insertTableEntity = allEntities[0];
const fromTableEntity = allEntities[1];
expect(insertTableEntity.entityContextType).toBe(EntityContextType.TABLE);
expect(insertTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
expect(insertTableEntity.text).toBe('orders');
expect(fromTableEntity.entityContextType).toBe(EntityContextType.TABLE);
expect(fromTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
expect(fromTableEntity.text).toBe('new_orders');
expect(fromTableEntity.belongStmt.parentStmt).toBe(insertTableEntity.belongStmt);
});
test('insert into table values', () => {
const testingContext = splitListener.statementsContext[6];
const collectListener = new TrinoEntityCollector(commonSql);
trino.listen(collectListener as ParseTreeListener, testingContext);
const allEntities = collectListener.getEntities();
expect(allEntities.length).toBe(1);
const tableEntity = allEntities[0];
expect(tableEntity.entityContextType).toBe(EntityContextType.TABLE);
expect(tableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
expect(tableEntity.text).toBe('cities');
});
test('create schema', () => {
const testingContext = splitListener.statementsContext[7];
const collectListener = new TrinoEntityCollector(commonSql);
trino.listen(collectListener as ParseTreeListener, testingContext);
const allEntities = collectListener.getEntities();
expect(allEntities.length).toBe(1);
const schemaEntity = allEntities[0];
expect(schemaEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
expect(schemaEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_DATABASE_STMT);
expect(schemaEntity.text).toBe('test');
});
test('use schema', () => {
const testingContext = splitListener.statementsContext[8];
const collectListener = new TrinoEntityCollector(commonSql);
trino.listen(collectListener as ParseTreeListener, testingContext);
const allEntities = collectListener.getEntities();
expect(allEntities.length).toBe(1);
const schemaEntity = allEntities[0];
expect(schemaEntity.entityContextType).toBe(EntityContextType.DATABASE);
expect(schemaEntity.belongStmt.stmtContextType).toBe(StmtContextType.COMMON_STMT);
expect(schemaEntity.text).toBe('information_schema');
});
});

View File

@ -0,0 +1,17 @@
CREATE TABLE IF NOT EXISTS bar (LIKE like_table INCLUDING PROPERTIES);
CREATE TABLE foo(x,y) AS SELECT a,b FROM t;
CREATE VIEW a AS SELECT * FROM t;
CREATE MATERIALIZED VIEW a AS SELECT * FROM t;
SELECT * FROM table1 GROUP BY a;
INSERT INTO orders SELECT * FROM new_orders;
INSERT INTO cities VALUES (1, 'San Francisco');
CREATE SCHEMA IF NOT EXISTS test;
USE information_schema;

View File

@ -0,0 +1,61 @@
import TrinoSQL, { TrinoSqlSplitListener } from 'src/parser/trino';
import { TrinoSqlListener } from 'src/lib/trinosql/TrinoSqlListener';
const validSQL1 = `INSERT INTO country_page_view
VALUES ('Chinese', 'mumiao', 18),
('Amercian', 'georage', 22);`;
const validSQL2 = 'SELECT * FROM tb;';
const inValidSQL = 'CREATE TABLE VALUES;';
describe('TrinoSQL ErrorStrategy test', () => {
const trinoSQL = new TrinoSQL();
test('begin inValid', () => {
const sql = [inValidSQL, validSQL1, validSQL2].join('\n');
// parse with empty errorListener
const parseTree = trinoSQL.parse(sql, () => {});
const splitListener = new TrinoSqlSplitListener();
trinoSQL.listen(splitListener as TrinoSqlListener, parseTree);
const statementCount = splitListener.statementsContext.length;
splitListener.statementsContext.map((item, index) => {
if (index !== statementCount - 1 && index !== statementCount - 2) {
expect(item.exception).not.toBe(null);
} else {
expect(item.exception).toBe(null);
}
});
});
test('middle inValid', () => {
const sql = [validSQL1, inValidSQL, validSQL2].join('\n');
// parse with empty errorListener
const parseTree = trinoSQL.parse(sql, () => {});
const splitListener = new TrinoSqlSplitListener();
trinoSQL.listen(splitListener as TrinoSqlListener, parseTree);
const statementCount = splitListener.statementsContext.length;
splitListener.statementsContext.map((item, index) => {
if (index !== statementCount - 1 && index !== 0) {
expect(item.exception).not.toBe(null);
} else {
expect(item.exception).toBe(null);
}
});
});
test('end inValid', () => {
const sql = [validSQL1, validSQL2, inValidSQL].join('\n');
// parse with empty errorListener
const parseTree = trinoSQL.parse(sql, () => {});
const splitListener = new TrinoSqlSplitListener();
trinoSQL.listen(splitListener as TrinoSqlListener, parseTree);
splitListener.statementsContext.map((item, index) => {
if (index !== 0 && index !== 1) {
expect(item.exception).not.toBe(null);
} else {
expect(item.exception).toBe(null);
}
});
});
});

View File

@ -0,0 +1,12 @@
import TrinoSQL from 'src/parser/trino';
describe('trinoSQL Lexer tests', () => {
const parser = new TrinoSQL();
const sql = 'SELECT * FROM table1';
const tokens = parser.getAllTokens(sql);
test('token counts', () => {
expect(tokens.length).toBe(7);
});
});

View File

@ -0,0 +1,59 @@
import TrinoSQL from 'src/parser/trino';
import { TrinoSqlListener } from 'src/lib/trinosql/TrinoSqlListener';
import { ParseTreeListener } from 'antlr4ng';
describe('trino SQL Listener Tests', () => {
const expectTableName = 'user1';
const sql = `select id,name,sex from ${expectTableName};`;
const parser = new TrinoSQL();
const parseTree = parser.parse(sql);
test('Listener enterTableName', async () => {
let result = '';
class MyListener implements TrinoSqlListener {
enterTableName = (ctx): void => {
result = ctx.getText().toLowerCase();
};
visitTerminal() {}
visitErrorNode() {}
enterEveryRule() {}
exitEveryRule() {}
}
const listenTableName = new MyListener();
await parser.listen(listenTableName as ParseTreeListener, parseTree);
expect(result).toBe(expectTableName);
});
test('Split sql listener', async () => {
const singleStatementArr = [
`SELECT id FROM games ORDER BY score;`,
`INSERT INTO country_page_view
SELECT user1, cnt FROM page_view_source`,
`CREATE TABLE IF NOT EXISTS foo AS SELECT * FROM t;`,
];
const sql = singleStatementArr.join('\n');
const sqlSlices = parser.splitSQLByStatement(sql);
expect(sqlSlices).not.toBeNull();
// check text in result
expect(sqlSlices.map((item) => item.text)).toEqual(singleStatementArr);
// check startIndex and endIndex in result
sqlSlices.forEach((slice, index) => {
expect(sql.slice(slice.startIndex, slice.endIndex + 1)).toBe(singleStatementArr[index]);
});
// check lineNumber in result
expect(sqlSlices[0].startLine).toBe(1);
expect(sqlSlices[0].endLine).toBe(1);
expect(sqlSlices[1].startLine).toBe(2);
expect(sqlSlices[1].endLine).toBe(3);
expect(sqlSlices[2].startLine).toBe(4);
expect(sqlSlices[2].endLine).toBe(4);
});
});

View File

@ -0,0 +1,22 @@
CREATE TABLE VALUES -- unfinished
ALTER SCHEMA foo RENAME TO bar;
DELETE FROM t;
DENY SELECT ON SCHEMA s TO USER u;
SELECT ids FROM db. ; -- unfinished
INSERT INTO weather (date, city, temp_hi, temp_lo) VALUES ('1994-11-29', 'Hayward', 54, 37);
EXPLAIN ANALYZE VERBOSE SELECT * FROM t;
INSERT INTO weather (date, city, temp_hi, temp_lo) VALUES ('1994-11-29', 'Hayward', 54, 37); -- unfinished
DENY SELECT ON SCHEMA s TO USER u;
CALL catalog.schema.test();
INSERT INTO products (product_no, name, price) SELECT * FROM db. ; -- unfinished

View File

@ -0,0 +1,11 @@
SELECT FROM my_db.tb;
SELECT name, calculate_age(birthdate) AS age, FROM students;
INSERT INTO insert_tb SELECT FROM from_tb;
INSERT INTO insert_tb SELECT id, age, FROM from_tb;
CREATE TABLE sorted_census_data AS SELECT FROM unsorted_census_data;
CREATE TABLE sorted_census_data AS SELECT id, age, FROM unsorted_census_data;

View File

@ -0,0 +1,37 @@
INSERT INTO db.tb ;
SELECT ids FROM db.;
CREATE TABLE db. VALUES;
DROP TABLE IF EXISTS db.a;
CREATE OR REPLACE VIEW db.v;
DROP VIEW db.v ;
SELECT name, calculate_age(birthday) AS age FROM students;
CREATE SCHEMA db ;
DROP SCHEMA IF EXISTS sch;
SHOW COLUMNS FROM tb ;
COMMENT ON COLUMN tb.;
ALTER TABLE tb RENAME COLUMN ;
ALTER TABLE tb RENAME COLUMN ids TO ;
ALTER TABLE users DROP COLUMN ;
ALTER TABLE users ADD COLUMN zi ;
SHOW COMMENT ON COLUMN tb.c ;
INSERT INTO tb (id, );
SELECT * FROM tb ORDER BY ;
SELECT * FROM tb GROUP BY ;

View File

@ -0,0 +1,13 @@
ALTER ;
CREATE ;
DEALLOCATE ;
DELETE ;
DESCRIBE ;
DROP ;
INSERT ;

View File

@ -0,0 +1,69 @@
import fs from 'fs';
import path from 'path';
import TrinoSQL from 'src/parser/trino';
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
const syntaxSql = fs.readFileSync(
path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
'utf-8'
);
describe('TrinoSQL Multiple Statements Syntax Suggestion', () => {
const parser = new TrinoSQL();
test('Create table ', () => {
const pos: CaretPosition = {
lineNumber: 1,
column: 14,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
});
test('Select from table', () => {
const pos: CaretPosition = {
lineNumber: 9,
column: 20,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.TABLE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
});
test('Insert into table ', () => {
const pos: CaretPosition = {
lineNumber: 15,
column: 13,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.TABLE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
});
test('Insert into select from table ', () => {
const pos: CaretPosition = {
lineNumber: 21,
column: 65,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.TABLE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
});
});

View File

@ -0,0 +1,159 @@
import fs from 'fs';
import path from 'path';
import TrinoSQL from 'src/parser/trino';
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
import { commentOtherLine } from 'test/helper';
const syntaxSql = fs.readFileSync(
path.join(__dirname, 'fixtures', 'suggestionWithEntity.sql'),
'utf-8'
);
describe('Trino SQL Syntax Suggestion with collect entity', () => {
const trino = new TrinoSQL();
test('select with no column', () => {
const pos: CaretPosition = {
lineNumber: 1,
column: 8,
};
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
const syntaxes = trino.getSuggestionAtCaretPosition(sql, pos)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
// TODO:
// const entities = trino.getAllEntities(sql, pos);
// expect(entities.length).toBe(1);
// expect(entities[0].text).toBe('my_db.tb');
// expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
// expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
});
test('select with columns with trailing comma', () => {
const pos: CaretPosition = {
lineNumber: 3,
column: 47,
};
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
const syntaxes = trino.getSuggestionAtCaretPosition(sql, pos)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
const entities = trino.getAllEntities(sql, pos);
expect(entities.length).toBe(1);
expect(entities[0].text).toBe('students');
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
});
test('insert into table as select with no column', () => {
const pos: CaretPosition = {
lineNumber: 5,
column: 30,
};
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
const syntaxes = trino.getSuggestionAtCaretPosition(sql, pos)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
const entities = trino.getAllEntities(sql, pos);
expect(entities.length).toBe(1);
expect(entities[0].text).toBe('insert_tb');
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
// TODO:
// expect(entities[1].text).toBe('from_tb');
// expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
// expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
});
test('insert into table as select with trailing comma', () => {
const pos: CaretPosition = {
lineNumber: 7,
column: 39,
};
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
const syntaxes = trino.getSuggestionAtCaretPosition(sql, pos)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
const entities = trino.getAllEntities(sql, pos);
expect(entities.length).toBe(2);
expect(entities[0].text).toBe('insert_tb');
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
expect(entities[1].text).toBe('from_tb');
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
});
test('create table as select with no column', () => {
const pos: CaretPosition = {
lineNumber: 9,
column: 43,
};
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
const syntaxes = trino.getSuggestionAtCaretPosition(sql, pos)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
const entities = trino.getAllEntities(sql, pos);
expect(entities.length).toBe(1);
expect(entities[0].text).toBe('sorted_census_data');
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
// TODO:
// expect(entities[1].text).toBe('unsorted_census_data');
// expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
// expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
});
test('create table as select with trailing comma', () => {
const pos: CaretPosition = {
lineNumber: 11,
column: 52,
};
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
const syntaxes = trino.getSuggestionAtCaretPosition(sql, pos)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
const entities = trino.getAllEntities(sql, pos);
expect(entities.length).toBe(2);
expect(entities[0].text).toBe('sorted_census_data');
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
expect(entities[1].text).toBe('unsorted_census_data');
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
});
});

View File

@ -0,0 +1,352 @@
import fs from 'fs';
import path from 'path';
import TrinoSQL from 'src/parser/trino';
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
import { commentOtherLine } from 'test/helper';
const syntaxSql = fs.readFileSync(
path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),
'utf-8'
);
describe('Trino SQL Syntax Suggestion', () => {
const parser = new TrinoSQL();
test('Validate Syntax SQL', () => {
expect(parser.validate(syntaxSql).length).not.toBe(0);
expect(parser.validate(syntaxSql).length).not.toBe(0);
expect(parser.validate(syntaxSql).length).not.toBe(0);
});
test('Insert table ', () => {
const pos: CaretPosition = {
lineNumber: 1,
column: 18,
};
const syntaxes = parser.getSuggestionAtCaretPosition(
commentOtherLine(syntaxSql, pos.lineNumber),
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.TABLE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'tb']);
});
test('Select table ', () => {
const pos: CaretPosition = {
lineNumber: 3,
column: 20,
};
const syntaxes =
parser.getSuggestionAtCaretPosition(commentOtherLine(syntaxSql, pos.lineNumber), pos)
?.syntax ?? [];
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.TABLE
);
expect(
syntaxes.some((item) => item.syntaxContextType === EntityContextType.VIEW)
).toBeTruthy();
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
});
test('Create table ', () => {
const pos: CaretPosition = {
lineNumber: 5,
column: 17,
};
const syntaxes = parser.getSuggestionAtCaretPosition(
commentOtherLine(syntaxSql, pos.lineNumber),
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
});
test('DROP table ', () => {
const pos: CaretPosition = {
lineNumber: 7,
column: 26,
};
const syntaxes = parser.getSuggestionAtCaretPosition(
commentOtherLine(syntaxSql, pos.lineNumber),
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.TABLE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'a']);
});
test('Create view ', () => {
const pos: CaretPosition = {
lineNumber: 9,
column: 28,
};
const syntaxes = parser.getSuggestionAtCaretPosition(
commentOtherLine(syntaxSql, pos.lineNumber),
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.VIEW_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'v']);
});
test('Drop view ', () => {
const pos: CaretPosition = {
lineNumber: 11,
column: 15,
};
const syntaxes = parser.getSuggestionAtCaretPosition(
commentOtherLine(syntaxSql, pos.lineNumber),
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.VIEW
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'v']);
});
test('Use function', () => {
const pos: CaretPosition = {
lineNumber: 13,
column: 27,
};
const syntaxes = parser.getSuggestionAtCaretPosition(
commentOtherLine(syntaxSql, pos.lineNumber),
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['calculate_age']);
});
test('Create schema', () => {
const pos: CaretPosition = {
lineNumber: 15,
column: 17,
};
const syntaxes = parser.getSuggestionAtCaretPosition(
commentOtherLine(syntaxSql, pos.lineNumber),
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.DATABASE_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db']);
});
test('Drop schema', () => {
const pos: CaretPosition = {
lineNumber: 17,
column: 26,
};
const syntaxes = parser.getSuggestionAtCaretPosition(
commentOtherLine(syntaxSql, pos.lineNumber),
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['sch']);
});
test('Show Column From', () => {
const pos: CaretPosition = {
lineNumber: 19,
column: 21,
};
const syntaxes =
parser.getSuggestionAtCaretPosition(commentOtherLine(syntaxSql, pos.lineNumber), pos)
?.syntax ?? [];
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.TABLE
);
expect(
syntaxes.some((item) => item.syntaxContextType === EntityContextType.VIEW)
).toBeTruthy();
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['tb']);
});
test('Comment on column', () => {
const pos: CaretPosition = {
lineNumber: 21,
column: 22,
};
const syntaxes = parser.getSuggestionAtCaretPosition(
commentOtherLine(syntaxSql, pos.lineNumber),
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['tb', '.']);
});
test('RENAME column', () => {
const pos: CaretPosition = {
lineNumber: 23,
column: 30,
};
const syntaxes = parser.getSuggestionAtCaretPosition(
commentOtherLine(syntaxSql, pos.lineNumber),
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
});
test('RENAME column to', () => {
const pos: CaretPosition = {
lineNumber: 25,
column: 37,
};
const syntaxes = parser.getSuggestionAtCaretPosition(
commentOtherLine(syntaxSql, pos.lineNumber),
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
});
test('Drop column', () => {
const pos: CaretPosition = {
lineNumber: 27,
column: 31,
};
const syntaxes = parser.getSuggestionAtCaretPosition(
commentOtherLine(syntaxSql, pos.lineNumber),
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
});
test('Alter table add column', () => {
const pos: CaretPosition = {
lineNumber: 29,
column: 32,
};
const syntaxes = parser.getSuggestionAtCaretPosition(
commentOtherLine(syntaxSql, pos.lineNumber),
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['zi']);
});
test('Show comment on column', () => {
const pos: CaretPosition = {
lineNumber: 31,
column: 28,
};
const syntaxes = parser.getSuggestionAtCaretPosition(
commentOtherLine(syntaxSql, pos.lineNumber),
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['tb', '.', 'c']);
});
test('Insert into spec column', () => {
const pos: CaretPosition = {
lineNumber: 33,
column: 21,
};
const syntaxes = parser.getSuggestionAtCaretPosition(
commentOtherLine(syntaxSql, pos.lineNumber),
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
});
test('Select order by', () => {
const pos: CaretPosition = {
lineNumber: 35,
column: 27,
};
const syntaxes = parser.getSuggestionAtCaretPosition(
commentOtherLine(syntaxSql, pos.lineNumber),
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
});
test('Select group by', () => {
const pos: CaretPosition = {
lineNumber: 37,
column: 27,
};
const syntaxes = parser.getSuggestionAtCaretPosition(
commentOtherLine(syntaxSql, pos.lineNumber),
pos
)?.syntax;
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
});
});

View File

@ -0,0 +1,115 @@
import fs from 'fs';
import path from 'path';
import TrinoSQL from 'src/parser/trino';
import { CaretPosition } from 'src/parser/common/basic-parser-types';
import { commentOtherLine } from 'test/helper';
const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');
describe('Trino SQL Token Suggestion', () => {
const parser = new TrinoSQL();
test('After ALTER', () => {
const pos: CaretPosition = {
lineNumber: 1,
column: 7,
};
const suggestion = parser.getSuggestionAtCaretPosition(
commentOtherLine(tokenSql, pos.lineNumber),
pos
)?.keywords;
expect(suggestion).toMatchUnorderedArrary(['VIEW', 'MATERIALIZED', 'TABLE', 'SCHEMA']);
});
test('After CREATE', () => {
const pos: CaretPosition = {
lineNumber: 3,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
commentOtherLine(tokenSql, pos.lineNumber),
pos
)?.keywords;
expect(suggestion).toMatchUnorderedArrary([
'ROLE',
'VIEW',
'OR',
'MATERIALIZED',
'TABLE',
'SCHEMA',
]);
});
test('After DEALLOCATE', () => {
const pos: CaretPosition = {
lineNumber: 5,
column: 12,
};
const suggestion = parser.getSuggestionAtCaretPosition(
commentOtherLine(tokenSql, pos.lineNumber),
pos
)?.keywords;
expect(suggestion).toMatchUnorderedArrary(['PREPARE']);
});
test('After DELETE', () => {
const pos: CaretPosition = {
lineNumber: 7,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
commentOtherLine(tokenSql, pos.lineNumber),
pos
)?.keywords;
expect(suggestion).toMatchUnorderedArrary(['FROM']);
});
test('After DESCRIBE', () => {
const pos: CaretPosition = {
lineNumber: 9,
column: 10,
};
const suggestion = parser.getSuggestionAtCaretPosition(
commentOtherLine(tokenSql, pos.lineNumber),
pos
)?.keywords;
expect(suggestion).toMatchUnorderedArrary(['OUTPUT', 'INPUT']);
});
test('After DROP', () => {
const pos: CaretPosition = {
lineNumber: 11,
column: 6,
};
const suggestion = parser.getSuggestionAtCaretPosition(
commentOtherLine(tokenSql, pos.lineNumber),
pos
)?.keywords;
expect(suggestion).toMatchUnorderedArrary([
'ROLE',
'VIEW',
'MATERIALIZED',
'TABLE',
'SCHEMA',
]);
});
test('After INSERT', () => {
const pos: CaretPosition = {
lineNumber: 13,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
commentOtherLine(tokenSql, pos.lineNumber),
pos
)?.keywords;
expect(suggestion).toMatchUnorderedArrary(['INTO']);
});
});

View File

@ -0,0 +1,33 @@
import TrinoSQL from 'src/parser/trino';
import { readSQL } from 'test/helper';
const features = {
table: readSQL(__dirname, 'alter_table.sql'),
view: readSQL(__dirname, 'alter_view.sql'),
schema: readSQL(__dirname, 'alter_schema.sql'),
materializedView: readSQL(__dirname, 'alter_materialized_view.sql'),
};
describe('TrinoSQL Alter Statements Syntax Tests', () => {
const parser = new TrinoSQL();
features.table.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.view.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.schema.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.materializedView.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,16 @@
import TrinoSQL from 'src/parser/trino';
import { readSQL } from 'test/helper';
const features = {
analyze: readSQL(__dirname, 'analyze.sql'),
};
describe('TrinoSQL Analyze Statements Syntax Tests', () => {
const parser = new TrinoSQL();
// analyze statements
features.analyze.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,16 @@
import TrinoSQL from 'src/parser/trino';
import { readSQL } from 'test/helper';
const features = {
call: readSQL(__dirname, 'call.sql'),
};
describe('TrinoSQL Call Statements Syntax Tests', () => {
const parser = new TrinoSQL();
// call statements
features.call.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,16 @@
import TrinoSQL from 'src/parser/trino';
import { readSQL } from 'test/helper';
const features = {
comment: readSQL(__dirname, 'comment.sql'),
};
describe('TrinoSQL Comment Statements Syntax Tests', () => {
const parser = new TrinoSQL();
// Comment statements
features.comment.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,16 @@
import TrinoSQL from 'src/parser/trino';
import { readSQL } from 'test/helper';
const features = {
commit: readSQL(__dirname, 'commit.sql'),
};
describe('TrinoSQL Commit Statements Syntax Tests', () => {
const parser = new TrinoSQL();
// commit statements
features.commit.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,46 @@
import TrinoSQL from 'src/parser/trino';
import { readSQL } from 'test/helper';
const features = {
table: readSQL(__dirname, 'create_table.sql'),
view: readSQL(__dirname, 'create_view.sql'),
schema: readSQL(__dirname, 'create_schema.sql'),
role: readSQL(__dirname, 'create_role.sql'),
tableAsSelect: readSQL(__dirname, 'create_table_as_select.sql'),
materializedView: readSQL(__dirname, 'create_materialized_view.sql'),
};
describe('TrinoSQL Create Statements Syntax Tests', () => {
const parser = new TrinoSQL();
features.table.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.view.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.schema.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.tableAsSelect.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.role.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.materializedView.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,16 @@
import TrinoSQL from 'src/parser/trino';
import { readSQL } from 'test/helper';
const features = {
deallocatePrepare: readSQL(__dirname, 'deallocate_prepare.sql'),
};
describe('TrinoSQL deallocatePrepare Statements Syntax Tests', () => {
const parser = new TrinoSQL();
// deallocate_prepare statements
features.deallocatePrepare.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,16 @@
import TrinoSQL from 'src/parser/trino';
import { readSQL } from 'test/helper';
const features = {
delete: readSQL(__dirname, 'delete.sql'),
};
describe('TrinoSQL Delete Statements Syntax Tests', () => {
const parser = new TrinoSQL();
// delete statements
features.delete.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,16 @@
import TrinoSQL from 'src/parser/trino';
import { readSQL } from 'test/helper';
const features = {
deny: readSQL(__dirname, 'deny.sql'),
};
describe('TrinoSQL Deny Statements Syntax Tests', () => {
const parser = new TrinoSQL();
// deny statements
features.deny.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,16 @@
import TrinoSQL from 'src/parser/trino';
import { readSQL } from 'test/helper';
const features = {
describe: readSQL(__dirname, 'describe.sql'),
};
describe('TrinoSQL Describe Statements Syntax Tests', () => {
const parser = new TrinoSQL();
// describe statements
features.describe.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,46 @@
import TrinoSQL from 'src/parser/trino';
import { readSQL } from 'test/helper';
const features = {
table: readSQL(__dirname, 'drop_table.sql'),
view: readSQL(__dirname, 'drop_view.sql'),
schema: readSQL(__dirname, 'drop_schema.sql'),
role: readSQL(__dirname, 'drop_role.sql'),
column: readSQL(__dirname, 'drop_column.sql'),
materializedView: readSQL(__dirname, 'drop_materialized_view.sql'),
};
describe('TrinoSQL Drop Statements Syntax Tests', () => {
const parser = new TrinoSQL();
features.table.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.view.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.schema.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.column.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.role.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.materializedView.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,16 @@
import TrinoSQL from 'src/parser/trino';
import { readSQL } from 'test/helper';
const features = {
execute: readSQL(__dirname, 'execute.sql'),
};
describe('TrinoSQL Execute Statements Syntax Tests', () => {
const parser = new TrinoSQL();
// execute statements
features.execute.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,16 @@
import TrinoSQL from 'src/parser/trino';
import { readSQL } from 'test/helper';
const features = {
explain: readSQL(__dirname, 'explain.sql'),
};
describe('TrinoSQL Explain Statements Syntax Tests', () => {
const parser = new TrinoSQL();
// explain statements
features.explain.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,5 @@
ALTER MATERIALIZED VIEW people RENAME TO users;
ALTER MATERIALIZED VIEW IF EXISTS people RENAME TO users;
ALTER MATERIALIZED VIEW people SET PROPERTIES x = 'y';
ALTER MATERIALIZED VIEW people SET PROPERTIES foo = 123, bar = 456;
ALTER MATERIALIZED VIEW people SET PROPERTIES x = DEFAULT;

View File

@ -0,0 +1,7 @@
ALTER SCHEMA foo RENAME TO bar;
ALTER SCHEMA foo.bar RENAME TO baz;
ALTER SCHEMA "awesome schema"."awesome table" RENAME TO "even more awesome table";
ALTER SCHEMA web SET AUTHORIZATION alice;
ALTER SCHEMA web SET AUTHORIZATION ROLE alice;
ALTER SCHEMA web SET AUTHORIZATION USER alice;

View File

@ -0,0 +1,15 @@
ALTER TABLE users RENAME TO people;
ALTER TABLE IF EXISTS users RENAME TO people;
ALTER TABLE users ADD COLUMN zip varchar;
ALTER TABLE IF EXISTS users ADD COLUMN IF NOT EXISTS zip varchar;
ALTER TABLE users DROP COLUMN zip;
ALTER TABLE IF EXISTS users DROP COLUMN IF EXISTS zip;
ALTER TABLE users RENAME COLUMN id TO user_id;
ALTER TABLE IF EXISTS users RENAME column IF EXISTS id to user_id;
ALTER TABLE people SET AUTHORIZATION alice;
ALTER TABLE people SET AUTHORIZATION ROLE PUBLIC;
ALTER TABLE people SET PROPERTIES x = 'y';
ALTER TABLE people SET PROPERTIES foo = 123, "foo bar" = 456;
ALTER TABLE people SET PROPERTIES x = DEFAULT;
ALTER TABLE hive.schema.test_table EXECUTE optimize(file_size_threshold => '10MB');

View File

@ -0,0 +1,4 @@
ALTER VIEW people RENAME TO users;
ALTER VIEW people SET AUTHORIZATION alice;
ALTER VIEW people SET AUTHORIZATION USER alice;
ALTER VIEW people SET AUTHORIZATION ROLE alice;

View File

@ -0,0 +1,4 @@
ANALYZE foo;
ANALYZE foo WITH ( "string" = 'bar', "long" = 42, computed = concat('ban', 'ana'), a = ARRAY[ 'v1', 'v2' ] );
EXPLAIN ANALYZE foo;
EXPLAIN ANALYZE ANALYZE foo;

View File

@ -0,0 +1,3 @@
CALL foo();
CALL foo(123, a => 1, b => 'go', 456);
CALL catalog.schema.test();

View File

@ -0,0 +1,7 @@
COMMENT ON TABLE users IS 'master table';
COMMENT ON COLUMN users.name IS 'full name';
SHOW COMMENT ON COLUMN column1;
SHOW COMMENT ON TABLE table1;

View File

@ -0,0 +1,2 @@
COMMIT;
COMMIT WORK;

View File

@ -0,0 +1,5 @@
CREATE MATERIALIZED VIEW a AS SELECT * FROM t;
CREATE OR REPLACE MATERIALIZED VIEW catalog.schema.matview COMMENT 'A simple materialized view' AS SELECT * FROM catalog2.schema2.tab;
CREATE OR REPLACE MATERIALIZED VIEW catalog.schema.matview COMMENT 'A simple materialized view' AS SELECT * FROM catalog2.schema2.tab;
CREATE OR REPLACE MATERIALIZED VIEW catalog.schema.matview COMMENT 'A simple materialized view'WITH (partitioned_by = ARRAY ['dateint']) AS SELECT * FROM catalog2.schema2.tab;
CREATE OR REPLACE MATERIALIZED VIEW catalog.schema.matview COMMENT 'A partitioned materialized view' WITH (partitioned_by = ARRAY ['dateint']) AS WITH a (t, u) AS (SELECT * FROM x), b AS (SELECT * FROM a) TABLE b;

View File

@ -0,0 +1,10 @@
CREATE ROLE role;
CREATE ROLE role1 WITH ADMIN admin;
CREATE ROLE "role" WITH ADMIN "admin";
CREATE ROLE "ro le" WITH ADMIN "ad min";
CREATE ROLE "!@#$%^&*'" WITH ADMIN "ад""мін";
CREATE ROLE role2 WITH ADMIN USER admin1;
CREATE ROLE role2 WITH ADMIN ROLE role1;
CREATE ROLE role2 WITH ADMIN CURRENT_USER;
CREATE ROLE role2 WITH ADMIN CURRENT_ROLE;
CREATE ROLE role WITH ADMIN CURRENT_ROLE IN my_catalog;

View File

@ -0,0 +1,4 @@
CREATE SCHEMA test;
CREATE SCHEMA IF NOT EXISTS test;
CREATE SCHEMA test WITH (a = 'apple', b = 123);
CREATE SCHEMA "some name that contains space";

View File

@ -0,0 +1,2 @@
CREATE TABLE IF NOT EXISTS bar (LIKE like_table);
CREATE TABLE IF NOT EXISTS bar (LIKE like_table INCLUDING PROPERTIES);

View File

@ -0,0 +1,19 @@
CREATE TABLE foo AS SELECT * FROM t;
CREATE TABLE foo(x) AS SELECT a FROM t;
CREATE TABLE foo(x,y) AS SELECT a,b FROM t;
CREATE TABLE IF NOT EXISTS foo AS SELECT * FROM t;
CREATE TABLE IF NOT EXISTS foo(x) AS SELECT a FROM t;
CREATE TABLE IF NOT EXISTS foo(x,y) AS SELECT a,b FROM t;
CREATE TABLE foo AS SELECT * FROM t WITH NO DATA;
CREATE TABLE foo(x) AS SELECT a FROM t WITH NO DATA;
CREATE TABLE foo(x,y) AS SELECT a,b FROM t WITH NO DATA;
CREATE TABLE foo WITH ( string = 'bar', long = 42, computed = 'ban' || 'ana', a = ARRAY[ 'v1', 'v2' ] ) AS SELECT * FROM t;
CREATE TABLE foo(x) WITH ( string = 'bar', long = 42, computed = 'ban' || 'ana', a = ARRAY[ 'v1', 'v2' ] ) AS SELECT a FROM t;
CREATE TABLE foo(x,y) WITH ( string = 'bar', long = 42, computed = 'ban' || 'ana', a = ARRAY[ 'v1', 'v2' ] ) AS SELECT a,b FROM t;
CREATE TABLE foo WITH ( string = 'bar', long = 42, computed = 'ban' || 'ana', a = ARRAY[ 'v1', 'v2' ] ) AS SELECT * FROM t WITH NO DATA;
CREATE TABLE foo(x) WITH ( string = 'bar', long = 42, computed = 'ban' || 'ana', a = ARRAY[ 'v1', 'v2' ] ) AS SELECT a FROM t WITH NO DATA;
CREATE TABLE foo(x,y) WITH ( string = 'bar', long = 42, computed = 'ban' || 'ana', a = ARRAY[ 'v1', 'v2' ] ) AS SELECT a,b FROM t WITH NO DATA;
CREATE TABLE foo COMMENT 'test'WITH ( string = 'bar', long = 42, computed = 'ban' || 'ana', a = ARRAY[ 'v1', 'v2' ] ) AS SELECT * FROM t WITH NO DATA;
CREATE TABLE foo(x) COMMENT 'test'WITH ( string = 'bar', long = 42, computed = 'ban' || 'ana', a = ARRAY[ 'v1', 'v2' ] ) AS SELECT a FROM t WITH NO DATA;
CREATE TABLE foo(x,y) COMMENT 'test'WITH ( string = 'bar', long = 42, computed = 'ban' || 'ana', a = ARRAY[ 'v1', 'v2' ] ) AS SELECT a,b FROM t WITH NO DATA;
CREATE TABLE foo(x,y) COMMENT 'test'WITH ( "string" = 'bar', "long" = 42, computed = 'ban' || 'ana', a = ARRAY[ 'v1', 'v2' ] ) AS SELECT a,b FROM t WITH NO DATA;

View File

@ -0,0 +1,11 @@
CREATE VIEW a AS SELECT * FROM t;
CREATE OR REPLACE VIEW a AS SELECT * FROM t;
CREATE VIEW a SECURITY DEFINER AS SELECT * FROM t;
CREATE VIEW a SECURITY INVOKER AS SELECT * FROM t;
CREATE VIEW a COMMENT 'comment' SECURITY DEFINER AS SELECT * FROM t;
CREATE VIEW a COMMENT '' SECURITY INVOKER AS SELECT * FROM t;
CREATE VIEW a COMMENT 'comment' AS SELECT * FROM t;
CREATE VIEW a COMMENT '' AS SELECT * FROM t;
CREATE VIEW bar.foo AS SELECT * FROM t;
CREATE VIEW "awesome view" AS SELECT * FROM t;
CREATE VIEW "awesome schema"."awesome view" AS SELECT * FROM t;

View File

@ -0,0 +1 @@
DEALLOCATE PREPARE my_query;

View File

@ -0,0 +1,5 @@
DELETE FROM t;
DELETE FROM "awesome table";
DELETE FROM t WHERE a = b;
DELETE FROM lineitem
WHERE orderkey IN (SELECT orderkey FROM orders WHERE priority = 'LOW');

View File

@ -0,0 +1,4 @@
DENY INSERT, DELETE ON t TO u;
DENY UPDATE ON t TO u;
DENY ALL PRIVILEGES ON TABLE t TO USER u;
DENY SELECT ON SCHEMA s TO USER u;

View File

@ -0,0 +1,6 @@
-- DESCRIBE INPUT
DESCRIBE INPUT myquery;
-- DESCRIBE OUTPUT
DESCRIBE OUTPUT myquery;
-- DESCRIBE table_name
DESCRIBE table_name;

View File

@ -0,0 +1,5 @@
ALTER TABLE foo.t DROP COLUMN c;
ALTER TABLE "t x" DROP COLUMN "c d";
ALTER TABLE IF EXISTS foo.t DROP COLUMN c;
ALTER TABLE foo.t DROP COLUMN IF EXISTS c;
ALTER TABLE IF EXISTS foo.t DROP COLUMN IF EXISTS c;

View File

@ -0,0 +1,6 @@
DROP MATERIALIZED VIEW a;
DROP MATERIALIZED VIEW a.b;
DROP MATERIALIZED VIEW a.b.c;
DROP MATERIALIZED VIEW IF EXISTS a;
DROP MATERIALIZED VIEW IF EXISTS a.b;
DROP MATERIALIZED VIEW IF EXISTS a.b.c;

View File

@ -0,0 +1,4 @@
DROP ROLE role;
DROP ROLE "role";
DROP ROLE "ro le";
DROP ROLE "!@#$%^&*'ад""мін";

View File

@ -0,0 +1,5 @@
DROP SCHEMA test;
DROP SCHEMA test CASCADE;
DROP SCHEMA IF EXISTS test;
DROP SCHEMA IF EXISTS test RESTRICT;
DROP SCHEMA "some schema that contains space";

View File

@ -0,0 +1,8 @@
DROP TABLE a;
DROP TABLE a.b;
DROP TABLE a.b.c;
DROP TABLE a."b/y".c;
DROP TABLE IF EXISTS a;
DROP TABLE IF EXISTS a.b;
DROP TABLE IF EXISTS a.b.c;
DROP TABLE IF EXISTS a."b/y".c;

View File

@ -0,0 +1,6 @@
DROP VIEW a;
DROP VIEW a.b;
DROP VIEW a.b.c;
DROP VIEW IF EXISTS a;
DROP VIEW IF EXISTS a.b;
DROP VIEW IF EXISTS a.b.c;

View File

@ -0,0 +1,11 @@
PREPARE my_select1 FROM
SELECT name FROM nation;
EXECUTE my_select1;
-- execute with using
PREPARE my_select2 FROM
SELECT name FROM nation WHERE regionkey = ? and nationkey < ?;
EXECUTE my_select2 USING 1, 3;

View File

@ -0,0 +1,6 @@
EXPLAIN SELECT * FROM t;
EXPLAIN (TYPE LOGICAL) SELECT * FROM t;
EXPLAIN (TYPE LOGICAL, FORMAT TEXT) SELECT * FROM t;
-- EXPLAIN ANALYZE
EXPLAIN ANALYZE SELECT * FROM t;
EXPLAIN ANALYZE VERBOSE SELECT * FROM t;

View File

@ -0,0 +1,15 @@
GRANT INSERT, DELETE ON t TO u;
GRANT UPDATE ON t TO u;
GRANT SELECT ON t TO ROLE PUBLIC WITH GRANT OPTION;
GRANT ALL PRIVILEGES ON TABLE t TO USER u;
GRANT DELETE ON "t" TO ROLE "public" WITH GRANT OPTION;
GRANT SELECT ON SCHEMA s TO USER u;
-- GRANT role
GRANT role1 TO user1;
GRANT role1, role2, role3 TO user1, USER user2, ROLE role4 WITH ADMIN OPTION;
GRANT role1 TO user1 WITH ADMIN OPTION GRANTED BY admin;
GRANT role1 TO USER user1 WITH ADMIN OPTION GRANTED BY USER admin;
GRANT role1 TO ROLE role2 WITH ADMIN OPTION GRANTED BY ROLE admin;
GRANT role1 TO ROLE role2 GRANTED BY ROLE admin;
GRANT "role1" TO ROLE "role2" GRANTED BY ROLE "admin";
GRANT role1 TO user1 IN my_catalog;

View File

@ -0,0 +1 @@
SELECT * FROM a, b;

View File

@ -0,0 +1,12 @@
INSERT INTO orders
SELECT * FROM new_orders;
INSERT INTO cities VALUES (1, 'San Francisco');
INSERT INTO cities VALUES (2, 'San Jose'), (3, 'Oakland');
INSERT INTO nation (nationkey, name, regionkey, comment)
VALUES (26, 'POLAND', 3, 'no comment');
INSERT INTO nation (nationkey, name, regionkey)
VALUES (26, 'POLAND', 3);

View File

@ -0,0 +1,16 @@
SELECT * FROM orders MATCH_RECOGNIZE(
PARTITION BY custkey
ORDER BY orderdate
MEASURES
A.totalprice AS starting_price,
LAST(B.totalprice) AS bottom_price,
LAST(U.totalprice) AS top_price
ONE ROW PER MATCH
AFTER MATCH SKIP PAST LAST ROW
PATTERN (A B+ C+ D+)
SUBSET U = (C, D)
DEFINE
B AS totalprice < PREV(totalprice),
C AS totalprice > PREV(totalprice) AND totalprice <= A.totalprice,
D AS totalprice > PREV(totalprice)
);

View File

@ -0,0 +1 @@
MERGE INTO inventory AS i USING changes AS c ON i.part = c.part WHEN MATCHED AND c.action = 'mod' THEN UPDATE SET qty = qty + c.qty , ts = CURRENT_TIMESTAMP WHEN MATCHED AND c.action = 'del' THEN DELETE WHEN NOT MATCHED AND c.action = 'new' THEN INSERT (part, qty) VALUES (c.part, c.qty);

View File

@ -0,0 +1,9 @@
PREPARE myquery FROM select * from foo;
PREPARE myquery FROM SELECT ?, ? FROM foo;
PREPARE myquery FROM SELECT * FROM foo LIMIT ?;
PREPARE myquery FROM SELECT ?, ? FROM foo LIMIT ?;
PREPARE myquery FROM SELECT ? FROM foo FETCH FIRST ? ROWS ONLY;
PREPARE myquery FROM SELECT ?, ? FROM foo FETCH NEXT ? ROWS WITH TIES;
PREPARE myquery FROM SELECT ?, ? FROM foo OFFSET ? ROWS;
PREPARE myquery FROM SELECT ? FROM foo OFFSET ? ROWS LIMIT ?;
PREPARE myquery FROM SELECT ? FROM foo OFFSET ? ROWS FETCH FIRST ? ROWS WITH TIES;

View File

@ -0,0 +1,2 @@
REFRESH MATERIALIZED VIEW test;
REFRESH MATERIALIZED VIEW "some name that contains space";

View File

@ -0,0 +1,2 @@
RESET SESSION foo.bar;
RESET SESSION foo;

View File

@ -0,0 +1,6 @@
REVOKE INSERT, DELETE ON t FROM u;
REVOKE UPDATE ON t FROM u;
REVOKE GRANT OPTION FOR SELECT ON t FROM ROLE PUBLIC;
REVOKE ALL PRIVILEGES ON TABLE t FROM USER u;
REVOKE DELETE ON TABLE "t" FROM "u";
REVOKE SELECT ON SCHEMA s FROM USER u;

View File

@ -0,0 +1,7 @@
REVOKE role1 FROM user1;
REVOKE ADMIN OPTION FOR role1, role2, role3 FROM user1, USER user2, ROLE role4;
REVOKE ADMIN OPTION FOR role1 FROM user1 GRANTED BY admin;
REVOKE ADMIN OPTION FOR role1 FROM USER user1 GRANTED BY USER admin;
REVOKE role1 FROM ROLE role2 GRANTED BY ROLE admin;
REVOKE "role1" FROM ROLE "role2" GRANTED BY ROLE "admin";
REVOKE role1 FROM user1 IN my_catalog;

View File

@ -0,0 +1,2 @@
ROLLBACK;
ROLLBACK WORK;

View File

@ -0,0 +1,117 @@
-- DOUBLE IN Query
SELECT 123.456E7 FROM DUAL;
-- GROUP BY
SELECT * FROM table1 GROUP BY a;
SELECT * FROM table1 GROUP BY a, b;
SELECT * FROM table1 GROUP BY ();
-- GROUP BY GROUPING SETS
SELECT * FROM table1 GROUP BY GROUPING SETS (a);
SELECT a, b, GROUPING(a, b) FROM table1 GROUP BY GROUPING SETS ((a), (b));
-- GROUP BY ROLLUP
SELECT * FROM table1 GROUP BY ALL GROUPING SETS ((a, b), (a), ()), CUBE (c), ROLLUP (d);
SELECT * FROM table1 GROUP BY DISTINCT GROUPING SETS ((a, b), (a), ()), CUBE (c), ROLLUP (d);
-- GROUP BY CUBE
SELECT origin_state, destination_state, sum(package_weight)
FROM shipping
GROUP BY CUBE (origin_state, destination_state);
-- GROUP BY Combining multiple grouping expressions
SELECT origin_state, destination_state, origin_zip, sum(package_weight)
FROM shipping
GROUP BY
GROUPING SETS ((origin_state, destination_state)),
ROLLUP (origin_zip);
SELECT origin_state, destination_state, origin_zip, sum(package_weight)
FROM shipping
GROUP BY GROUPING SETS (
(origin_state, destination_state, origin_zip),
(origin_state, destination_state)
);
SELECT origin_state, destination_state, origin_zip, sum(package_weight)
FROM shipping
GROUP BY
GROUPING SETS ((origin_state, destination_state)),
GROUPING SETS ((origin_zip), ());
-- GROUP BY ALL and DISTINCT quantifiers
SELECT origin_state, destination_state, origin_zip, sum(package_weight)
FROM shipping
GROUP BY ALL
CUBE (origin_state, destination_state),
ROLLUP (origin_state, origin_zip);
SELECT origin_state, destination_state, origin_zip, sum(package_weight)
FROM shipping
GROUP BY GROUPING SETS (
(origin_state, destination_state, origin_zip),
(origin_state, origin_zip),
(origin_state, destination_state, origin_zip),
(origin_state, origin_zip),
(origin_state, destination_state),
(origin_state),
(origin_state, destination_state),
(origin_state),
(origin_state, destination_state),
(origin_state),
(destination_state),
()
);
SELECT origin_state, destination_state, origin_zip, sum(package_weight)
FROM shipping
GROUP BY DISTINCT
CUBE (origin_state, destination_state),
ROLLUP (origin_state, origin_zip);
SELECT origin_state, destination_state, origin_zip, sum(package_weight)
FROM shipping
GROUP BY GROUPING SETS (
(origin_state, destination_state, origin_zip),
(origin_state, origin_zip),
(origin_state, destination_state),
(origin_state),
(destination_state),
()
);
-- GROUP BY GROUPING operation
SELECT origin_state, origin_zip, destination_state, sum(package_weight),
grouping(origin_state, origin_zip, destination_state)
FROM shipping
GROUP BY GROUPING SETS (
(origin_state),
(origin_state, origin_zip),
(destination_state)
);
-- ORDER BY
SELECT * FROM table1 ORDER BY a;
-- Select expressions
SELECT (CAST(ROW(1, true) AS ROW(field1 bigint, field2 boolean))).* AS (alias1, alias2);
SELECT (CAST(ROW(1, true) AS ROW(field1 bigint, field2 boolean))).*;
SELECT (ROW(1, true)).*;
-- LIMIT
SELECT * FROM table1 LIMIT 2;
SELECT * FROM table1 LIMIT ALL;
SELECT * FROM (VALUES (1, '1'), (2, '2')) LIMIT ALL;
-- HAVING
SELECT count(*), mktsegment, nationkey,
CAST(sum(acctbal) AS bigint) AS totalbal
FROM customer
GROUP BY mktsegment, nationkey
HAVING sum(acctbal) > 5700000
ORDER BY totalbal DESC;
-- WINDOW
SELECT orderkey, clerk, totalprice,
rank() OVER w AS rnk
FROM orders
WINDOW w AS (PARTITION BY clerk ORDER BY totalprice DESC)
ORDER BY count() OVER w, clerk, rnk;
-- AGGREGATION FILTER / ORDER BY
SELECT SUM(x) FILTER (WHERE x > 4);
SELECT array_agg(x ORDER BY t.y) FROM t;
-- INTERSECT
SELECT 123 INTERSECT DISTINCT SELECT 123 INTERSECT ALL SELECT 123;
-- substring_built_in_function
SELECT substring('string' FROM 2);
SELECT substring('string' FROM 2 FOR 3);

View File

@ -0,0 +1,56 @@
SELECT * FROM UNNEST(ARRAY[1,2]) AS t(number);
SELECT * FROM UNNEST(
map_from_entries(
ARRAY[
('SQL',1974),
('Java', 1995)
]
)
) AS t(language, first_appeared_year);
SELECT *
FROM UNNEST(
ARRAY[
ROW('Java', 1995),
ROW('SQL' , 1974)],
ARRAY[
ROW(false),
ROW(true)]
) as t(language,first_appeared_year,declarative);
SELECT a, b, rownumber
FROM UNNEST (
ARRAY[2, 5],
ARRAY[7, 8, 9]
) WITH ORDINALITY AS t(a, b, rownumber);
SELECT * FROM UNNEST (ARRAY[]) AS t(value);
SELECT * FROM UNNEST (CAST(null AS ARRAY(integer))) AS t(number);
SELECT student, score
FROM (
VALUES
('John', ARRAY[7, 10, 9]),
('Mary', ARRAY[4, 8, 9])
) AS tests (student, scores)
CROSS JOIN UNNEST(scores) AS t(score);
SELECT numbers, animals, n, a
FROM (
VALUES
(ARRAY[2, 5], ARRAY['dog', 'cat', 'bird']),
(ARRAY[7, 8, 9], ARRAY['cow', 'pig'])
) AS x (numbers, animals)
CROSS JOIN UNNEST(numbers, animals) AS t (n, a);
SELECT runner, checkpoint
FROM (
VALUES
('Joe', ARRAY[10, 20, 30, 42]),
('Roger', ARRAY[10]),
('Dave', ARRAY[]),
('Levi', NULL)
) AS marathon (runner, checkpoints)
LEFT JOIN UNNEST(checkpoints) AS t(checkpoint) ON TRUE;

View File

@ -0,0 +1,28 @@
SELECT a, b
FROM (
SELECT a, MAX(b) AS b FROM t GROUP BY a
) AS x;
WITH x AS (SELECT a, MAX(b) AS b FROM t GROUP BY a)
SELECT a, b FROM x;
WITH
t1 AS (SELECT a, MAX(b) AS b FROM x GROUP BY a),
t2 AS (SELECT a, AVG(d) AS d FROM y GROUP BY a)
SELECT t1.*, t2.*
FROM t1
JOIN t2 ON t1.a = t2.a;
WITH
x AS (SELECT a FROM t),
y AS (SELECT a AS b FROM x),
z AS (SELECT b AS c FROM y)
SELECT c FROM z;
WITH RECURSIVE t(n) AS (
VALUES (1)
UNION ALL
SELECT n + 1 FROM t WHERE n < 4
)
SELECT sum(n) FROM t;

View File

@ -0,0 +1,4 @@
SELECT EXISTS(SELECT 1);
SELECT EXISTS(SELECT 1) = EXISTS(SELECT 2);
SELECT NOT EXISTS(SELECT 1) = EXISTS(SELECT 2);
SELECT (NOT EXISTS(SELECT 1)) = EXISTS(SELECT 2);

View File

@ -0,0 +1,6 @@
SELECT * FROM table1 FETCH FIRST 2 ROWS ONLY;
SELECT * FROM table1 FETCH NEXT ROW ONLY;
SELECT * FROM (VALUES (1, '1'), (2, '2')) FETCH FIRST ROW ONLY;
SELECT * FROM (VALUES (1, '1'), (2, '2')) FETCH FIRST ROW WITH TIES;
SELECT * FROM table1 FETCH FIRST 2 ROWS WITH TIES;
SELECT * FROM table1 FETCH NEXT ROW WITH TIES;

View File

@ -0,0 +1,23 @@
SELECT * FROM users CROSS JOIN UNNEST(friends) WITH ordinality;
-- LATERAL
SELECT name, x, y
FROM nation
CROSS JOIN LATERAL (SELECT name || ' :-' AS x)
CROSS JOIN LATERAL (SELECT x || ')' AS y);
-- Qualifying column names
SELECT nation.name, region.name
FROM nation
CROSS JOIN region;
SELECT n.name, r.name
FROM nation AS n
CROSS JOIN region AS r;
SELECT n.name, r.name
FROM nation n
CROSS JOIN region r;
SELECT * FROM a CROSS JOIN b LEFT JOIN c ON true;
SELECT * FROM a CROSS JOIN b NATURAL JOIN c CROSS JOIN d NATURAL JOIN e;

View File

@ -0,0 +1,4 @@
SELECT * FROM table1 OFFSET 2 ROWS;
SELECT * FROM table1 OFFSET 2;
SELECT * FROM (VALUES (1, '1'), (2, '2')) OFFSET 2 ROWS;
SELECT * FROM (VALUES (1, '1'), (2, '2')) OFFSET 2;

View File

@ -0,0 +1,6 @@
SELECT col1.f1, col2, col3.f1.f2.f3 FROM table1;
SELECT col1.f1[0], col2, col3[2].f2.f3, col4[4] FROM table1;
SELECT CAST(ROW(11, 12) AS ROW(COL0 INTEGER, COL1 INTEGER)).col0;
-- ALL COLUMNS
SELECT ROW (1, 'a', true).*;
SELECT ROW (1, 'a', true).* AS (f1, f2, f3);

View File

@ -0,0 +1,16 @@
-- UNION
SELECT 13
UNION
SELECT 42;
SELECT 13
UNION
SELECT * FROM (VALUES 42, 13);
-- INTERSECT
SELECT * FROM (VALUES 13, 42)
INTERSECT
SELECT 13;
-- EXCEPT
SELECT * FROM (VALUES 13, 42)
EXCEPT
SELECT 13;

View File

@ -0,0 +1,20 @@
-- EXISTS
SELECT name
FROM nation
WHERE EXISTS (
SELECT *
FROM region
WHERE region.regionkey = nation.regionkey
);
-- IN
SELECT name
FROM nation
WHERE regionkey IN (
SELECT regionkey
FROM region
WHERE name = 'AMERICA' OR name = 'AFRICA'
);
-- Scalar subquery
SELECT name
FROM nation
WHERE regionkey = (SELECT max(regionkey) FROM region);

View File

@ -0,0 +1,10 @@
SELECT *
FROM users TABLESAMPLE BERNOULLI (50);
SELECT *
FROM users TABLESAMPLE SYSTEM (75);
SELECT o.*, i.*
FROM orders o TABLESAMPLE SYSTEM (10)
JOIN lineitem i TABLESAMPLE BERNOULLI (40)
ON o.orderkey = i.orderkey;

View File

@ -0,0 +1,2 @@
SELECT 123 UNION DISTINCT
SELECT 123 UNION ALL SELECT 123;

View File

@ -0,0 +1,2 @@
SET PATH iLikeToEat.apples, andBananas;
SET PATH "schemas,with"."grammar.in", "their!names";

View File

@ -0,0 +1,5 @@
SET ROLE ALL;
SET ROLE NONE;
SET ROLE role;
SET ROLE "role";
SET ROLE role IN my_catalog;

View File

@ -0,0 +1,4 @@
SET SESSION foo = 'bar';
SET SESSION foo.bar = 'baz';
SET SESSION foo.bar.boo = 'baz';
SET SESSION foo.bar = 'ban' || 'ana';

View File

@ -0,0 +1,10 @@
SET TIME ZONE LOCAL;
SET TIME ZONE '-08:00';
SET TIME ZONE INTERVAL '10' HOUR;
SET TIME ZONE INTERVAL -'08:00' HOUR TO MINUTE;
SET TIME ZONE 'America/Los_Angeles';
SET TIME ZONE concat_ws('/', 'America', 'Los_Angeles');

View File

@ -0,0 +1,3 @@
SHOW CATALOGS;
SHOW CATALOGS LIKE '%';
SHOW CATALOGS LIKE '%$_%' ESCAPE '$';

View File

@ -0,0 +1,5 @@
SHOW COLUMNS FROM a;
SHOW COLUMNS FROM a.b;
SHOW COLUMNS FROM "awesome table";
SHOW COLUMNS FROM "awesome schema"."awesome table";
SHOW COLUMNS FROM a.b LIKE '%$_%' ESCAPE '$';

View File

@ -0,0 +1,16 @@
SHOW CREATE TABLE sf1.orders;
SHOW CREATE SCHEMA IF NOT EXISTS traffic;
SHOW CREATE VIEW test AS
SELECT orderkey, orderstatus, totalprice / 2 AS half
FROM orders;
SHOW CREATE MATERIALIZED VIEW cancelled_orders
AS
SELECT orderkey, totalprice
FROM orders
WHERE orderstatus = 3;

View File

@ -0,0 +1,3 @@
SHOW FUNCTIONS;
SHOW FUNCTIONS LIKE '%';
SHOW FUNCTIONS LIKE '%' ESCAPE '$';

View File

@ -0,0 +1,3 @@
SHOW GRANTS ON TABLE t;
SHOW GRANTS ON t;
SHOW GRANTS;

View File

@ -0,0 +1,2 @@
SHOW ROLE GRANTS;
SHOW ROLE GRANTS FROM catalog;

View File

@ -0,0 +1,6 @@
SHOW ROLES;
SHOW ROLES FROM foo;
SHOW ROLES IN foo;
SHOW CURRENT ROLES;
SHOW CURRENT ROLES FROM foo;
SHOW CURRENT ROLES IN foo;

View File

@ -0,0 +1,4 @@
SHOW SCHEMAS;
SHOW SCHEMAS FROM foo;
SHOW SCHEMAS IN foo LIKE '%';
SHOW SCHEMAS IN foo LIKE '%$_%' ESCAPE '$';

View File

@ -0,0 +1,3 @@
SHOW SESSION;
SHOW SESSION LIKE '%';
SHOW SESSION LIKE '%' ESCAPE '$';

View File

@ -0,0 +1 @@
SHOW STATS FOR a;

View File

@ -0,0 +1,3 @@
SHOW STATS FOR (SELECT * FROM a);
SHOW STATS FOR (SELECT * FROM a WHERE field > 0);
SHOW STATS FOR (SELECT * FROM a WHERE field > 0 or field < 0);

View File

@ -0,0 +1,4 @@
SHOW TABLES;
SHOW TABLES FROM a;
SHOW TABLES FROM "awesome schema";
SHOW TABLES IN a LIKE '%$_%' ESCAPE '$';

View File

@ -0,0 +1,10 @@
START TRANSACTION;
START TRANSACTION ISOLATION LEVEL READ UNCOMMITTED;
START TRANSACTION ISOLATION LEVEL READ COMMITTED;
START TRANSACTION ISOLATION LEVEL REPEATABLE READ;
START TRANSACTION ISOLATION LEVEL SERIALIZABLE;
START TRANSACTION READ ONLY;
START TRANSACTION READ WRITE;
START TRANSACTION ISOLATION LEVEL READ COMMITTED, READ ONLY;
START TRANSACTION READ ONLY, ISOLATION LEVEL READ COMMITTED;
START TRANSACTION READ WRITE, ISOLATION LEVEL SERIALIZABLE;

View File

@ -0,0 +1,2 @@
SELECT substring('string' FROM 2);
SELECT substring('string' FROM 2 FOR 3);

View File

@ -0,0 +1,3 @@
TRUNCATE TABLE a;
TRUNCATE TABLE a.b;
TRUNCATE TABLE a.b.c;

View File

@ -0,0 +1,10 @@
UPDATE foo_table SET bar = 23, baz = 3.1415E0, bletch = 'barf' WHERE (nothing = 'fun');
UPDATE new_hires SET manager = (
SELECT
e.name
FROM
employees e
WHERE
e.employee_id = new_hires.manager_id
);

View File

@ -0,0 +1,2 @@
USE hive.finance;
USE information_schema;

View File

@ -0,0 +1,2 @@
VALUES ('a', 1, 2.2e0), ('b', 2, 3.3e0);
SELECT * FROM (VALUES ('a', 1, 2.2e0), ('b', 2, 3.3e0));

View File

@ -0,0 +1,14 @@
SELECT cust_key, value OVER w, label OVER w
FROM orders
WINDOW w AS (
PARTITION BY cust_key
ORDER BY order_date
MEASURES
RUNNING LAST(total_price) AS value,
CLASSIFIER() AS label
ROWS BETWEEN CURRENT ROW AND UNBOUNDED FOLLOWING
PATTERN (A B+ C+)
DEFINE
B AS B.value < PREV (B.value),
C AS C.value > PREV (C.value)
);

View File

@ -0,0 +1,16 @@
import TrinoSQL from 'src/parser/trino';
import { readSQL } from 'test/helper';

const features = {
    grant: readSQL(__dirname, 'grant.sql'),
};

describe('TrinoSQL Grant Statements Syntax Tests', () => {
    const parser = new TrinoSQL();

    // grant statements
    features.grant.forEach((sql) => {
        it(sql, () => {
            expect(parser.validate(sql).length).toBe(0);
        });
    });
});

View File

@ -0,0 +1,15 @@
import TrinoSQL from 'src/parser/trino';
import { readSQL } from 'test/helper';

const features = {
    insertIntoTable: readSQL(__dirname, 'insert_into.sql'),
};

describe('TrinoSQL Insert Statements Syntax Tests', () => {
    const parser = new TrinoSQL();

    features.insertIntoTable.forEach((sql) => {
        it(sql, () => {
            expect(parser.validate(sql).length).toBe(0);
        });
    });
});
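
The two spec files above share one pattern: load a fixture with readSQL, then assert that parser.validate reports no errors for each statement. As a rough illustration of how the same fixture statements could also be run against the entity collection this commit introduces, the sketch below is a minimal, hypothetical script and not part of the commit; it assumes the getAllEntities method added in this PR accepts a SQL string and returns the collected entities, and the entityContextType/text fields are assumptions about the entity shape.

import TrinoSQL from 'src/parser/trino';

// Minimal sketch (not part of the commit): validate one statement from the
// fixtures above, then collect the entities it references.
const parser = new TrinoSQL();
const sql = `SELECT * FROM table1 FETCH FIRST 2 ROWS ONLY;`;

// Same assertion style as the spec files: zero syntax errors expected.
console.log('syntax errors:', parser.validate(sql).length);

// Assumption: getAllEntities(sql) returns the entities (tables, columns, etc.)
// gathered by the new entity collector, or null when nothing is collected.
const entities = parser.getAllEntities(sql) ?? [];
for (const entity of entities) {
    // entityContextType and text are assumed field names on the collected entity.
    console.log(entity.entityContextType, entity.text);
}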

Some files were not shown because too many files have changed in this diff.