refactor: standard naming (#278)
* refactor: rename flinksql to flink
* refactor: rename pgsql to postgresql
* refactor: rename trinosql to trino
* refactor: replace all default exports with named export
* refactor: rename basicParser to basicSQL
* refactor: rename basic-parser-types to types
* refactor: replace arrow func with plain func
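For reviewers, a minimal sketch of what the renames and the switch to named exports mean for consuming code. The "before" form (a default export under src/parser/pgsql) is an assumption based on the commit message; the "after" form is taken from the imports used in the new tests below.

// Before this refactor (assumed old layout, default export):
// import PgSQL from 'src/parser/pgsql';

// After this refactor: named exports from the renamed postgresql module.
import { PostgreSQL, PostgreSqlSplitListener } from 'src/parser/postgresql';

const postgresql = new PostgreSQL();
const errors = postgresql.validate('SELECT * FROM tb;'); // empty array when the SQL parses cleanly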
test/parser/postgresql/contextCollect/entityCollector.test.ts | 448 lines (new file)
@@ -0,0 +1,448 @@
|
||||
import { ParseTreeListener } from 'antlr4ng';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { PostgreSqlParserListener } from 'src/lib/postgresql/PostgreSqlParserListener';
|
||||
import { EntityContextType } from 'src/parser/common/types';
|
||||
import { StmtContextType } from 'src/parser/common/entityCollector';
|
||||
import {
|
||||
PostgreSQL,
|
||||
PostgreSqlEntityCollector,
|
||||
PostgreSqlSplitListener,
|
||||
} from 'src/parser/postgresql';
|
||||
|
||||
const commonSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'common.sql'), 'utf-8');
|
||||
|
||||
describe('PostgreSql entity collector tests', () => {
|
||||
const postgreSql = new PostgreSQL();
|
||||
const parseTree = postgreSql.parse(commonSql);
|
||||
const splitListener = new PostgreSqlSplitListener();
|
||||
postgreSql.listen(splitListener as PostgreSqlParserListener, parseTree);
|
||||
|
||||
test('validate common sql', () => {
|
||||
expect(postgreSql.validate(commonSql).length).toBe(0);
|
||||
});
|
||||
|
||||
test('split results', () => {
|
||||
expect(splitListener.statementsContext.length).toBe(10);
|
||||
});
|
||||
|
||||
test('create database', () => {
|
||||
const testingContext = splitListener.statementsContext[0];
|
||||
|
||||
const collectListener = new PostgreSqlEntityCollector(commonSql);
|
||||
postgreSql.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const sourceTableEntity = allEntities[0];
|
||||
|
||||
expect(sourceTableEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
|
||||
expect(sourceTableEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_DATABASE_STMT
|
||||
);
|
||||
expect(sourceTableEntity.text).toBe('music2');
|
||||
});
|
||||
|
||||
test('create table by select', () => {
|
||||
const testingContext = splitListener.statementsContext[1];
|
||||
|
||||
const collectListener = new PostgreSqlEntityCollector(commonSql);
|
||||
postgreSql.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(3);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('films_recent');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
startIndex: 110,
|
||||
endIndex: 121,
|
||||
line: 6,
|
||||
startColumn: 14,
|
||||
endColumn: 26,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
startIndex: 97,
|
||||
endIndex: 246,
|
||||
startLine: 6,
|
||||
endLine: 7,
|
||||
startColumn: 1,
|
||||
endColumn: 122,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.columns).toBeNull();
|
||||
|
||||
expect(tableCreateEntity.relatedEntities.length).toBe(2);
|
||||
tableCreateEntity.relatedEntities.forEach((relatedEntity) => {
|
||||
expect(relatedEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(allEntities.some((en) => relatedEntity === en)).toBeTruthy();
|
||||
});
|
||||
|
||||
expect(allEntities[1].text).toBe('films');
|
||||
expect(allEntities[1].belongStmt.rootStmt).toBe(allEntities[0].belongStmt);
|
||||
expect(allEntities[1].position).toEqual({
|
||||
startIndex: 168,
|
||||
endIndex: 172,
|
||||
line: 7,
|
||||
startColumn: 43,
|
||||
endColumn: 48,
|
||||
});
|
||||
|
||||
expect(allEntities[2].text).toBe('films2');
|
||||
expect(allEntities[2].belongStmt.rootStmt).toBe(allEntities[0].belongStmt);
|
||||
expect(allEntities[2].position).toEqual({
|
||||
startIndex: 179,
|
||||
endIndex: 184,
|
||||
line: 7,
|
||||
startColumn: 54,
|
||||
endColumn: 60,
|
||||
});
|
||||
});
|
||||
|
||||
test('create table of columns', () => {
|
||||
const testingContext = splitListener.statementsContext[2];
|
||||
|
||||
const collectListener = new PostgreSqlEntityCollector(commonSql);
|
||||
postgreSql.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('distributors');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
startIndex: 263,
|
||||
endIndex: 274,
|
||||
line: 9,
|
||||
startColumn: 14,
|
||||
endColumn: 26,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
startIndex: 250,
|
||||
endIndex: 377,
|
||||
startLine: 9,
|
||||
endLine: 13,
|
||||
startColumn: 1,
|
||||
endColumn: 2,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.columns.length).toBe(3);
|
||||
tableCreateEntity.columns.forEach((columEntity) => {
|
||||
expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
|
||||
expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||
expect(columEntity.text).toBe(
|
||||
commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
test('create foreign table by columns', () => {
|
||||
const testingContext = splitListener.statementsContext[3];
|
||||
|
||||
const collectListener = new PostgreSqlEntityCollector(commonSql);
|
||||
postgreSql.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('films');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
startIndex: 402,
|
||||
endIndex: 406,
|
||||
line: 15,
|
||||
startColumn: 22,
|
||||
endColumn: 27,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
startIndex: 381,
|
||||
endIndex: 626,
|
||||
startLine: 15,
|
||||
endLine: 23,
|
||||
startColumn: 1,
|
||||
endColumn: 19,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.columns.length).toBe(6);
|
||||
tableCreateEntity.columns.forEach((columEntity) => {
|
||||
expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
|
||||
expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||
expect(columEntity.text).toBe(
|
||||
commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
test('create foreign table of partition', () => {
|
||||
const testingContext = splitListener.statementsContext[4];
|
||||
|
||||
const collectListener = new PostgreSqlEntityCollector(commonSql);
|
||||
postgreSql.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('measurement_y2016m07');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
startIndex: 651,
|
||||
endIndex: 670,
|
||||
line: 25,
|
||||
startColumn: 22,
|
||||
endColumn: 42,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_TABLE_STMT
|
||||
);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
startIndex: 630,
|
||||
endIndex: 769,
|
||||
startLine: 25,
|
||||
endLine: 27,
|
||||
startColumn: 1,
|
||||
endColumn: 21,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.columns).toBeNull();
|
||||
|
||||
expect(tableCreateEntity.relatedEntities.length).toBe(1);
|
||||
|
||||
const relatedEntity = tableCreateEntity.relatedEntities[0];
|
||||
expect(relatedEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(allEntities.some((en) => relatedEntity === en)).toBeTruthy();
|
||||
expect(relatedEntity.text).toBe('measurement');
|
||||
});
|
||||
|
||||
test('create view by select', () => {
|
||||
const testingContext = splitListener.statementsContext[5];
|
||||
|
||||
const collectListener = new PostgreSqlEntityCollector(commonSql);
|
||||
postgreSql.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('comedies');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
startIndex: 785,
|
||||
endIndex: 792,
|
||||
line: 29,
|
||||
startColumn: 13,
|
||||
endColumn: 21,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
startIndex: 773,
|
||||
endIndex: 849,
|
||||
startLine: 29,
|
||||
endLine: 32,
|
||||
startColumn: 1,
|
||||
endColumn: 26,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.columns).toBeNull();
|
||||
|
||||
expect(tableCreateEntity.relatedEntities.length).toBe(1);
|
||||
|
||||
const relatedEntity = tableCreateEntity.relatedEntities[0];
|
||||
expect(relatedEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(allEntities.some((en) => relatedEntity === en)).toBeTruthy();
|
||||
expect(relatedEntity.text).toBe('films');
|
||||
});
|
||||
|
||||
test('create materialized view by columns', () => {
|
||||
const testingContext = splitListener.statementsContext[6];
|
||||
|
||||
const collectListener = new PostgreSqlEntityCollector(commonSql);
|
||||
postgreSql.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableCreateEntity = allEntities[0];
|
||||
|
||||
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
|
||||
expect(tableCreateEntity.text).toBe('comedies_mate');
|
||||
expect(tableCreateEntity.position).toEqual({
|
||||
startIndex: 878,
|
||||
endIndex: 890,
|
||||
line: 34,
|
||||
startColumn: 26,
|
||||
endColumn: 39,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
|
||||
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||
startIndex: 853,
|
||||
endIndex: 1055,
|
||||
startLine: 34,
|
||||
endLine: 39,
|
||||
startColumn: 1,
|
||||
endColumn: 17,
|
||||
});
|
||||
|
||||
expect(tableCreateEntity.columns.length).toBe(2);
|
||||
|
||||
tableCreateEntity.columns.forEach((columEntity) => {
|
||||
expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
|
||||
expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||
expect(columEntity.text).toBe(
|
||||
commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
test('select with clause', () => {
|
||||
const testingContext = splitListener.statementsContext[7];
|
||||
|
||||
const collectListener = new PostgreSqlEntityCollector(commonSql);
|
||||
postgreSql.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(2);
|
||||
|
||||
const tableEntity1 = allEntities[0];
|
||||
const tableEntity2 = allEntities[1];
|
||||
expect(tableEntity1.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity1.text).toBe('table_expression');
|
||||
expect(tableEntity1.position).toEqual({
|
||||
startIndex: 1109,
|
||||
endIndex: 1124,
|
||||
line: 41,
|
||||
startColumn: 51,
|
||||
endColumn: 67,
|
||||
});
|
||||
|
||||
expect(tableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(tableEntity1.belongStmt.position).toEqual({
|
||||
startIndex: 1094,
|
||||
endIndex: 1124,
|
||||
startLine: 41,
|
||||
endLine: 41,
|
||||
startColumn: 36,
|
||||
endColumn: 67,
|
||||
});
|
||||
expect(tableEntity1.columns).toBeNull();
|
||||
expect(tableEntity1.relatedEntities).toBeNull();
|
||||
|
||||
expect(tableEntity2.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableEntity2.text).toBe('table_expression1');
|
||||
expect(tableEntity2.position).toEqual({
|
||||
startIndex: 1182,
|
||||
endIndex: 1198,
|
||||
line: 42,
|
||||
startColumn: 55,
|
||||
endColumn: 72,
|
||||
});
|
||||
|
||||
expect(tableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||
expect(tableEntity2.belongStmt.position).toEqual({
|
||||
startIndex: 1059,
|
||||
endIndex: 1237,
|
||||
startLine: 41,
|
||||
endLine: 42,
|
||||
startColumn: 1,
|
||||
endColumn: 111,
|
||||
});
|
||||
expect(tableEntity2.columns).toBeNull();
|
||||
expect(tableEntity2.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('insert into table', () => {
|
||||
const testingContext = splitListener.statementsContext[8];
|
||||
|
||||
const collectListener = new PostgreSqlEntityCollector(commonSql);
|
||||
postgreSql.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const tableInsertEntity = allEntities[0];
|
||||
|
||||
expect(tableInsertEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||
expect(tableInsertEntity.text).toBe('insert_films');
|
||||
expect(tableInsertEntity.position).toEqual({
|
||||
startIndex: 1253,
|
||||
endIndex: 1264,
|
||||
line: 44,
|
||||
startColumn: 13,
|
||||
endColumn: 25,
|
||||
});
|
||||
|
||||
expect(tableInsertEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||
expect(tableInsertEntity.belongStmt.position).toEqual({
|
||||
startIndex: 1241,
|
||||
endIndex: 1355,
|
||||
startLine: 44,
|
||||
endLine: 45,
|
||||
startColumn: 1,
|
||||
endColumn: 55,
|
||||
});
|
||||
|
||||
expect(tableInsertEntity.columns).toBeNull();
|
||||
expect(tableInsertEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
|
||||
test('create function', () => {
|
||||
const testingContext = splitListener.statementsContext[9];
|
||||
|
||||
const collectListener = new PostgreSqlEntityCollector(commonSql);
|
||||
postgreSql.listen(collectListener as ParseTreeListener, testingContext);
|
||||
|
||||
const allEntities = collectListener.getEntities();
|
||||
expect(allEntities.length).toBe(1);
|
||||
|
||||
const functionEntity = allEntities[0];
|
||||
|
||||
expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
|
||||
expect(functionEntity.text).toBe('get_color_note');
|
||||
expect(functionEntity.position).toEqual({
|
||||
endColumn: 31,
|
||||
endIndex: 1388,
|
||||
line: 47,
|
||||
startColumn: 17,
|
||||
startIndex: 1375,
|
||||
});
|
||||
|
||||
expect(functionEntity.belongStmt.stmtContextType).toBe(
|
||||
StmtContextType.CREATE_FUNCTION_STMT
|
||||
);
|
||||
expect(functionEntity.belongStmt.position).toEqual({
|
||||
endColumn: 15,
|
||||
endIndex: 1477,
|
||||
endLine: 49,
|
||||
startColumn: 1,
|
||||
startIndex: 1359,
|
||||
startLine: 47,
|
||||
});
|
||||
|
||||
expect(functionEntity.columns).toBeNull();
|
||||
expect(functionEntity.relatedEntities).toBeNull();
|
||||
});
|
||||
});
|
test/parser/postgresql/contextCollect/fixtures/common.sql | 49 lines (new file)
@@ -0,0 +1,49 @@
CREATE DATABASE music2
LOCALE 'sv_SE.iso885915'
ENCODING LATIN9
TEMPLATE template0;

CREATE TABLE films_recent AS
SELECT films.name, films.startTime FROM films JOIN films2 ON films2.id = films.id WHERE films.startTime >= '2002-01-01';

CREATE TABLE distributors (
did integer,
name varchar(40),
CONSTRAINT con1 CHECK (did > 100 AND name <> '')
);

CREATE FOREIGN TABLE films (
code char(5) NOT NULL,
title varchar(40) NOT NULL,
did integer NOT NULL,
date_prod date,
kind varchar(10),
len interval hour to minute
)
SERVER film_server;

CREATE FOREIGN TABLE measurement_y2016m07
PARTITION OF measurement FOR VALUES FROM ('2016-07-01') TO ('2016-08-01')
SERVER server_07;

CREATE VIEW comedies AS
SELECT *
FROM films
WHERE kind = 'Comedy';

CREATE MATERIALIZED VIEW comedies_mate
(column_name1, column_name2)
WITH ( storage_parameter=3, storage_parameter1=4)
TABLESPACE tablespace_name
AS SELECT * FROM product
WITH NO DATA;

WITH RECURSIVE query_name (id) AS (SELECT id FROM table_expression)
SELECT DISTINCT ON (col1,col2) random() AS name1 FROM table_expression1 WHERE name1=name1 GROUP BY DISTINCT id;

INSERT INTO insert_films (code, title, did, date_prod, kind)
VALUES ('T_601', 'Yojimbo', 106, DEFAULT, 'Drama');

CREATE FUNCTION get_color_note (rainbow) RETURNS text AS
'SELECT note FROM my_colors WHERE color = $1'
LANGUAGE SQL;
test/parser/postgresql/errorStrategy.test.ts | 63 lines (new file)
@@ -0,0 +1,63 @@
import { PostgreSQL, PostgreSqlSplitListener } from 'src/parser/postgresql';
import { PostgreSqlParserListener } from 'src/lib/postgresql/PostgreSqlParserListener';

const validSQL1 = `INSERT INTO country_page_view
VALUES ('Chinese', 'mumiao', 18),
('Amercian', 'georage', 22);`;
const validSQL2 = 'SELECT * FROM tb;';
const inValidSQL = 'CREATE TABLE';

describe('PgSQL ErrorStrategy test', () => {
    const pgSQL = new PostgreSQL();

    // TODO: handle unexpected case
    // test('begin inValid', () => {
    //     const sql = [inValidSQL, validSQL1, validSQL2].join('\n');
    //     // parse with empty errorListener
    //     const parseTree = pgSQL.parse(sql, () => {});
    //     const splitListener = new PostgreSQLSplitListener();
    //     pgSQL.listen(splitListener as PostgreSqlParserListener, parseTree);

    //     const statementCount = splitListener.statementsContext.length;
    //     splitListener.statementsContext.map((item, index) => {
    //         if(index !== statementCount-1 && index !== statementCount - 2) {
    //             expect(item.exception).not.toBe(null);
    //         } else {
    //             expect(item.exception).toBe(null);
    //         }
    //     })
    // });

    test('middle inValid', () => {
        const sql = [validSQL1, inValidSQL, validSQL2].join('\n');
        // parse with empty errorListener
        const parseTree = pgSQL.parse(sql, () => {});
        const splitListener = new PostgreSqlSplitListener();
        pgSQL.listen(splitListener as PostgreSqlParserListener, parseTree);

        const statementCount = splitListener.statementsContext.length;
        splitListener.statementsContext.map((item, index) => {
            if (index !== statementCount - 1 && index !== 0) {
                expect(item.exception).not.toBe(null);
            } else {
                expect(item.exception).toBe(null);
            }
        });
    });

    test('end inValid', () => {
        const sql = [validSQL1, validSQL2, inValidSQL].join('\n');
        // parse with empty errorListener
        const parseTree = pgSQL.parse(sql, () => {});
        const splitListener = new PostgreSqlSplitListener();
        pgSQL.listen(splitListener as PostgreSqlParserListener, parseTree);

        splitListener.statementsContext.map((item, index) => {
            if (index !== 0 && index !== 1) {
                expect(item.exception).not.toBe(null);
            } else {
                expect(item.exception).toBe(null);
            }
        });
    });
});
test/parser/postgresql/lexer.test.ts | 12 lines (new file)
@@ -0,0 +1,12 @@
import { PostgreSQL } from 'src/parser/postgresql';

describe('PostgreSQL Lexer tests', () => {
    const mysqlParser = new PostgreSQL();

    const sql = 'select id,name,sex from user1;';
    const tokens = mysqlParser.getAllTokens(sql);

    test('token counts', () => {
        expect(tokens.length).toBe(12);
    });
});
test/parser/postgresql/listener.test.ts | 64 lines (new file)
@@ -0,0 +1,64 @@
import { PostgreSQL } from 'src/parser/postgresql';
import { PostgreSqlParserListener } from 'src/lib/postgresql/PostgreSqlParserListener';
import { ParseTreeListener } from 'antlr4ng';

describe('PostgreSQL Listener Tests', () => {
    const expectTableName = 'user1';
    const sql = `select id,name,sex from ${expectTableName};`;
    const postgresql = new PostgreSQL();

    const parseTree = postgresql.parse(sql);

    test('Listener enterTableName', async () => {
        let result = '';
        class MyListener implements PostgreSqlParserListener {
            enterTable_ref(ctx) {
                result = ctx.getText().toLowerCase();
            }
            visitTerminal() {}
            visitErrorNode() {}
            enterEveryRule() {}
            exitEveryRule() {}
        }
        const listenTableName = new MyListener();

        await postgresql.listen(listenTableName as ParseTreeListener, parseTree);
        expect(result).toBe(expectTableName);
    });

    test('Split sql listener', async () => {
        const singleStatementArr = [
            `SELECT id FROM games ORDER BY score;`,

            `INSERT INTO country_page_view
SELECT user1, cnt FROM page_view_source`,

            `CREATE GLOBAL TEMPORARY TABLE table_name (column_name, column_name2)
WITH ( storage_parameter = 4)
ON COMMIT PRESERVE ROWS
TABLESPACE tablespace_name
AS SELECT * FROM ad
WITH NO DATA;`,
        ];
        const sql = singleStatementArr.join('\n');
        const sqlSlices = postgresql.splitSQLByStatement(sql);

        expect(sqlSlices).not.toBeNull();

        // check text in result
        expect(sqlSlices.map((item) => item.text)).toEqual(singleStatementArr);

        // check startIndex and endIndex in result
        sqlSlices.forEach((slice, index) => {
            expect(sql.slice(slice.startIndex, slice.endIndex + 1)).toBe(singleStatementArr[index]);
        });

        // check lineNumber in result
        expect(sqlSlices[0].startLine).toBe(1);
        expect(sqlSlices[0].endLine).toBe(1);
        expect(sqlSlices[1].startLine).toBe(2);
        expect(sqlSlices[1].endLine).toBe(3);
        expect(sqlSlices[2].startLine).toBe(4);
        expect(sqlSlices[2].endLine).toBe(9);
    });
});
@@ -0,0 +1,21 @@
CREATE TABLE VALUES -- unfinished

CREATE UNLOGGED TABLE table1 (col1 int) INHERITS (table_parent) WITHOUT OIDS ON COMMIT DROP;

CREATE SCHEMA schemaname AUTHORIZATION username;

ALTER TABLE products ADD FOREIGN KEY (product_group_id) REFERENCES product_groups;

SELECT * FROM db. ; -- unfinished

INSERT INTO weather (date, city, temp_hi, temp_lo) VALUES ('1994-11-29', 'Hayward', 54, 37);

ANALYZE VERBOSE table_name ( column_name, column_name2);

INSERT INTO weather (date, city, temp_hi, temp_lo) VALUES ('1994-11-29', 'Hayward', 54, 37); -- unfinished

DROP TABLE products CASCADE;

DROP AGGREGATE aggname2(int);

INSERT INTO products (product_no, name, price) SELECT * FROM db. ; -- unfinished
@@ -0,0 +1,11 @@
SELECT FROM my_db.tb;

SELECT name, calculate_age(birthdate) AS age, FROM students;

INSERT INTO insert_tb SELECT FROM from_tb;

INSERT INTO insert_tb SELECT id, age, FROM from_tb;

CREATE TABLE sorted_census_data AS SELECT FROM unsorted_census_data;

CREATE TABLE sorted_census_data AS SELECT id, age, FROM unsorted_census_data;
@@ -0,0 +1,77 @@
CREATE TABLE db.s (column_name int) PARTITION BY LIST (column_name);

INSERT INTO db.tb ;

SELECT * FROM db. ;

ALTER TABLE db ALTER column_name DROP NOT NULL;

CREATE OR REPLACE VIEW db.v;

ALTER VIEW db.v ;

DROP VIEW db. ;

CREATE FUNCTION fn1;

DROP FUNCTION fn1;

CREATE DATABASE db;

DROP DATABASE db ;

ALTER DATABASE db ;

CREATE SCHEMA IF NOT EXISTS schema_name;

DROP SCHEMA IF EXISTS sch;

ALTER SCHEMA name RENAME TO new_name;

ALTER FOREIGN TABLE table_name RENAME column_name TO new_column_name;

ALTER MATERIALIZED VIEW view_name RENAME COLUMN column_name TO new_column_name;

ALTER MATERIALIZED VIEW view_name ALTER column_name SET STATISTICS 45;

ALTER PUBLICATION name ADD TABLE table_name ( column_name);

ALTER SEQUENCE name OWNED BY table_name.column_name;

ALTER TABLE db RENAME column_name TO new_column_name;

ANALYZE table_name (column_name);

COMMENT ON COLUMN relation_name.column_name IS NULL;

COPY table_name (col_name, col_name2) FROM 'filename' WITH (FORCE_QUOTE (clumn_name));

CREATE FOREIGN TABLE table_name (column_name int) SERVER server_name;

CREATE FUNCTION name (int) RETURNS TABLE (column_name column_type) LANGUAGE lang_name;

CREATE INDEX ON table_name (column_name) INCLUDE (col_name1, col_name2);

CREATE MATERIALIZED VIEW table_name (col_name);

CREATE STATISTICS ON column_name FROM table_name;

CREATE TRIGGER name AFTER UPDATE OF column_name ON table_name EXECUTE FUNCTION function_name;

GRANT SELECT ( column_name) ON table_name TO role_specification;

INSERT INTO table_name (column_name) DEFAULT VALUES ON CONFLICT (index_column_name) DO UPDATE SET column_name_exp = DEFAULT;

MERGE INTO wines w USING wine_stock_changes s ON s.winename = w.winename WHEN NOT MATCHED AND stock_delta > 0 THEN INSERT (col_name) VALUES(s.winename, s.stock_delta);

REVOKE SELECT (co_name) ON table_name FROM PUBLIC;

SECURITY LABEL ON COLUMN tablename.columnname IS string_literal;

WITH with_query_name (col_name) AS (SELECT id FROM table_expression) SEARCH DEPTH FIRST BY column_name SET column_name CYCLE col_name SET col_name USING col_name SELECT;

UPDATE tablename SET columnname = a + b, (col1, col2) = (a+3, b+4);

VACUUM tablename (col1, col2);

SELECT * FROM db.tbs GROUP BY (col1, col2) ORDER BY col3;
@@ -0,0 +1,12 @@
DROP ;

ALTER ;

INSERT ;

DELETE ;

CREATE ;
test/parser/postgresql/suggestion/multipleStatement.test.ts | 74 lines (new file)
@@ -0,0 +1,74 @@
import fs from 'fs';
import path from 'path';
import { PostgreSQL } from 'src/parser/postgresql';
import { CaretPosition, EntityContextType } from 'src/parser/common/types';

const syntaxSql = fs.readFileSync(
    path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
    'utf-8'
);

describe('PgSQL Multiple Statements Syntax Suggestion', () => {
    const postgresql = new PostgreSQL();

    test('Create table ', () => {
        const pos: CaretPosition = {
            lineNumber: 1,
            column: 14,
        };
        const syntaxes = postgresql.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
        );

        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
    });

    test('Select from table or view', () => {
        const pos: CaretPosition = {
            lineNumber: 9,
            column: 18,
        };
        const syntaxes = postgresql.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );
        const suggestionVw = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.VIEW
        );

        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
        expect(suggestionVw).not.toBeUndefined();
        expect(suggestionVw?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
    });

    test('Insert into table ', () => {
        const pos: CaretPosition = {
            lineNumber: 15,
            column: 13,
        };
        const syntaxes = postgresql.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
    });

    test('Insert into select from table ', () => {
        const pos: CaretPosition = {
            lineNumber: 21,
            column: 65,
        };
        const syntaxes = postgresql.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
    });
});
test/parser/postgresql/suggestion/suggestionWithEntity.test.ts | 156 lines (new file)
@@ -0,0 +1,156 @@
import fs from 'fs';
import path from 'path';
import { PostgreSQL } from 'src/parser/postgresql';
import { CaretPosition, EntityContextType } from 'src/parser/common/types';
import { commentOtherLine } from 'test/helper';

const syntaxSql = fs.readFileSync(
    path.join(__dirname, 'fixtures', 'suggestionWithEntity.sql'),
    'utf-8'
);

describe('PostgreSql Syntax Suggestion with collect entity', () => {
    const postgre = new PostgreSQL();

    test('select with no column', () => {
        const pos: CaretPosition = {
            lineNumber: 1,
            column: 8,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = postgre.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = postgre.getAllEntities(sql, pos);
        expect(entities.length).toBe(1);
        expect(entities[0].text).toBe('my_db.tb');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
    });

    test('select with columns with trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 3,
            column: 47,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = postgre.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = postgre.getAllEntities(sql, pos);
        expect(entities.length).toBe(1);
        expect(entities[0].text).toBe('students');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
    });

    test('insert into table as select with no column', () => {
        const pos: CaretPosition = {
            lineNumber: 5,
            column: 30,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = postgre.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = postgre.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('insert_tb');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('from_tb');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('insert into table as select with trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 7,
            column: 39,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = postgre.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = postgre.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('insert_tb');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('from_tb');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('create table as select with no column', () => {
        const pos: CaretPosition = {
            lineNumber: 9,
            column: 43,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = postgre.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = postgre.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('sorted_census_data');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('unsorted_census_data');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('create table as select with trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 11,
            column: 52,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = postgre.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = postgre.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('sorted_census_data');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('unsorted_census_data');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });
});
test/parser/postgresql/suggestion/syntaxSuggestion.test.ts | 973 lines (new file)
@@ -0,0 +1,973 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/types';
|
||||
import { PostgreSQL } from 'src/parser/postgresql';
|
||||
import { commentOtherLine } from 'test/helper';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),
|
||||
'utf-8'
|
||||
);
|
||||
|
||||
describe('Postgre SQL Syntax Suggestion', () => {
|
||||
const postgresql = new PostgreSQL();
|
||||
|
||||
test('Validate Syntax SQL', () => {
|
||||
expect(postgresql.validate(syntaxSql).length).not.toBe(0);
|
||||
expect(postgresql.validate(syntaxSql).length).not.toBe(0);
|
||||
expect(postgresql.validate(syntaxSql).length).not.toBe(0);
|
||||
});
|
||||
|
||||
test('Insert table ', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 3,
|
||||
column: 18,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'tb']);
|
||||
});
|
||||
|
||||
test('Alter table ', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 7,
|
||||
column: 15,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db']);
|
||||
});
|
||||
|
||||
test('Select table', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 5,
|
||||
column: 18,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
|
||||
});
|
||||
|
||||
test('Create table ', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 1,
|
||||
column: 18,
|
||||
};
|
||||
const pos1: CaretPosition = {
|
||||
lineNumber: 1,
|
||||
column: 31,
|
||||
};
|
||||
const pos2: CaretPosition = {
|
||||
lineNumber: 1,
|
||||
column: 67,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const syntaxes1 = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos1.lineNumber),
|
||||
pos1
|
||||
)?.syntax;
|
||||
const syntaxes2 = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos2.lineNumber),
|
||||
pos2
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
|
||||
);
|
||||
const suggestion1 = syntaxes1?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
const suggestion2 = syntaxes2?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 's']);
|
||||
expect(suggestion1).not.toBeUndefined();
|
||||
expect(suggestion1?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
expect(suggestion2).not.toBeUndefined();
|
||||
expect(suggestion2?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
});
|
||||
|
||||
test('Create view ', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 9,
|
||||
column: 28,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'v']);
|
||||
});
|
||||
|
||||
test('Drop view ', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 13,
|
||||
column: 14,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
|
||||
});
|
||||
|
||||
test('Alter view ', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 11,
|
||||
column: 16,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'v']);
|
||||
});
|
||||
|
||||
test('Create function ', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 15,
|
||||
column: 20,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['fn1']);
|
||||
});
|
||||
|
||||
test('Drop function', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 17,
|
||||
column: 18,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['fn1']);
|
||||
});
|
||||
|
||||
test('Create database', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 19,
|
||||
column: 19,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db']);
|
||||
});
|
||||
|
||||
test('Drop database', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 21,
|
||||
column: 17,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db']);
|
||||
});
|
||||
|
||||
test('Alter database', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 23,
|
||||
column: 18,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db']);
|
||||
});
|
||||
|
||||
test('Create schema', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 25,
|
||||
column: 40,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['schema_name']);
|
||||
});
|
||||
|
||||
test('Drop schema', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 27,
|
||||
column: 26,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['sch']);
|
||||
});
|
||||
|
||||
test('Alter schema', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 29,
|
||||
column: 18,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.DATABASE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['name']);
|
||||
});
|
||||
|
||||
test('Alter Foreign Table With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 31,
|
||||
column: 50,
|
||||
};
|
||||
const posCreate: CaretPosition = {
|
||||
lineNumber: 31,
|
||||
column: 69,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const syntaxesCreate = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, posCreate.lineNumber),
|
||||
posCreate
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestionCreate = syntaxesCreate?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
expect(suggestionCreate).not.toBeUndefined();
|
||||
expect(suggestionCreate?.wordRanges.map((token) => token.text)).toEqual([
|
||||
'new_column_name',
|
||||
]);
|
||||
});
|
||||
|
||||
test('Alter MATERIALIZED VIEW With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 33,
|
||||
column: 60,
|
||||
};
|
||||
const posCreate: CaretPosition = {
|
||||
lineNumber: 33,
|
||||
column: 79,
|
||||
};
|
||||
const posAction: CaretPosition = {
|
||||
lineNumber: 35,
|
||||
column: 52,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const syntaxesCreate = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, posCreate.lineNumber),
|
||||
posCreate
|
||||
)?.syntax;
|
||||
const syntaxesAction = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, posAction.lineNumber),
|
||||
posAction
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestionCreate = syntaxesCreate?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
const suggestionAction = syntaxesAction?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
expect(suggestionCreate).not.toBeUndefined();
|
||||
expect(suggestionCreate?.wordRanges.map((token) => token.text)).toEqual([
|
||||
'new_column_name',
|
||||
]);
|
||||
expect(suggestionAction).not.toBeUndefined();
|
||||
expect(suggestionAction?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
});
|
||||
|
||||
test('Alter PUBLICATION With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 37,
|
||||
column: 58,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
});
|
||||
|
||||
test('Alter SEQUENCE With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 39,
|
||||
column: 52,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([
|
||||
'table_name',
|
||||
'.',
|
||||
'column_name',
|
||||
]);
|
||||
});
|
||||
|
||||
test('Alter Table With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 7,
|
||||
column: 33,
|
||||
};
|
||||
const posCreate: CaretPosition = {
|
||||
lineNumber: 41,
|
||||
column: 34,
|
||||
};
|
||||
const posAction: CaretPosition = {
|
||||
lineNumber: 41,
|
||||
column: 53,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const syntaxesCreate = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, posCreate.lineNumber),
|
||||
posCreate
|
||||
)?.syntax;
|
||||
const syntaxesAction = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, posAction.lineNumber),
|
||||
posAction
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestionCreate = syntaxesCreate?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestionAction = syntaxesAction?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
expect(suggestionCreate).not.toBeUndefined();
|
||||
expect(suggestionCreate?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
expect(suggestionAction).not.toBeUndefined();
|
||||
expect(suggestionAction?.wordRanges.map((token) => token.text)).toEqual([
|
||||
'new_column_name',
|
||||
]);
|
||||
});
|
||||
|
||||
test('ANALYZE With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 43,
|
||||
column: 32,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
});
|
||||
|
||||
test('Comment On With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 45,
|
||||
column: 44,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
});
|
||||
|
||||
test('Copy With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 47,
|
||||
column: 26,
|
||||
};
|
||||
const pos1: CaretPosition = {
|
||||
lineNumber: 47,
|
||||
column: 37,
|
||||
};
|
||||
const pos2: CaretPosition = {
|
||||
lineNumber: 47,
|
||||
column: 84,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const syntaxes1 = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos1.lineNumber),
|
||||
pos1
|
||||
)?.syntax;
|
||||
const syntaxes2 = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos2.lineNumber),
|
||||
pos2
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion1 = syntaxes1?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion2 = syntaxes2?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['col_name']);
|
||||
expect(suggestion1).not.toBeUndefined();
|
||||
expect(suggestion1?.wordRanges.map((token) => token.text)).toEqual(['col_name2']);
|
||||
expect(suggestion2).not.toBeUndefined();
|
||||
expect(suggestion2?.wordRanges.map((token) => token.text)).toEqual(['clumn_name']);
|
||||
});
|
||||
|
||||
test('Create Foreign Table With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 49,
|
||||
column: 45,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
});
|
||||
|
||||
test('Create Function With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 51,
|
||||
column: 54,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
});
|
||||
|
||||
test('Create Index With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 53,
|
||||
column: 40,
|
||||
};
|
||||
const pos1: CaretPosition = {
|
||||
lineNumber: 53,
|
||||
column: 60,
|
||||
};
|
||||
const pos2: CaretPosition = {
|
||||
lineNumber: 53,
|
||||
column: 71,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const syntaxes1 = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos1.lineNumber),
|
||||
pos1
|
||||
)?.syntax;
|
||||
const syntaxes2 = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos2.lineNumber),
|
||||
pos2
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion1 = syntaxes1?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion2 = syntaxes2?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
expect(suggestion1).not.toBeUndefined();
|
||||
expect(suggestion1?.wordRanges.map((token) => token.text)).toEqual(['col_name1']);
|
||||
expect(suggestion2).not.toBeUndefined();
|
||||
expect(suggestion2?.wordRanges.map((token) => token.text)).toEqual(['col_name2']);
|
||||
});
|
||||
|
||||
test('Create MATERIALIZED VIEW With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 55,
|
||||
column: 46,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['col_name']);
|
||||
});
|
||||
|
||||
test('Create STATISTICS With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 57,
|
||||
column: 33,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
});
|
||||
|
||||
test('Create TRIGGER With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 59,
|
||||
column: 48,
|
||||
};
|
||||
const pos1: CaretPosition = {
|
||||
lineNumber: 59,
|
||||
column: 93,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const syntaxes1 = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos1.lineNumber),
|
||||
pos1
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion1 = syntaxes1?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.FUNCTION
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
expect(suggestion1).not.toBeUndefined();
|
||||
expect(suggestion1?.wordRanges.map((token) => token.text)).toEqual(['function_name']);
|
||||
});
|
||||
|
||||
test('GRANT With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 61,
|
||||
column: 27,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
});
|
||||
|
||||
test('Insert Into With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 63,
|
||||
column: 36,
|
||||
};
|
||||
const pos1: CaretPosition = {
|
||||
lineNumber: 63,
|
||||
column: 83,
|
||||
};
|
||||
const pos2: CaretPosition = {
|
||||
lineNumber: 63,
|
||||
column: 114,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const syntaxes1 = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos1.lineNumber),
|
||||
pos1
|
||||
)?.syntax;
|
||||
const syntaxes2 = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos2.lineNumber),
|
||||
pos2
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion1 = syntaxes1?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion2 = syntaxes2?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
expect(suggestion1).not.toBeUndefined();
|
||||
expect(suggestion1?.wordRanges.map((token) => token.text)).toEqual(['index_column_name']);
|
||||
expect(suggestion2).not.toBeUndefined();
|
||||
expect(suggestion2?.wordRanges.map((token) => token.text)).toEqual(['column_name_exp']);
|
||||
});
|
||||
|
||||
test('Merge With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 65,
|
||||
column: 132,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['col_name']);
|
||||
});
|
||||
test('REVOKE With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 67,
|
||||
column: 23,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['co_name']);
|
||||
});
|
||||
|
||||
test('SECURITY With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 69,
|
||||
column: 46,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([
|
||||
'tablename',
|
||||
'.',
|
||||
'columnname',
|
||||
]);
|
||||
});
|
||||
|
||||
test('Select With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 71,
|
||||
column: 31,
|
||||
};
|
||||
const pos1: CaretPosition = {
|
||||
lineNumber: 71,
|
||||
column: 103,
|
||||
};
|
||||
const pos2: CaretPosition = {
|
||||
lineNumber: 71,
|
||||
column: 119,
|
||||
};
|
||||
const pos3: CaretPosition = {
|
||||
lineNumber: 71,
|
||||
column: 134,
|
||||
};
|
||||
const pos4: CaretPosition = {
|
||||
lineNumber: 71,
|
||||
column: 147,
|
||||
};
|
||||
const pos5: CaretPosition = {
|
||||
lineNumber: 71,
|
||||
column: 162,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const syntaxes1 = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos1.lineNumber),
|
||||
pos1
|
||||
)?.syntax;
|
||||
const syntaxes2 = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos2.lineNumber),
|
||||
pos2
|
||||
)?.syntax;
|
||||
const syntaxes3 = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos3.lineNumber),
|
||||
pos3
|
||||
)?.syntax;
|
||||
const syntaxes4 = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos4.lineNumber),
|
||||
pos4
|
||||
)?.syntax;
|
||||
const syntaxes5 = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos5.lineNumber),
|
||||
pos5
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion1 = syntaxes1?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion2 = syntaxes2?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion3 = syntaxes3?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion4 = syntaxes4?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
const suggestion5 = syntaxes5?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['col_name']);
|
||||
expect(suggestion1).not.toBeUndefined();
|
||||
expect(suggestion1?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
expect(suggestion2).not.toBeUndefined();
|
||||
expect(suggestion2?.wordRanges.map((token) => token.text)).toEqual(['column_name']);
|
||||
expect(suggestion3).not.toBeUndefined();
|
||||
expect(suggestion3?.wordRanges.map((token) => token.text)).toEqual(['col_name']);
|
||||
expect(suggestion4).not.toBeUndefined();
|
||||
expect(suggestion4?.wordRanges.map((token) => token.text)).toEqual(['col_name']);
|
||||
expect(suggestion5).not.toBeUndefined();
|
||||
expect(suggestion5?.wordRanges.map((token) => token.text)).toEqual(['col_name']);
|
||||
});
|
||||
|
||||
test('Update With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 73,
|
||||
column: 32,
|
||||
};
|
||||
const pos1: CaretPosition = {
|
||||
lineNumber: 73,
|
||||
column: 47,
|
||||
};
|
||||
const pos2: CaretPosition = {
|
||||
lineNumber: 73,
|
||||
column: 53,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const syntaxes1 = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos1.lineNumber),
|
||||
pos1
|
||||
)?.syntax;
|
||||
const syntaxes2 = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos2.lineNumber),
|
||||
pos2
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion1 = syntaxes1?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion2 = syntaxes2?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['columnname']);
|
||||
expect(suggestion1).not.toBeUndefined();
|
||||
expect(suggestion1?.wordRanges.map((token) => token.text)).toEqual(['col1']);
|
||||
expect(suggestion2).not.toBeUndefined();
|
||||
expect(suggestion2?.wordRanges.map((token) => token.text)).toEqual(['col2']);
|
||||
});
|
||||
|
||||
test('Vacuum With Column', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 75,
|
||||
column: 23,
|
||||
};
|
||||
const pos1: CaretPosition = {
|
||||
lineNumber: 75,
|
||||
column: 29,
|
||||
};
|
||||
const syntaxes = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
const syntaxes1 = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos1.lineNumber),
|
||||
pos1
|
||||
)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion1 = syntaxes1?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['col1']);
|
||||
expect(suggestion1).not.toBeUndefined();
|
||||
expect(suggestion1?.wordRanges.map((token) => token.text)).toEqual(['col2']);
|
||||
});
|
||||
test('Select table with expression', () => {
|
||||
const pos1: CaretPosition = {
|
||||
lineNumber: 77,
|
||||
column: 36,
|
||||
};
|
||||
const pos2: CaretPosition = {
|
||||
lineNumber: 77,
|
||||
column: 42,
|
||||
};
|
||||
const pos3: CaretPosition = {
|
||||
lineNumber: 77,
|
||||
column: 57,
|
||||
};
|
||||
const syntaxes1 = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos1.lineNumber),
|
||||
pos1
|
||||
)?.syntax;
|
||||
const syntaxes2 = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos2.lineNumber),
|
||||
pos2
|
||||
)?.syntax;
|
||||
const syntaxes3 = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos3.lineNumber),
|
||||
pos3
|
||||
)?.syntax;
|
||||
const suggestion1 = syntaxes1?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion2 = syntaxes2?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
const suggestion3 = syntaxes3?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||
);
|
||||
|
||||
expect(suggestion1).not.toBeUndefined();
|
||||
expect(suggestion1?.wordRanges.map((token) => token.text)).toEqual(['col1']);
|
||||
expect(suggestion2).not.toBeUndefined();
|
||||
expect(suggestion2?.wordRanges.map((token) => token.text)).toEqual(['col2']);
|
||||
expect(suggestion3).not.toBeUndefined();
|
||||
expect(suggestion3?.wordRanges.map((token) => token.text)).toEqual(['col3']);
|
||||
});
|
||||
});
|
193
test/parser/postgresql/suggestion/tokenSuggestion.test.ts
Normal file
@ -0,0 +1,193 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { PostgreSQL } from 'src/parser/postgresql';
|
||||
import { CaretPosition } from 'src/parser/common/types';
|
||||
import { commentOtherLine } from 'test/helper';
|
||||
|
||||
const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');
|
||||
|
||||
describe('Postgres SQL Token Suggestion', () => {
|
||||
const postgresql = new PostgreSQL();
|
||||
test('After ALTER', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 3,
|
||||
column: 7,
|
||||
};
|
||||
const suggestion = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(tokenSql, pos.lineNumber),
|
||||
pos
|
||||
)?.keywords;
|
||||
expect(suggestion).toMatchUnorderedArrary([
|
||||
'TYPE',
|
||||
'TEXT',
|
||||
'STATISTICS',
|
||||
'TABLESPACE',
|
||||
'USER',
|
||||
'ROLE',
|
||||
'EVENT',
|
||||
'TRIGGER',
|
||||
'RULE',
|
||||
'FOREIGN',
|
||||
'TABLE',
|
||||
'MATERIALIZED',
|
||||
'VIEW',
|
||||
'INDEX',
|
||||
'SEQUENCE',
|
||||
'SUBSCRIPTION',
|
||||
'SERVER',
|
||||
'SCHEMA',
|
||||
'ROUTINE',
|
||||
'PUBLICATION',
|
||||
'PROCEDURE',
|
||||
'POLICY',
|
||||
'OPERATOR',
|
||||
'LANGUAGE',
|
||||
'PROCEDURAL',
|
||||
'GROUP',
|
||||
'FUNCTION',
|
||||
'DOMAIN',
|
||||
'DATABASE',
|
||||
'CONVERSION',
|
||||
'COLLATION',
|
||||
'AGGREGATE',
|
||||
'SYSTEM',
|
||||
'LARGE',
|
||||
'EXTENSION',
|
||||
'DEFAULT',
|
||||
]);
|
||||
});
|
||||
|
||||
test('After CREATE', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 9,
|
||||
column: 8,
|
||||
};
|
||||
const suggestion = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(tokenSql, pos.lineNumber),
|
||||
pos
|
||||
)?.keywords;
|
||||
expect(suggestion).toMatchUnorderedArrary([
|
||||
'RECURSIVE',
|
||||
'VIEW',
|
||||
'TEMPORARY',
|
||||
'TEMP',
|
||||
'LOCAL',
|
||||
'GLOBAL',
|
||||
'UNLOGGED',
|
||||
'OR',
|
||||
'RULE',
|
||||
'INDEX',
|
||||
'UNIQUE',
|
||||
'TABLE',
|
||||
'COLLATION',
|
||||
'TEXT',
|
||||
'TYPE',
|
||||
'OPERATOR',
|
||||
'AGGREGATE',
|
||||
'DATABASE',
|
||||
'USER',
|
||||
'ROLE',
|
||||
'EVENT',
|
||||
'TRIGGER',
|
||||
'CONSTRAINT',
|
||||
'TRANSFORM',
|
||||
'TABLESPACE',
|
||||
'STATISTICS',
|
||||
'SUBSCRIPTION',
|
||||
'SEQUENCE',
|
||||
'SCHEMA',
|
||||
'LANGUAGE',
|
||||
'PROCEDURAL',
|
||||
'TRUSTED',
|
||||
'POLICY',
|
||||
'PUBLICATION',
|
||||
'MATERIALIZED',
|
||||
'GROUP',
|
||||
'PROCEDURE',
|
||||
'FUNCTION',
|
||||
'FOREIGN',
|
||||
'SERVER',
|
||||
'EXTENSION',
|
||||
'DOMAIN',
|
||||
'CONVERSION',
|
||||
'DEFAULT',
|
||||
'CAST',
|
||||
'ASSERTION',
|
||||
'ACCESS',
|
||||
]);
|
||||
});
|
||||
|
||||
test('After DELETE', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 7,
|
||||
column: 8,
|
||||
};
|
||||
const suggestion = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(tokenSql, pos.lineNumber),
|
||||
pos
|
||||
)?.keywords;
|
||||
expect(suggestion).toMatchUnorderedArrary(['FROM']);
|
||||
});
|
||||
|
||||
test('After DROP', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 1,
|
||||
column: 6,
|
||||
};
|
||||
const suggestion = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(tokenSql, pos.lineNumber),
|
||||
pos
|
||||
)?.keywords;
|
||||
expect(suggestion).toMatchUnorderedArrary([
|
||||
'OPERATOR',
|
||||
'ROUTINE',
|
||||
'PROCEDURE',
|
||||
'FUNCTION',
|
||||
'AGGREGATE',
|
||||
'DATABASE',
|
||||
'USER',
|
||||
'GROUP',
|
||||
'ROLE',
|
||||
'TRANSFORM',
|
||||
'TABLESPACE',
|
||||
'SUBSCRIPTION',
|
||||
'VIEW',
|
||||
'OWNED',
|
||||
'CAST',
|
||||
'INDEX',
|
||||
'DOMAIN',
|
||||
'TYPE',
|
||||
'TRIGGER',
|
||||
'RULE',
|
||||
'POLICY',
|
||||
'SCHEMA',
|
||||
'SERVER',
|
||||
'PUBLICATION',
|
||||
'LANGUAGE',
|
||||
'PROCEDURAL',
|
||||
'FOREIGN',
|
||||
'EXTENSION',
|
||||
'EVENT',
|
||||
'ACCESS',
|
||||
'TEXT',
|
||||
'STATISTICS',
|
||||
'CONVERSION',
|
||||
'COLLATION',
|
||||
'MATERIALIZED',
|
||||
'SEQUENCE',
|
||||
'TABLE',
|
||||
]);
|
||||
});
|
||||
|
||||
test('After INSERT', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 5,
|
||||
column: 8,
|
||||
};
|
||||
const suggestion = postgresql.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(tokenSql, pos.lineNumber),
|
||||
pos
|
||||
)?.keywords;
|
||||
expect(suggestion).toMatchUnorderedArrary(['INTO']);
|
||||
});
|
||||
});
|
16
test/parser/postgresql/syntax/alterStatement.test.ts
Normal file
@ -0,0 +1,16 @@
import { PostgreSQL } from 'src/parser/postgresql';
import { readSQL } from 'test/helper';

const postgresql = new PostgreSQL();

const features = {
    alters: readSQL(__dirname, 'alter.sql'),
};

describe('PgSQL Alter Syntax Tests', () => {
    features.alters.forEach((alters) => {
        it(alters, () => {
            expect(postgresql.validate(alters).length).toBe(0);
        });
    });
});
16
test/parser/postgresql/syntax/createStatement.test.ts
Normal file
@ -0,0 +1,16 @@
import { PostgreSQL } from 'src/parser/postgresql';
import { readSQL } from 'test/helper';

const postgresql = new PostgreSQL();

const features = {
    creates: readSQL(__dirname, 'create.sql'),
};

describe('PgSQL Create Syntax Tests', () => {
    features.creates.forEach((create) => {
        it(create, () => {
            expect(postgresql.validate(create).length).toBe(0);
        });
    });
});
16
test/parser/postgresql/syntax/deleteStatement.test.ts
Normal file
@ -0,0 +1,16 @@
import { PostgreSQL } from 'src/parser/postgresql';
import { readSQL } from 'test/helper';

const postgresql = new PostgreSQL();

const features = {
    deletes: readSQL(__dirname, 'delete.sql'),
};

describe('PgSQL Delete Syntax Tests', () => {
    features.deletes.forEach((deleteItem) => {
        it(deleteItem, () => {
            expect(postgresql.validate(deleteItem).length).toBe(0);
        });
    });
});
16
test/parser/postgresql/syntax/dropStatement.test.ts
Normal file
@ -0,0 +1,16 @@
import { PostgreSQL } from 'src/parser/postgresql';
import { readSQL } from 'test/helper';

const postgresql = new PostgreSQL();

const features = {
    drops: readSQL(__dirname, 'drop.sql'),
};

describe('PgSQL Drop Syntax Tests', () => {
    features.drops.forEach((drop) => {
        it(drop, () => {
            expect(postgresql.validate(drop).length).toBe(0);
        });
    });
});
413
test/parser/postgresql/syntax/fixtures/alter.sql
Normal file
@ -0,0 +1,413 @@
|
||||
-- Modifying Tables
|
||||
-- Adding a Column
|
||||
ALTER TABLE products ADD COLUMN description text CHECK (description <> '');
|
||||
|
||||
-- Removing a Column
|
||||
ALTER TABLE products DROP COLUMN description;
|
||||
|
||||
ALTER TABLE products DROP COLUMN description CASCADE;
|
||||
|
||||
-- Adding a Constraint
|
||||
ALTER TABLE products ADD CHECK (name <> '');
|
||||
|
||||
ALTER TABLE products ADD CONSTRAINT some_name UNIQUE (product_no);
|
||||
|
||||
ALTER TABLE products ADD FOREIGN KEY (product_group_id) REFERENCES product_groups;
|
||||
|
||||
-- Removing a Constraint
|
||||
ALTER TABLE products DROP CONSTRAINT some_name;
|
||||
|
||||
ALTER TABLE products ALTER COLUMN product_no SET NOT NULL;
|
||||
|
||||
-- Changing a Column's Default Value
|
||||
ALTER TABLE products ALTER COLUMN price SET DEFAULT 7.77;
|
||||
|
||||
ALTER TABLE products ALTER COLUMN price DROP DEFAULT;
|
||||
|
||||
-- Changing a Column's Data Type
|
||||
ALTER TABLE products ALTER COLUMN price TYPE numeric(10,2);
|
||||
|
||||
-- Renaming a Column
|
||||
ALTER TABLE products RENAME COLUMN product_no TO product_number;
|
||||
|
||||
-- Renaming a Table
|
||||
ALTER TABLE products RENAME TO items;
|
||||
|
||||
-- Managing Partitions
|
||||
ALTER TABLE measurement_y2006m02 NO INHERIT measurement;
|
||||
|
||||
ALTER TABLE measurement_y2008m02 ADD CONSTRAINT y2008m02
|
||||
CHECK ( logdate >= DATE '2008-02-01' AND logdate < DATE '2008-03-01' );
|
||||
|
||||
-- ALTER AGGREGATE
|
||||
ALTER AGGREGATE name ( int, integer) RENAME TO new_name;
|
||||
ALTER AGGREGATE name ( text,int ) OWNER TO new_owner;
|
||||
ALTER AGGREGATE name ( integer ) SET SCHEMA new_schema;
|
||||
|
||||
-- ALTER COLLATION
|
||||
ALTER COLLATION name RENAME TO new_name;
|
||||
ALTER COLLATION name OWNER TO new_owner;
|
||||
ALTER COLLATION name SET SCHEMA new_schema;
|
||||
|
||||
-- ALTER CONVERSION
|
||||
ALTER CONVERSION name RENAME TO new_name;
|
||||
ALTER CONVERSION name OWNER TO new_owner;
|
||||
ALTER CONVERSION name SET SCHEMA new_schema;
|
||||
|
||||
-- ALTER DATABASE
|
||||
ALTER DATABASE name WITH CONNECTION LIMIT connlimit;
|
||||
ALTER DATABASE name RENAME TO new_name;
|
||||
ALTER DATABASE name OWNER TO new_owner;
|
||||
ALTER DATABASE name OWNER TO CURRENT_ROLE;
|
||||
ALTER DATABASE name OWNER TO CURRENT_USER;
|
||||
ALTER DATABASE name OWNER TO SESSION_USER;
|
||||
ALTER DATABASE name SET TABLESPACE new_tablespace;
|
||||
ALTER DATABASE name SET configuration_parameter TO DEFAULT;
|
||||
ALTER DATABASE name SET configuration_parameter FROM CURRENT;
|
||||
ALTER DATABASE name RESET configuration_parameter;
|
||||
ALTER DATABASE name RESET ALL;
|
||||
ALTER DATABASE name;
|
||||
|
||||
-- ALTER DEFAULT PRIVILEGES
|
||||
ALTER DEFAULT PRIVILEGES
|
||||
FOR ROLE target_role, target_role2
|
||||
IN SCHEMA schema_name, schema_name2
|
||||
GRANT SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER
|
||||
ON TABLES
|
||||
TO GROUP role_name, PUBLIC WITH GRANT OPTION;
|
||||
|
||||
-- ALTER DOMAIN
|
||||
ALTER DOMAIN name SET DEFAULT expression;
|
||||
ALTER DOMAIN name DROP DEFAULT;
|
||||
ALTER DOMAIN name SET NOT NULL;
|
||||
ALTER DOMAIN name ADD CONSTRAINT constraint_name
|
||||
CHECK (char_length(VALUE) = 5) NOT VALID;
|
||||
ALTER DOMAIN name
|
||||
DROP CONSTRAINT IF EXISTS constraint_name RESTRICT;
|
||||
ALTER DOMAIN name
|
||||
RENAME CONSTRAINT constraint_name TO new_constraint_name;
|
||||
ALTER DOMAIN name
|
||||
VALIDATE CONSTRAINT constraint_name;
|
||||
ALTER DOMAIN name
|
||||
OWNER TO new_owner;
|
||||
ALTER DOMAIN name
|
||||
RENAME TO new_name;
|
||||
ALTER DOMAIN name
|
||||
SET SCHEMA new_schema;
|
||||
|
||||
-- ALTER EVENT TRIGGER
|
||||
ALTER EVENT TRIGGER name DISABLE;
|
||||
ALTER EVENT TRIGGER name ENABLE REPLICA;
|
||||
ALTER EVENT TRIGGER name OWNER TO new_owner;
|
||||
ALTER EVENT TRIGGER name RENAME TO new_name;
|
||||
|
||||
-- ALTER EXTENSION
|
||||
ALTER EXTENSION name UPDATE TO new_version;
|
||||
ALTER EXTENSION name SET SCHEMA new_schema;
|
||||
ALTER EXTENSION name ADD AGGREGATE agg_name (agg_type, agg_type2);
|
||||
ALTER EXTENSION name DROP CAST (source_type AS target_type);
|
||||
ALTER EXTENSION name DROP COLLATION object_name;
|
||||
ALTER EXTENSION name DROP CONVERSION object_name;
|
||||
ALTER EXTENSION name DROP DOMAIN object_name;
|
||||
ALTER EXTENSION name DROP EVENT TRIGGER object_name;
|
||||
ALTER EXTENSION name DROP FOREIGN DATA WRAPPER object_name;
|
||||
ALTER EXTENSION name DROP FOREIGN TABLE object_name ;
|
||||
ALTER EXTENSION name DROP FUNCTION function_name ( IN argname int);
|
||||
ALTER EXTENSION name DROP MATERIALIZED VIEW object_name;
|
||||
ALTER EXTENSION name DROP OPERATOR > (int, int);
|
||||
ALTER EXTENSION name DROP OPERATOR CLASS object_name USING index_method;
|
||||
ALTER EXTENSION name DROP OPERATOR FAMILY object_name USING index_method;
|
||||
ALTER EXTENSION name DROP PROCEDURAL LANGUAGE object_name;
|
||||
ALTER EXTENSION name DROP SCHEMA object_name;
|
||||
ALTER EXTENSION name DROP SEQUENCE object_name;
|
||||
ALTER EXTENSION name DROP SERVER object_name;
|
||||
ALTER EXTENSION name DROP TABLE object_name;
|
||||
ALTER EXTENSION name DROP TEXT SEARCH CONFIGURATION object_name;
|
||||
ALTER EXTENSION name DROP TEXT SEARCH DICTIONARY object_name;
|
||||
ALTER EXTENSION name DROP TEXT SEARCH PARSER object_name;
|
||||
ALTER EXTENSION name DROP TEXT SEARCH TEMPLATE object_name;
|
||||
ALTER EXTENSION name DROP TYPE object_name;
|
||||
ALTER EXTENSION name DROP VIEW object_name;
|
||||
|
||||
-- ALTER FOREIGN DATA WRAPPER
|
||||
ALTER FOREIGN DATA WRAPPER name
|
||||
HANDLER handler_function
|
||||
VALIDATOR validator_function
|
||||
OPTIONS (ADD option_1 'value', DROP option_2 'value');
|
||||
ALTER FOREIGN DATA WRAPPER name OWNER TO new_owner;
|
||||
ALTER FOREIGN DATA WRAPPER name RENAME TO new_name;
|
||||
|
||||
-- ALTER FOREIGN TABLE
|
||||
ALTER FOREIGN TABLE IF EXISTS name
|
||||
ADD COLUMN column_name data_type COLLATE collation_name, DROP COLUMN IF EXISTS column_name CASCADE, ALTER COLUMN column_name SET DATA TYPE data_type, OWNER TO new_owner, OPTIONS (ADD option_1 'value', DROP option_2 'value');
|
||||
ALTER FOREIGN TABLE table_name ALTER column_name DROP DEFAULT;
|
||||
ALTER FOREIGN TABLE IF EXISTS name
|
||||
RENAME COLUMN column_name TO new_column_name;
|
||||
ALTER FOREIGN TABLE IF EXISTS name
|
||||
RENAME TO new_name;
|
||||
ALTER FOREIGN TABLE IF EXISTS name
|
||||
SET SCHEMA new_schema;
|
||||
|
||||
-- ALTER FUNCTION
|
||||
ALTER FUNCTION name (VARIADIC argname argtype)
|
||||
CALLED ON NULL INPUT RESTRICT;
|
||||
ALTER FUNCTION name ( INOUT argname argtype, OUT argname argtype)
|
||||
RENAME TO new_name;
|
||||
ALTER FUNCTION name (INOUT argname argtype)
|
||||
OWNER TO new_owner;
|
||||
ALTER FUNCTION name (IN argname argtype)
|
||||
SET SCHEMA new_schema;
|
||||
|
||||
-- ALTER GROUP
|
||||
ALTER GROUP group_name ADD USER user_name, user_name1;
|
||||
ALTER GROUP group_name DROP USER user_name,user_name2;
|
||||
ALTER GROUP group_name RENAME TO new_name;
|
||||
|
||||
-- ALTER INDEX
|
||||
ALTER INDEX IF EXISTS name RENAME TO new_name;
|
||||
ALTER INDEX IF EXISTS name SET TABLESPACE tablespace_name;
|
||||
ALTER INDEX IF EXISTS name SET ( storage_parameter = value2 );
|
||||
ALTER INDEX IF EXISTS name RESET ( storage_parameter );
|
||||
ALTER INDEX name ATTACH PARTITION index_name;
|
||||
ALTER INDEX name NO DEPENDS ON EXTENSION extension_name;
|
||||
ALTER INDEX IF EXISTS name ALTER COLUMN column_number
|
||||
SET STATISTICS 5;
|
||||
ALTER INDEX ALL IN TABLESPACE name OWNED BY role_name,role_name2
|
||||
SET TABLESPACE new_tablespace NOWAIT;
|
||||
|
||||
|
||||
-- ALTER LANGUAGE
|
||||
ALTER PROCEDURAL LANGUAGE name RENAME TO new_name;
|
||||
ALTER LANGUAGE name OWNER TO new_owner;
|
||||
ALTER LANGUAGE name OWNER TO CURRENT_ROLE;
|
||||
ALTER LANGUAGE name OWNER TO CURRENT_USER;
|
||||
ALTER LANGUAGE name OWNER TO SESSION_USER;
|
||||
|
||||
-- ALTER LARGE OBJECT
|
||||
ALTER LARGE OBJECT 32423 OWNER TO new_owner;
|
||||
|
||||
-- ALTER MATERIALIZED VIEW
|
||||
ALTER MATERIALIZED VIEW IF EXISTS name
|
||||
SET WITHOUT CLUSTER,CLUSTER ON index_name;
|
||||
ALTER MATERIALIZED VIEW IF EXISTS name
|
||||
RENAME COLUMN column_name TO new_column_name;
|
||||
ALTER MATERIALIZED VIEW IF EXISTS name
|
||||
RENAME TO new_name;
|
||||
ALTER MATERIALIZED VIEW IF EXISTS name
|
||||
SET SCHEMA new_schema;
|
||||
ALTER MATERIALIZED VIEW name SET SCHEMA new_schema;
|
||||
|
||||
-- ALTER OPERATOR
|
||||
ALTER OPERATOR - ( NONE , int ) OWNER TO new_owner;
|
||||
ALTER OPERATOR = ( integer , NONE ) SET SCHEMA new_schema;
|
||||
|
||||
-- ALTER OPERATOR CLASS
|
||||
ALTER OPERATOR CLASS name USING index_method RENAME TO new_name;
|
||||
ALTER OPERATOR CLASS name USING index_method OWNER TO new_owner;
|
||||
ALTER OPERATOR CLASS name USING index_method SET SCHEMA new_schema;
|
||||
ALTER OPERATOR FAMILY name USING index_method ADD
|
||||
OPERATOR 1 > ( int, int )
|
||||
FOR SEARCH,
|
||||
FUNCTION 24 ( int)
|
||||
function_name( int );
|
||||
|
||||
ALTER OPERATOR FAMILY name USING index_method DROP
|
||||
OPERATOR 32 ( op_typ ),FUNCTION 34 ( op_type);
|
||||
|
||||
-- ALTER OPERATOR FAMILY
|
||||
ALTER OPERATOR FAMILY name USING index_method ADD
|
||||
OPERATOR 4 > ( int, integer ) FOR SEARCH,
|
||||
FUNCTION 3 ( int, int) function_name ( numeric );
|
||||
ALTER OPERATOR FAMILY name USING index_method DROP
|
||||
OPERATOR 4 ( op_type, op_type ),
|
||||
FUNCTION 4 ( op_type, op_type );
|
||||
ALTER OPERATOR FAMILY name USING index_method RENAME TO new_name;
|
||||
ALTER OPERATOR FAMILY name USING index_method OWNER TO new_owner;
|
||||
ALTER OPERATOR FAMILY name USING index_method SET SCHEMA new_schema;
|
||||
|
||||
-- ALTER POLICY
|
||||
ALTER POLICY name ON table_name RENAME TO new_name;
|
||||
ALTER POLICY name ON table_name
|
||||
TO role_name, PUBLIC, CURRENT_ROLE, CURRENT_USER, SESSION_USER
|
||||
USING ( using_expression )
|
||||
WITH CHECK ( check_expression );
|
||||
|
||||
-- ALTER PROCEDURE
|
||||
ALTER PROCEDURE name ( IN argname integer, IN argname int)
|
||||
RESET ALL RESTRICT;
|
||||
ALTER PROCEDURE name ( IN argname integer )
|
||||
RENAME TO new_name;
|
||||
ALTER PROCEDURE name ( IN argname integer)
|
||||
OWNER TO CURRENT_ROLE;
|
||||
ALTER PROCEDURE name ( IN argname integer)
|
||||
SET SCHEMA new_schema;
|
||||
ALTER PROCEDURE name ( IN argname integer)
|
||||
NO DEPENDS ON EXTENSION extension_name;
|
||||
|
||||
-- ALTER PUBLICATION
|
||||
ALTER PUBLICATION name ADD TABLE ONLY table_name * ( column_name,column_name2) WHERE ( expression>3 );
|
||||
ALTER PUBLICATION name SET TABLE ONLY table_name * ( column_name,column_name2) WHERE ( expression ), TABLE ONLY table_name * ( column_name,column_name2) WHERE ( expression );
|
||||
ALTER PUBLICATION name DROP TABLE ONLY table_name * ( column_name,column_name2) WHERE ( expression );
|
||||
ALTER PUBLICATION name SET ( publication_parameter = value, parameters2 );
|
||||
ALTER PUBLICATION name OWNER TO CURRENT_ROLE;
|
||||
ALTER PUBLICATION name RENAME TO new_name;
|
||||
|
||||
-- ALTER ROLE
|
||||
ALTER ROLE name WITH SUPERUSER CREATEDB CREATEROLE VALID UNTIL 'timestamp';
|
||||
ALTER ROLE name RENAME TO new_name;
|
||||
ALTER ROLE ALL IN DATABASE database_name SET configuration_parameter = DEFAULT;
|
||||
ALTER ROLE name IN DATABASE database_name SET configuration_parameter FROM CURRENT;
|
||||
ALTER ROLE ALL IN DATABASE database_name RESET configuration_parameter;
|
||||
ALTER ROLE name IN DATABASE database_name RESET ALL;
|
||||
|
||||
-- ALTER ROUTINE
|
||||
ALTER ROUTINE name ( IN argname integer)
|
||||
COST execution_cost RESTRICT;
|
||||
ALTER ROUTINE name
|
||||
COST execution_cost IMMUTABLE LEAKPROOF SECURITY INVOKER ROWS result_rows SET configuration_parameter TO DEFAULT SET configuration_parameter FROM CURRENT RESET configuration_parameter RESET ALL;
|
||||
ALTER ROUTINE name ( IN argname integer)
|
||||
RENAME TO new_name;
|
||||
ALTER ROUTINE name ( IN argname integer)
|
||||
OWNER TO CURRENT_ROLE;
|
||||
ALTER ROUTINE name ( IN argname integer)
|
||||
SET SCHEMA new_schema;
|
||||
ALTER ROUTINE name ( IN argname integer)
|
||||
NO DEPENDS ON EXTENSION extension_name;
|
||||
|
||||
-- ALTER RULE
|
||||
ALTER RULE name ON table_name RENAME TO new_name;
|
||||
|
||||
-- ALTER SCHEMA
|
||||
ALTER SCHEMA name RENAME TO new_name;
|
||||
ALTER SCHEMA name OWNER TO new_owner;
|
||||
|
||||
-- ALTER SEQUENCE
|
||||
ALTER SEQUENCE IF EXISTS name INCREMENT BY 324
|
||||
MINVALUE 34 MAXVALUE 66
|
||||
START WITH 12 RESTART WITH 34
|
||||
RESTART WITH restart
|
||||
CACHE 324 NO CYCLE
|
||||
OWNED BY table_name.column_name;
|
||||
ALTER SEQUENCE name OWNER TO new_owner;
|
||||
ALTER SEQUENCE IF EXISTS name RENAME TO new_name;
|
||||
ALTER SEQUENCE name SET SCHEMA new_schema;
|
||||
|
||||
-- ALTER SERVER
|
||||
ALTER SERVER name VERSION 'new_version' OPTIONS ( ADD option 'value', SET option 'value', DROP option 'value');
|
||||
ALTER SERVER name OWNER TO new_owner;
|
||||
ALTER SERVER name RENAME TO new_name;
|
||||
|
||||
-- ALTER STATISTICS
|
||||
ALTER STATISTICS name OWNER TO CURRENT_ROLE;
|
||||
ALTER STATISTICS name OWNER TO CURRENT_USER;
|
||||
ALTER STATISTICS name OWNER TO SESSION_USER;
|
||||
ALTER STATISTICS name RENAME TO new_name;
|
||||
ALTER STATISTICS name SET SCHEMA new_schema;
|
||||
ALTER STATISTICS name SET STATISTICS 23;
|
||||
|
||||
-- ALTER SUBSCRIPTION
|
||||
ALTER SUBSCRIPTION name CONNECTION 'conninfo';
|
||||
ALTER SUBSCRIPTION name SET PUBLICATION publication_name,publication_name1 WITH ( publication_option = value) ;
|
||||
ALTER SUBSCRIPTION name ADD PUBLICATION publication_name WITH ( publication_option );
|
||||
ALTER SUBSCRIPTION name DROP PUBLICATION publication_name WITH ( publication_option);
|
||||
ALTER SUBSCRIPTION name REFRESH PUBLICATION WITH ( refresh_option = value);
|
||||
ALTER SUBSCRIPTION name ENABLE;
|
||||
ALTER SUBSCRIPTION name DISABLE;
|
||||
ALTER SUBSCRIPTION name SET ( subscription_parameter = value);
|
||||
ALTER SUBSCRIPTION name SKIP ( skip_option = value );
|
||||
ALTER SUBSCRIPTION name OWNER TO CURRENT_ROLE;
|
||||
ALTER SUBSCRIPTION name RENAME TO new_name;
|
||||
|
||||
-- ALTER SYSTEM
|
||||
ALTER SYSTEM SET configuration_parameter TO DEFAULT;
|
||||
ALTER SYSTEM RESET configuration_parameter;
|
||||
ALTER SYSTEM RESET ALL;
|
||||
|
||||
-- ALTER TABLE
|
||||
ALTER TABLE IF EXISTS ONLY name *
|
||||
ALTER COLUMN column_name SET DEFAULT expression, DISABLE RULE rewrite_rule_name, ADD CONSTRAINT constraint_name
|
||||
UNIQUE USING INDEX index_name DEFERRABLE INITIALLY DEFERRED;
|
||||
ALTER TABLE name
|
||||
RENAME COLUMN column_name TO new_column_name;
|
||||
ALTER TABLE IF EXISTS ONLY name *
|
||||
RENAME CONSTRAINT constraint_name TO new_constraint_name;
|
||||
ALTER TABLE IF EXISTS name
|
||||
RENAME TO new_name;
|
||||
ALTER TABLE name
|
||||
SET SCHEMA new_schema;
|
||||
ALTER TABLE ALL IN TABLESPACE name OWNED BY role_name,role_name2
|
||||
SET TABLESPACE new_tablespace NOWAIT;
|
||||
ALTER TABLE IF EXISTS name
|
||||
ATTACH PARTITION partition_name FOR VALUES FROM (MINVALUE, x>3) TO (MAXVALUE,MAXVALUE);
|
||||
ALTER TABLE orders
|
||||
ATTACH PARTITION orders_p4 FOR VALUES WITH (MODULUS 4, REMAINDER 4);
|
||||
ALTER TABLE IF EXISTS name
|
||||
DETACH PARTITION partition_name CONCURRENTLY;
|
||||
|
||||
-- ALTER TABLESPACE
|
||||
ALTER TABLESPACE name RENAME TO new_name;
|
||||
ALTER TABLESPACE name OWNER TO new_owner;
|
||||
ALTER TABLESPACE name SET ( tablespace_option = value, tablespace_option = value2 );
|
||||
ALTER TABLESPACE name RESET ( tablespace_option, tablespace_option2 );
|
||||
|
||||
-- ALTER TEXT SEARCH CONFIGURATION
|
||||
ALTER TEXT SEARCH CONFIGURATION name
|
||||
ADD MAPPING FOR token_type, token_type2 WITH dictionary_name, dictionary_name2;
|
||||
ALTER TEXT SEARCH CONFIGURATION name
|
||||
ALTER MAPPING FOR token_type, token_type2 WITH dictionary_name, dictionary_name2;
|
||||
ALTER TEXT SEARCH CONFIGURATION name
|
||||
ALTER MAPPING REPLACE old_dictionary WITH new_dictionary;
|
||||
ALTER TEXT SEARCH CONFIGURATION name
|
||||
ALTER MAPPING FOR token_type REPLACE old_dictionary WITH new_dictionary;
|
||||
ALTER TEXT SEARCH CONFIGURATION name
|
||||
DROP MAPPING IF EXISTS FOR token_type;
|
||||
ALTER TEXT SEARCH CONFIGURATION name RENAME TO new_name;
|
||||
ALTER TEXT SEARCH CONFIGURATION name OWNER TO new_owner;
|
||||
ALTER TEXT SEARCH CONFIGURATION name SET SCHEMA new_schema;
|
||||
|
||||
-- ALTER TEXT SEARCH DICTIONARY
|
||||
ALTER TEXT SEARCH DICTIONARY name (
|
||||
option_1 = value1, option_2
|
||||
);
|
||||
ALTER TEXT SEARCH DICTIONARY name RENAME TO new_name;
|
||||
ALTER TEXT SEARCH DICTIONARY name OWNER TO new_owner;
|
||||
ALTER TEXT SEARCH DICTIONARY name SET SCHEMA new_schema;
|
||||
|
||||
-- ALTER TEXT SEARCH PARSER
|
||||
ALTER TEXT SEARCH PARSER name RENAME TO new_name;
|
||||
ALTER TEXT SEARCH PARSER name SET SCHEMA new_schema;
|
||||
|
||||
-- ALTER TEXT SEARCH TEMPLATE
|
||||
ALTER TEXT SEARCH TEMPLATE name RENAME TO new_name;
|
||||
ALTER TEXT SEARCH TEMPLATE name SET SCHEMA new_schema;
|
||||
|
||||
-- ALTER TRIGGER
|
||||
ALTER TRIGGER name ON table_name RENAME TO new_name;
|
||||
|
||||
-- ALTER TYPE
|
||||
ALTER TYPE name ADD ATTRIBUTE attribute_name data_type COLLATE collation_name CASCADE;
|
||||
ALTER TYPE name OWNER TO new_owner;
|
||||
ALTER TYPE name RENAME ATTRIBUTE attribute_name TO new_attribute_name RESTRICT;
|
||||
ALTER TYPE name RENAME TO new_name;
|
||||
ALTER TYPE name SET SCHEMA new_schema;
|
||||
ALTER TYPE name ADD VALUE IF NOT EXISTS 'new_enum_value' BEFORE 'existing_enum_value';
|
||||
|
||||
-- ALTER USER
|
||||
ALTER USER name WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOCREATEUSER NOINHERIT NOLOGIN NOREPLICATION CONNECTION LIMIT 23 PASSWORD 'password' VALID UNTIL 'timestamp';
|
||||
ALTER USER name;
|
||||
|
||||
-- ALTER USER MAPPING
|
||||
ALTER USER MAPPING FOR CURRENT_USER
|
||||
SERVER server_name
|
||||
OPTIONS (ADD option 'value', SET option 'value', DROP option 'value');
|
||||
|
||||
-- ALTER VIEW
|
||||
ALTER VIEW name ALTER COLUMN column_name SET DEFAULT expression;
|
||||
ALTER VIEW IF EXISTS name ALTER column_name DROP DEFAULT;
|
||||
ALTER VIEW name OWNER TO new_owner;
|
||||
ALTER VIEW IF EXISTS name RENAME TO new_name;
|
||||
ALTER VIEW name SET SCHEMA new_schema;
|
||||
ALTER VIEW IF EXISTS name SET ( view_option_name = view_option_value, view_option_name2 = view_option_value2);
|
||||
ALTER VIEW name RESET ( view_option_name, view_option_name );
|
||||
|
||||
|
637
test/parser/postgresql/syntax/fixtures/create.sql
Normal file
@ -0,0 +1,637 @@
|
||||
-- Creating a New Table
|
||||
-- index_method: btree, hash, gist, spgist and gin
|
||||
-- The Most Complicated
|
||||
CREATE GLOBAL TEMPORARY TABLE IF NOT EXISTS table1 (col1 int COLLATE collation1 CONSTRAINT constraint_name NOT NULL DEFERRABLE) INHERITS (table_parent) WITH (storage_parameter = 1) ON COMMIT PRESERVE ROWS TABLESPACE tablespace_name;
|
||||
|
||||
CREATE LOCAL TEMP TABLE table1 (col1 int CONSTRAINT constraint_name NULL NOT DEFERRABLE, col2 text CHECK (age > 5) NOT DEFERRABLE INITIALLY DEFERRED, LIKE source_table INCLUDING DEFAULTS) INHERITS (table_parent) WITH OIDS TABLESPACE tablespace_name;
|
||||
|
||||
CREATE LOCAL TEMP TABLE table1 (col1 int) INHERITS (table_parent) WITH OIDS ON COMMIT DELETE ROWS;
|
||||
|
||||
CREATE UNLOGGED TABLE table1 (col1 int) INHERITS (table_parent) WITHOUT OIDS ON COMMIT DROP;
|
||||
|
||||
CREATE TABLE table_name1 OF type_name (col1 WITH OPTIONS CONSTRAINT constraint_name NOT NULL, col2 WITH OPTIONS CONSTRAINT constraint_name CHECK (age > 5) NOT DEFERRABLE INITIALLY DEFERRED);
|
||||
|
||||
CREATE TABLE table_name1 OF type_name (col1 WITH OPTIONS CONSTRAINT constraint_name NOT NULL, EXCLUDE USING hash (c WITH &&+) WITH (storage_parameter=3) USING INDEX TABLESPACE tablespace_name WHERE (predicate1=123) NOT DEFERRABLE INITIALLY DEFERRED);
|
||||
|
||||
|
||||
CREATE TABLE weather (
|
||||
city varchar(80),
|
||||
temp_lo int,
|
||||
-- low temperature
|
||||
temp_hi int,
|
||||
-- high temperature
|
||||
prcp real,
|
||||
-- precipitation
|
||||
date date
|
||||
);
|
||||
|
||||
CREATE TABLE cities (
|
||||
name varchar(80),
|
||||
location point
|
||||
);
|
||||
|
||||
-- remove it
|
||||
DROP TABLE weather;
|
||||
|
||||
-- operate view
|
||||
CREATE VIEW myview AS
|
||||
SELECT
|
||||
city,
|
||||
temp_lo,
|
||||
temp_hi,
|
||||
prcp,
|
||||
date,
|
||||
location
|
||||
FROM
|
||||
weather,
|
||||
cities
|
||||
WHERE
|
||||
city = name;
|
||||
|
||||
-- Foreign Keys
|
||||
CREATE TABLE cities (
|
||||
city varchar(80) primary key,
|
||||
location point
|
||||
);
|
||||
|
||||
CREATE TABLE weather (
|
||||
city varchar(80) references cities(city),
|
||||
temp_lo int,
|
||||
temp_hi int,
|
||||
prcp real,
|
||||
date date
|
||||
);
|
||||
|
||||
-- Schemas
|
||||
|
||||
-- Creating a Schema
|
||||
CREATE SCHEMA myschema;
|
||||
|
||||
CREATE SCHEMA database1.schema1.table1;
|
||||
|
||||
CREATE SCHEMA schemaname AUTHORIZATION username;
|
||||
|
||||
-- The Public Schema
|
||||
CREATE TABLE public7.products(col1 int);
|
||||
|
||||
-- Inheritance
|
||||
CREATE TABLE capitals (
|
||||
state char(2)
|
||||
) INHERITS (cities);
|
||||
|
||||
-- Partitioning Implementing Partitioning
|
||||
CREATE TABLE measurement_y2006m02 (
|
||||
CHECK ( logdate >= DATE '2006-02-01' AND logdate < DATE '2006-03-01' )
|
||||
) INHERITS (measurement);
|
||||
|
||||
CREATE TRIGGER insert_measurement_trigger
|
||||
BEFORE INSERT ON measurement
|
||||
FOR EACH ROW EXECUTE PROCEDURE measurement_insert_trigger();
|
||||
|
||||
CREATE TABLE measurement (
|
||||
city_id int not null,
|
||||
logdate date not null,
|
||||
peaktemp int,
|
||||
unitsales int
|
||||
);
|
||||
|
||||
CREATE VIEW measurement AS
|
||||
SELECT * FROM measurement_y2006m02
|
||||
UNION ALL SELECT * FROM measurement_y2006m03;
|
||||
|
||||
-- create Function
|
||||
CREATE FUNCTION get_color_note (rainbow) RETURNS text AS
|
||||
'SELECT note FROM my_colors WHERE color = $1'
|
||||
LANGUAGE SQL;
|
||||
|
||||
-- Data types
|
||||
CREATE TABLE products (
|
||||
product_no integer,
|
||||
price numeric,
|
||||
col4 smallint,
|
||||
col5 int,
|
||||
col6 bigint,
|
||||
col7 decimal,
|
||||
col8 smallserial,
|
||||
col9 serial,
|
||||
col10 bigserial,
|
||||
col11 real
|
||||
);
|
||||
|
||||
-- Character Types
|
||||
CREATE TABLE test1 (
|
||||
name text,
|
||||
a character(4),
|
||||
b character varying(4),
|
||||
c varchar(4)
|
||||
);
|
||||
|
||||
-- Enumerated Types
|
||||
CREATE TYPE mood AS ENUM ('sad', 'ok', 'happy');
|
||||
|
||||
-- Bit String Types
|
||||
CREATE TABLE test (a BIT(3), b BIT VARYING(5));
|
||||
|
||||
-- Arrays
|
||||
CREATE TABLE sal_emp (
|
||||
name text,
|
||||
pay_by_quarter integer[],
|
||||
schedule text[][]
|
||||
);
|
||||
|
||||
-- Composite Types
|
||||
CREATE TYPE inventory_item AS (
|
||||
name text,
|
||||
supplier_id integer,
|
||||
price numeric
|
||||
);
|
||||
CREATE TABLE on_hand (
|
||||
item inventory_item,
|
||||
count integer
|
||||
);
|
||||
|
||||
-- Range Types
|
||||
CREATE TYPE floatrange AS RANGE (
|
||||
subtype = float8,
|
||||
subtype_diff = float8mi
|
||||
);
|
||||
|
||||
-- CREATE ACCESS METHOD
|
||||
CREATE ACCESS METHOD name
|
||||
TYPE TABLE
|
||||
HANDLER handler_function;
|
||||
|
||||
-- CREATE AGGREGATE
|
||||
CREATE AGGREGATE agg_name1 ( int, integer) (
|
||||
SFUNC = sfunc,
|
||||
STYPE = state_data_type,
|
||||
FINALFUNC = ffunc,
|
||||
INITCOND = initial_condition,
|
||||
SORTOP = sort_operator
|
||||
);
|
||||
CREATE AGGREGATE agg_name2 ( int, integer) (
|
||||
SFUNC = sfunc,
|
||||
STYPE = state_data_type
|
||||
);
|
||||
CREATE AGGREGATE agg_name3 (
|
||||
BASETYPE = base_type,
|
||||
SFUNC = sfunc,
|
||||
STYPE = state_data_type,
|
||||
FINALFUNC = ffunc,
|
||||
INITCOND = initial_condition,
|
||||
SORTOP = sort_operator
|
||||
);
|
||||
CREATE AGGREGATE agg_name4 (
|
||||
BASETYPE = base_type,
|
||||
SFUNC = sfunc,
|
||||
STYPE = state_data_type
|
||||
);
|
||||
|
||||
-- CREATE CAST
|
||||
CREATE CAST (source_type1 AS target_type1)
|
||||
WITH FUNCTION function_name1 (argument_type1,argument_type2)
|
||||
AS ASSIGNMENT;
|
||||
CREATE CAST (source_type1 AS target_type1)
|
||||
WITH FUNCTION function_name1 (argument_type1);
|
||||
CREATE CAST (source_type2 AS target_type2)
|
||||
WITHOUT FUNCTION
|
||||
AS IMPLICIT;
|
||||
CREATE CAST (source_type2 AS target_type2)
|
||||
WITHOUT FUNCTION;
|
||||
CREATE CAST (source_type3 AS target_type3)
|
||||
WITH INOUT
|
||||
AS ASSIGNMENT;
|
||||
CREATE CAST (source_type3 AS target_type3)
|
||||
WITH INOUT;
|
||||
|
||||
-- CREATE COLLATION
|
||||
CREATE COLLATION coll_name (
|
||||
LOCALE = locale,
|
||||
LC_COLLATE = lc_collate,
|
||||
LC_CTYPE = lc_ctype,
|
||||
PROVIDER = provider,
|
||||
DETERMINISTIC = boolean,
|
||||
RULES = rules,
|
||||
VERSION = version
|
||||
);
|
||||
CREATE COLLATION coll_name FROM existing_collation;
|
||||
|
||||
-- CREATE CONVERSION
|
||||
CREATE DEFAULT CONVERSION conver_name
|
||||
FOR 'source_encoding' TO 'dest_encoding' FROM function_name;
|
||||
CREATE CONVERSION conver_name1
|
||||
FOR 'source_encoding' TO 'dest_encoding' FROM function_name;
|
||||
|
||||
-- CREATE DATABASE
|
||||
CREATE DATABASE name1
|
||||
WITH
|
||||
OWNER = user_name
|
||||
TEMPLATE = template
|
||||
ENCODING = encoding
|
||||
STRATEGY = strategy
|
||||
LOCALE = locale
|
||||
LC_COLLATE = lc_collate
|
||||
LC_CTYPE = lc_ctype
|
||||
ICU_LOCALE = icu_locale
|
||||
ICU_RULES = icu_rules
|
||||
LOCALE_PROVIDER = locale_provider
|
||||
COLLATION_VERSION = collation_version
|
||||
TABLESPACE = tablespace_name
|
||||
ALLOW_CONNECTIONS = allowconn
|
||||
CONNECTION LIMIT = connlimit
|
||||
IS_TEMPLATE = istemplate
|
||||
OID = oid;
|
||||
CREATE DATABASE name2;
|
||||
|
||||
-- CREATE DOMAIN
|
||||
CREATE DOMAIN domain_name AS data_type
|
||||
COLLATE col
|
||||
DEFAULT expr
|
||||
CONSTRAINT constraint_name NOT NULL
|
||||
NULL
|
||||
CHECK(
|
||||
VALUE ~ '^\d{5}$'
|
||||
OR VALUE ~ '^\d{5}-\d{4}$'
|
||||
);
|
||||
CREATE DOMAIN domain_name my_type;
|
||||
|
||||
-- CREATE EVENT TRIGGER
|
||||
CREATE EVENT TRIGGER trigger_name
|
||||
ON event_name
|
||||
WHEN TAG IN ('filter_value1', 'filter_value2') AND filter_variable IN ('filter_value1', 'filter_value2')
|
||||
EXECUTE PROCEDURE function_name();
|
||||
CREATE EVENT TRIGGER trigger_name
|
||||
ON event_name
|
||||
EXECUTE PROCEDURE function_name();
|
||||
|
||||
-- CREATE EXTENSION
|
||||
CREATE EXTENSION IF NOT EXISTS extension_name
|
||||
WITH SCHEMA schema_name
|
||||
VERSION version
|
||||
FROM old_version;
|
||||
CREATE EXTENSION extension_name1;
|
||||
|
||||
-- CREATE FOREIGN DATA WRAPPER
|
||||
CREATE FOREIGN DATA WRAPPER name1
|
||||
HANDLER handler_function
|
||||
VALIDATOR validator_function
|
||||
OPTIONS ( option_name 'value', option_name1 'value2');
|
||||
CREATE FOREIGN DATA WRAPPER name2
|
||||
NO HANDLER
|
||||
NO VALIDATOR
|
||||
OPTIONS ( option_name 'value');
|
||||
CREATE FOREIGN DATA WRAPPER name3;
|
||||
|
||||
-- CREATE FOREIGN TABLE
|
||||
CREATE FOREIGN TABLE IF NOT EXISTS table_name (column_name varchar(10) OPTIONS ( option_name1 'value', option_name2 'values') COLLATE coll_name CONSTRAINT constraint_name NOT NULL)
|
||||
SERVER server_name
|
||||
OPTIONS ( option_name1 'value', option_name2 'value3');
|
||||
CREATE FOREIGN TABLE films (
|
||||
code char(5) NOT NULL,
|
||||
title varchar(40) NOT NULL,
|
||||
did integer NOT NULL,
|
||||
date_prod date,
|
||||
kind varchar(10),
|
||||
len interval hour to minute
|
||||
)
|
||||
SERVER film_server;
|
||||
|
||||
-- CREATE FUNCTION
|
||||
CREATE OR REPLACE FUNCTION
|
||||
name ( INOUT argname int DEFAULT a>3)
|
||||
RETURNS integer
|
||||
AS 'obj_file'
|
||||
WITH (isStrict, isCachable);
|
||||
|
||||
-- CREATE GROUP
|
||||
CREATE GROUP group_name WITH SUPERUSER NOSUPERUSER CREATEDB NOCREATEDB
|
||||
CREATEROLE NOCREATEROLE
|
||||
INHERIT NOINHERIT
|
||||
LOGIN NOLOGIN
|
||||
REPLICATION NOREPLICATION
|
||||
BYPASSRLS NOBYPASSRLS
|
||||
CONNECTION LIMIT 234
|
||||
ENCRYPTED PASSWORD 'password'
|
||||
VALID UNTIL '2023-09-23'
|
||||
IN ROLE role_name, role_name1
|
||||
IN GROUP role_name
|
||||
ROLE role_name
|
||||
ADMIN role_name
|
||||
USER role_name
|
||||
SYSID 757;
|
||||
CREATE GROUP group_name WITH ENCRYPTED PASSWORD 'password';
|
||||
CREATE GROUP group_name;
|
||||
|
||||
-- CREATE INDEX
|
||||
CREATE UNIQUE INDEX CONCURRENTLY index_name ON table_name USING btree
|
||||
((a > 4) COLLATE collation_name ASC NULLS LAST )
|
||||
INCLUDE (column_name1, clou_2)
|
||||
NULLS NOT DISTINCT
|
||||
WITH ( storage_parameter = 1)
|
||||
TABLESPACE tablespace_name
|
||||
WHERE (y > 4);
|
||||
CREATE INDEX ON table_name (col1);
|
||||
|
||||
-- CREATE LANGUAGE
|
||||
CREATE OR REPLACE TRUSTED PROCEDURAL LANGUAGE lan_name1
|
||||
HANDLER call_handler INLINE inline_handler VALIDATOR valfunction;
|
||||
CREATE LANGUAGE name;
|
||||
CREATE LANGUAGE name HANDLER call_handler;
|
||||
|
||||
-- CREATE MATERIALIZED VIEW
|
||||
CREATE MATERIALIZED VIEW table_name
|
||||
(column_name, column_name2)
|
||||
WITH ( storage_parameter=3, storage_parameter1=4)
|
||||
TABLESPACE tablespace_name
|
||||
AS SELECT * FROM product
|
||||
WITH NO DATA;
|
||||
CREATE MATERIALIZED VIEW table_name2 AS SELECT * FROM product;
|
||||
|
||||
-- CREATE OPERATOR
|
||||
CREATE OPERATOR - (
|
||||
PROCEDURE = function_name,
|
||||
LEFTARG = left_type,
|
||||
RIGHTARG = right_type,
|
||||
COMMUTATOR = com_op,
|
||||
NEGATOR = neg_op,
|
||||
RESTRICT = res_proc,
|
||||
JOIN = join_proc,
|
||||
HASHES,
|
||||
MERGES
|
||||
);
|
||||
CREATE OPERATOR == (
|
||||
PROCEDURE = function_name
|
||||
);
|
||||
|
||||
-- CREATE OPERATOR CLASS
|
||||
CREATE OPERATOR CLASS op_class_name DEFAULT FOR TYPE type4
|
||||
USING index_method FAMILY family_name AS
|
||||
OPERATOR 2 = (arraytype, arraytype1) FOR ORDER BY sort_family_name,
|
||||
FUNCTION 4 (op_type1, op_type2 ) function_name ( argument_type, argument_type2 ),
|
||||
STORAGE storage_type;
|
||||
CREATE OPERATOR CLASS gist__int_ops
|
||||
FOR TYPE _int4 USING gist AS
|
||||
OPERATOR 3 &&;
|
||||
|
||||
-- CREATE OPERATOR FAMILY
|
||||
CREATE OPERATOR FAMILY name USING index_method;
|
||||
|
||||
-- CREATE POLICY
|
||||
CREATE POLICY name ON table_name
|
||||
AS PERMISSIVE
|
||||
FOR INSERT
|
||||
TO PUBLIC
|
||||
USING ( using_expression )
|
||||
WITH CHECK ( check_expression );
|
||||
CREATE POLICY name ON table_name;
|
||||
|
||||
-- CREATE PROCEDURE
|
||||
CREATE OR REPLACE PROCEDURE
|
||||
name ( IN argname int DEFAULT default_expr)
|
||||
LANGUAGE lang_name
|
||||
TRANSFORM FOR TYPE type_name
|
||||
EXTERNAL SECURITY INVOKER
|
||||
EXTERNAL SECURITY DEFINER
|
||||
SET configuration_parameter FROM CURRENT
|
||||
AS 'definition'
|
||||
AS 'obj_file', 'link_symbol'
|
||||
sql_body;
|
||||
|
||||
-- CREATE PUBLICATION
|
||||
CREATE PUBLICATION name
|
||||
FOR ALL TABLES
|
||||
WITH ( publication_parameter = value);
|
||||
CREATE PUBLICATION name;
|
||||
|
||||
-- CREATE ROLE
|
||||
CREATE ROLE name WITH SUPERUSER CREATEDB CREATEROLE
|
||||
CREATEUSER
|
||||
INHERIT
|
||||
LOGIN
|
||||
REPLICATION
|
||||
CONNECTION LIMIT 234
|
||||
ENCRYPTED PASSWORD 'password'
|
||||
VALID UNTIL '2013-09-20'
|
||||
IN ROLE role_name, role_name2
|
||||
IN GROUP role_name2, role_name3
|
||||
ROLE role_name3, role_name4
|
||||
ADMIN role_name4, role_name5
|
||||
USER role_name5, role_name6
|
||||
SYSID 234;
|
||||
|
||||
-- CREATE RULE
|
||||
CREATE OR REPLACE RULE name AS ON SELECT
|
||||
TO table_name WHERE y=3
|
||||
DO INSTEAD NOTHING;
|
||||
CREATE OR REPLACE RULE name AS ON SELECT
|
||||
TO table_name WHERE y=3
|
||||
DO ALSO (SELECT bb FROM prod);
|
||||
CREATE RULE rule_name AS ON UPDATE TO table_name DO NOTHING;
|
||||
|
||||
-- CREATE SCHEMA
|
||||
CREATE SCHEMA schema_name AUTHORIZATION user_name CREATE TABLE films (title text, release date, awards text[]) CREATE VIEW winners AS
|
||||
SELECT title, release FROM films WHERE awards IS NOT NULL;
|
||||
CREATE SCHEMA AUTHORIZATION user_name CREATE TABLE films (title text, release date, awards text[]);
|
||||
CREATE SCHEMA IF NOT EXISTS schema_name AUTHORIZATION user_name;
|
||||
CREATE SCHEMA IF NOT EXISTS AUTHORIZATION user_name;
|
||||
|
||||
-- CREATE SEQUENCE
|
||||
CREATE TEMPORARY SEQUENCE squen_name INCREMENT BY 2432 MINVALUE 45 MAXVALUE 12
|
||||
START WITH 4654 CACHE 1232 NO CYCLE
|
||||
OWNED BY table_name.column_name;
|
||||
CREATE SEQUENCE squen_name;
|
||||
|
||||
-- CREATE SERVER
|
||||
CREATE SERVER server_name TYPE 'server_type' VERSION 'server_version'
|
||||
FOREIGN DATA WRAPPER fdw_name
|
||||
OPTIONS ( option 'value', option 'value3');
|
||||
CREATE SERVER server_name FOREIGN DATA WRAPPER fdw_name;
|
||||
|
||||
-- CREATE STATISTICS
|
||||
CREATE STATISTICS IF NOT EXISTS statistics_name
|
||||
ON ( expression )
|
||||
FROM table_name;
|
||||
CREATE STATISTICS IF NOT EXISTS statistics_name
|
||||
( statistics_kind )
|
||||
ON column_name, ( expression )
|
||||
FROM table_name;
|
||||
CREATE STATISTICS ON column_name, column_name FROM table_name;
|
||||
|
||||
-- CREATE SUBSCRIPTION
|
||||
CREATE SUBSCRIPTION subscription_name
|
||||
CONNECTION 'conninfo'
|
||||
PUBLICATION publication_name, publication_name1
|
||||
WITH ( subscription_parameter = value, subscription_parameter = value);
|
||||
CREATE SUBSCRIPTION subscription_name
|
||||
CONNECTION 'conninfo'
|
||||
PUBLICATION publication_name;
|
||||
|
||||
-- CREATE TABLE
|
||||
CREATE UNLOGGED TABLE IF NOT EXISTS table_name (
|
||||
column_name int STORAGE PLAIN COMPRESSION compression_method COLLATE collation_name NOT NULL,
|
||||
CONSTRAINT constraint_name
|
||||
CHECK ( expression>3 ) NO INHERIT ,
|
||||
LIKE source_table
|
||||
)
|
||||
INHERITS ( parent_table, parent_table)
|
||||
PARTITION BY RANGE ( column_name COLLATE collation_name opclass)
|
||||
USING method
|
||||
WITH ( storage_parameter = value)
|
||||
ON COMMIT PRESERVE ROWS
|
||||
TABLESPACE tablespace_name;
|
||||
CREATE TABLE table_name (column_name int);
|
||||
CREATE GLOBAL TEMPORARY TABLE table_name
|
||||
OF int ( column_name WITH OPTIONS GENERATED ALWAYS AS ( generation_expr ) STORED
|
||||
)
|
||||
PARTITION BY HASH ( ( expression>3 ) COLLATE collation_name opclass)
|
||||
USING method
|
||||
WITH ( storage_parameter = value )
|
||||
ON COMMIT PRESERVE ROWS
|
||||
TABLESPACE tablespace_name;
|
||||
CREATE TABLE table_name OF type_name;
|
||||
CREATE TABLE table_name
|
||||
PARTITION OF parent_table (
|
||||
column_name WITH OPTIONS NOT NULL
|
||||
NULL
|
||||
CHECK ( expression ) NO INHERIT
|
||||
DEFAULT default_expr
|
||||
GENERATED ALWAYS AS ( generation_expr ) STORED
|
||||
GENERATED BY DEFAULT AS IDENTITY ( AS data_type )
|
||||
UNIQUE NULLS NOT DISTINCT INCLUDE ( column_name )
|
||||
PRIMARY KEY WITH ( storage_parameter = value )
|
||||
REFERENCES reftable ( refcolumn ) MATCH FULL
|
||||
ON DELETE NO ACTION ON UPDATE RESTRICT
|
||||
DEFERRABLE INITIALLY DEFERRED
|
||||
) FOR VALUES FROM (MINVALUE, x>3) TO (MAXVALUE,MAXVALUE);
|
||||
|
||||
-- CREATE TABLE AS
|
||||
CREATE GLOBAL TEMPORARY TABLE table_name
|
||||
(column_name, column_name2)
|
||||
WITH ( storage_parameter = 4)
|
||||
ON COMMIT PRESERVE ROWS
|
||||
TABLESPACE tablespace_name
|
||||
AS SELECT * FROM ad
|
||||
WITH NO DATA;
|
||||
CREATE TABLE table_name AS SELECT * FROM ad;
|
||||
|
||||
-- CREATE TABLESPACE
|
||||
CREATE TABLESPACE tablespace_name OWNER user_name LOCATION 'directory';
|
||||
CREATE TABLESPACE tablespace_name LOCATION 'directory';
|
||||
|
||||
-- CREATE TEXT SEARCH CONFIGURATION
|
||||
CREATE TEXT SEARCH CONFIGURATION name (
|
||||
PARSER = parser_name
|
||||
);
|
||||
CREATE TEXT SEARCH CONFIGURATION name (
|
||||
COPY = source_config
|
||||
);
|
||||
|
||||
-- CREATE TEXT SEARCH DICTIONARY
|
||||
CREATE TEXT SEARCH DICTIONARY name (
|
||||
TEMPLATE = template
|
||||
);
|
||||
|
||||
-- CREATE TEXT SEARCH PARSER
|
||||
CREATE TEXT SEARCH PARSER name (
|
||||
START = start_function ,
|
||||
GETTOKEN = gettoken_function ,
|
||||
END = end_function ,
|
||||
LEXTYPES = lextypes_function,
|
||||
HEADLINE = headline_function
|
||||
);
|
||||
CREATE TEXT SEARCH PARSER name (
|
||||
START = start_function ,
|
||||
GETTOKEN = gettoken_function ,
|
||||
END = end_function ,
|
||||
LEXTYPES = lextypes_function
|
||||
);
|
||||
|
||||
-- CREATE TEXT SEARCH TEMPLATE
|
||||
CREATE TEXT SEARCH TEMPLATE name (
|
||||
INIT = init_function,
|
||||
LEXIZE = lexize_function
|
||||
);
|
||||
CREATE TEXT SEARCH TEMPLATE name (
|
||||
LEXIZE = lexize_function
|
||||
);
|
||||
|
||||
-- CREATE TRANSFORM
|
||||
CREATE OR REPLACE TRANSFORM FOR type_name LANGUAGE lang_name (
|
||||
FROM SQL WITH FUNCTION from_sql_function_name (argument_type),
|
||||
TO SQL WITH FUNCTION to_sql_function_name (argument_type )
|
||||
);
|
||||
|
||||
-- CREATE TRIGGER
|
||||
CREATE OR REPLACE CONSTRAINT TRIGGER trig_name INSTEAD OF INSERT OR UPDATE
|
||||
ON table_name
|
||||
FROM referenced_table_name
|
||||
DEFERRABLE INITIALLY IMMEDIATE
|
||||
FOR EACH STATEMENT
|
||||
WHEN (OLD.balance IS DISTINCT FROM NEW.balance)
|
||||
EXECUTE PROCEDURE function_name ();
|
||||
|
||||
-- CREATE TYPE
|
||||
CREATE TYPE name AS
|
||||
(attribute_name int COLLATE collation_name, attribute_name integer COLLATE collation_name);
|
||||
|
||||
CREATE TYPE name AS ENUM
|
||||
('label', 'name');
|
||||
|
||||
CREATE TYPE name AS RANGE (
|
||||
SUBTYPE = subtype,
|
||||
SUBTYPE_OPCLASS = subtype_operator_class,
|
||||
COLLATION = collation,
|
||||
CANONICAL = canonical_function,
|
||||
SUBTYPE_DIFF = subtype_diff_function
|
||||
);
|
||||
CREATE TYPE name (
|
||||
INPUT = input_function,
|
||||
OUTPUT = output_function,
|
||||
RECEIVE = receive_function,
|
||||
SEND = send_function,
|
||||
TYPMOD_IN = type_modifier_input_function,
|
||||
TYPMOD_OUT = type_modifier_output_function,
|
||||
ANALYZE = analyze_function,
|
||||
INTERNALLENGTH = 13,
|
||||
PASSEDBYVALUE,
|
||||
ALIGNMENT = alignment,
|
||||
STORAGE = storage,
|
||||
LIKE = like_type,
|
||||
CATEGORY = category,
|
||||
PREFERRED = preferred,
|
||||
DEFAULT = default_value,
|
||||
ELEMENT = float4,
|
||||
DELIMITER = delimiter,
|
||||
COLLATABLE = collatable
|
||||
);
|
||||
CREATE TYPE name;
|
||||
|
||||
-- CREATE USER
|
||||
CREATE USER name WITH NOSUPERUSER NOCREATEDB NOCREATEROLE
|
||||
NOCREATEUSER
|
||||
NOINHERIT
|
||||
NOLOGIN
|
||||
NOREPLICATION
|
||||
CONNECTION LIMIT 234
|
||||
UNENCRYPTED PASSWORD 'password'
|
||||
VALID UNTIL '2013-09-20'
|
||||
IN ROLE role_name, role_name2
|
||||
IN GROUP role_name2, role_name3
|
||||
ROLE role_name3, role_name4
|
||||
ADMIN role_name4, role_name5
|
||||
USER role_name5, role_name6
|
||||
SYSID 234;
|
||||
|
||||
-- CREATE USER MAPPING
|
||||
CREATE USER MAPPING FOR CURRENT_USER
|
||||
SERVER server_name
|
||||
OPTIONS ( option 'value');
|
||||
CREATE USER MAPPING FOR PUBLIC
|
||||
SERVER server_name;
|
||||
|
||||
-- CREATE VIEW
|
||||
CREATE OR REPLACE TEMP RECURSIVE VIEW name ( column_name, column_name2)
|
||||
WITH ( view_option_name = 2321, view_option_name2='ewfwe')
|
||||
AS SELECT * FROM my_view;
|
||||
CREATE VIEW view_name AS SELECT * FROM my_view;
|
||||
|
13
test/parser/postgresql/syntax/fixtures/delete.sql
Normal file
@ -0,0 +1,13 @@
DELETE FROM weather WHERE city = 'Hayward';

DELETE FROM products
WHERE obsoletion_date = 'today'
RETURNING ab AS abc_name;

WITH RECURSIVE a AS (SELECT * from bt )
DELETE FROM ONLY table_name * AS alias
USING using_list
WHERE y > 4
RETURNING *;

DELETE FROM table_name;
177
test/parser/postgresql/syntax/fixtures/drop.sql
Normal file
@ -0,0 +1,177 @@
|
||||
-- Drop Table
|
||||
DROP TABLE IF EXISTS table1, table2 RESTRICT;
|
||||
DROP TABLE products CASCADE;
|
||||
DROP TABLE products1;
|
||||
|
||||
-- DROP ACCESS METHOD
|
||||
DROP ACCESS METHOD IF EXISTS name CASCADE;
|
||||
DROP ACCESS METHOD name;
|
||||
|
||||
-- DROP AGGREGATE
|
||||
DROP AGGREGATE IF EXISTS aggname1(int, integer), aggname2(IN argname integer) CASCADE;
|
||||
DROP AGGREGATE aggname2(int);
|
||||
|
||||
-- DROP CAST
|
||||
DROP CAST IF EXISTS (integer AS int) RESTRICT;
|
||||
DROP CAST (int AS integer);
|
||||
|
||||
-- DROP COLLATION
|
||||
DROP COLLATION IF EXISTS collation_name CASCADE;
|
||||
DROP COLLATION collation_name1;
|
||||
|
||||
-- DROP CONVERSION
|
||||
DROP CONVERSION IF EXISTS conver_name RESTRICT;
|
||||
DROP CONVERSION conver_name1;
|
||||
|
||||
-- DROP DATABASE
|
||||
DROP DATABASE IF EXISTS db_name WITH (FORCE);
|
||||
DROP DATABASE db_name;
|
||||
|
||||
-- DROP DOMAIN
|
||||
DROP DOMAIN IF EXISTS domain_name1, domain_name2 CASCADE;
|
||||
DROP DOMAIN domain_name3;
|
||||
|
||||
-- DROP EVENT TRIGGER
|
||||
DROP EVENT TRIGGER IF EXISTS trigger_name RESTRICT;
|
||||
DROP EVENT TRIGGER trigger_name1;
|
||||
|
||||
-- DROP EXTENSION
|
||||
DROP EXTENSION IF EXISTS extension_name1, extension_name2 CASCADE;
|
||||
DROP EXTENSION extension_name3;
|
||||
|
||||
-- DROP FOREIGN DATA WRAPPER
|
||||
DROP FOREIGN DATA WRAPPER IF EXISTS foreigndata_name RESTRICT;
|
||||
DROP FOREIGN DATA WRAPPER foreigndata_name2;
|
||||
|
||||
-- DROP FOREIGN TABLE
|
||||
DROP FOREIGN TABLE IF EXISTS foreigntable_name1, foreigntable_name2 CASCADE;
|
||||
DROP FOREIGN TABLE foreigntable_name3;
|
||||
|
||||
-- DROP FUNCTION
|
||||
DROP FUNCTION IF EXISTS function_name (IN argname integer) RESTRICT;
|
||||
DROP FUNCTION function_name (integer);
|
||||
|
||||
-- DROP GROUP
|
||||
DROP GROUP IF EXISTS group_name1, group_name2;
|
||||
DROP GROUP group_name3;
|
||||
|
||||
-- DROP INDEX
|
||||
DROP INDEX CONCURRENTLY IF EXISTS index_name1, index_name2 CASCADE;
|
||||
DROP INDEX index_name3;
|
||||
|
||||
-- DROP LANGUAGE
|
||||
DROP PROCEDURAL LANGUAGE IF EXISTS lan_name RESTRICT;
|
||||
DROP LANGUAGE lan_name1;
|
||||
|
||||
-- DROP MATERIALIZED VIEW
|
||||
DROP MATERIALIZED VIEW IF EXISTS view_name1, view_name2 CASCADE;
|
||||
DROP MATERIALIZED VIEW view_name3;
|
||||
|
||||
-- DROP OPERATOR
|
||||
DROP OPERATOR IF EXISTS ! ( NONE , int ) RESTRICT;
|
||||
DROP OPERATOR - (int, int);
|
||||
DROP OPERATOR ^ (int, int);
|
||||
|
||||
-- DROP OPERATOR CLASS
|
||||
DROP OPERATOR CLASS IF EXISTS classs_name USING btree CASCADE;
|
||||
DROP OPERATOR CLASS classs_name USING index_method;
|
||||
|
||||
-- DROP OPERATOR FAMILY
|
||||
DROP OPERATOR FAMILY IF EXISTS family_name USING index_method RESTRICT;
|
||||
DROP OPERATOR FAMILY family_name1 USING index_method;
|
||||
|
||||
-- DROP OWNED
|
||||
DROP OWNED BY owner_name1, CURRENT_ROLE CASCADE;
|
||||
DROP OWNED BY CURRENT_USER;
|
||||
|
||||
-- DROP POLICY
|
||||
DROP POLICY IF EXISTS name ON table_name RESTRICT;
|
||||
DROP POLICY name1 ON table_name1;
|
||||
|
||||
-- DROP PROCEDURE
|
||||
DROP PROCEDURE IF EXISTS name( IN argname int, OUT argname integer), name1( OUT argname int, OUT argname int) CASCADE;
|
||||
DROP PROCEDURE name1;
|
||||
|
||||
-- DROP PUBLICATION
|
||||
DROP PUBLICATION IF EXISTS name CASCADE;
|
||||
DROP PUBLICATION name;
|
||||
|
||||
-- DROP ROLE
|
||||
DROP ROLE IF EXISTS role_name1, role_name2;
|
||||
DROP ROLE role_name3;
|
||||
|
||||
-- DROP ROUTINE
|
||||
DROP ROUTINE IF EXISTS name (IN argname int, OUT argname integer),name1( OUT argname int, OUT argname int) RESTRICT;
|
||||
DROP ROUTINE name1;
|
||||
|
||||
-- DROP RULE
|
||||
DROP RULE IF EXISTS rule_name ON table_name CASCADE;
|
||||
DROP RULE rule_name1 ON table_name1;
|
||||
|
||||
-- DROP SCHEMA
|
||||
DROP SCHEMA IF EXISTS schema_name1, schema_name2 RESTRICT;
|
||||
DROP SCHEMA myschema;
|
||||
|
||||
-- DROP SEQUENCE
|
||||
DROP SEQUENCE IF EXISTS con_name1, con_name2 CASCADE;
|
||||
DROP SEQUENCE con_name3;
|
||||
|
||||
-- DROP SERVER
|
||||
DROP SERVER IF EXISTS server_name RESTRICT;
|
||||
DROP SERVER server_name1;
|
||||
|
||||
-- DROP STATISTICS
|
||||
DROP STATISTICS IF EXISTS name, name1 CASCADE;
|
||||
DROP STATISTICS name1;
|
||||
|
||||
-- DROP SUBSCRIPTION
|
||||
DROP SUBSCRIPTION IF EXISTS name RESTRICT;
|
||||
DROP SUBSCRIPTION name1;
|
||||
|
||||
-- DROP TABLESPACE
|
||||
DROP TABLESPACE IF EXISTS tbspace_name1;
|
||||
DROP TABLESPACE tbspace_name2;
|
||||
|
||||
-- DROP TEXT SEARCH CONFIGURATION
|
||||
DROP TEXT SEARCH CONFIGURATION IF EXISTS text_name CASCADE;
|
||||
DROP TEXT SEARCH CONFIGURATION text_name1;
|
||||
|
||||
-- DROP TEXT SEARCH DICTIONARY
|
||||
DROP TEXT SEARCH DICTIONARY IF EXISTS dic_name1 RESTRICT;
|
||||
DROP TEXT SEARCH DICTIONARY dic_name2;
|
||||
|
||||
-- DROP TEXT SEARCH PARSER
|
||||
DROP TEXT SEARCH PARSER IF EXISTS parser_name1 CASCADE;
|
||||
DROP TEXT SEARCH PARSER parser_name2;
|
||||
|
||||
-- DROP TEXT SEARCH TEMPLATE
|
||||
DROP TEXT SEARCH TEMPLATE IF EXISTS temp_name1 RESTRICT;
|
||||
DROP TEXT SEARCH TEMPLATE temp_name2;
|
||||
|
||||
-- DROP TRANSFORM
|
||||
DROP TRANSFORM IF EXISTS FOR type_name LANGUAGE lang_name RESTRICT;
|
||||
DROP TRANSFORM FOR type_name LANGUAGE lang_name;
|
||||
|
||||
-- DROP TRIGGER
|
||||
DROP TRIGGER IF EXISTS trigger_name1 ON table_name1 CASCADE;
|
||||
DROP TRIGGER trigger_name2 ON table_name2;
|
||||
|
||||
-- DROP TYPE
|
||||
DROP TYPE IF EXISTS type_name1, type_name2 RESTRICT;
|
||||
DROP TYPE type_name3;
|
||||
|
||||
-- DROP USER
|
||||
DROP USER IF EXISTS user_name1, user_name2;
|
||||
DROP USER user_name3;
|
||||
|
||||
-- DROP USER MAPPING
|
||||
DROP USER MAPPING IF EXISTS FOR user_name SERVER server_name;
|
||||
DROP USER MAPPING IF EXISTS FOR USER SERVER server_name;
|
||||
DROP USER MAPPING IF EXISTS FOR CURRENT_USER SERVER server_name;
|
||||
DROP USER MAPPING IF EXISTS FOR CURRENT_ROLE SERVER server_name;
|
||||
DROP USER MAPPING IF EXISTS FOR PUBLIC SERVER server_name;
|
||||
DROP USER MAPPING FOR PUBLIC SERVER server_name;
|
||||
|
||||
-- DROP VIEW
|
||||
DROP VIEW IF EXISTS view_name1, view_name2 RESTRICT;
|
||||
DROP VIEW view_name3;
|
37
test/parser/postgresql/syntax/fixtures/insert.sql
Normal file
@ -0,0 +1,37 @@
-- Inserting Data
INSERT INTO weather VALUES ('San Francisco', 46, 50, 0.25, '1994-11-27');

INSERT INTO weather (city, temp_lo, temp_hi, prcp, date)
VALUES ('San Francisco', 43, 57, 0.0, '1994-11-29');

INSERT INTO weather (date, city, temp_hi, temp_lo)
VALUES ('1994-11-29', 'Hayward', 54, 37);

INSERT INTO products (product_no, name, price)
SELECT product_no, name, price FROM new_products
WHERE release_date = 'today';

INSERT INTO products DEFAULT VALUES;

INSERT INTO cities (name, population, altitude, state)
VALUES ('New York', NULL, NULL, 'NY');

-- Boolean Type
INSERT INTO test1 VALUES (FALSE, TRUE, 'non est', 't', 'true', 'y', 'on', '1', 'f', 'false', 'n', 'no', 'off', 0);

-- Arrays -- . Array Value Input
INSERT INTO sal_emp
VALUES ('Bill',
'{10000, 10000, 10000, 10000}',
'{{"meeting", "lunch"}, {"training", "presentation"}}');

-- Range Types
INSERT INTO reservation VALUES
(1108, '[2010-01-01 14:30, 2010-01-01 15:30)');

-- INSERT
WITH RECURSIVE a AS (SELECT * from bt )
INSERT INTO table_name.dt ( column_name, column_name2)
VALUES (1, 2)
RETURNING * ;
INSERT INTO films DEFAULT VALUES;
321
test/parser/postgresql/syntax/fixtures/others.sql
Normal file
@ -0,0 +1,321 @@
|
||||
-- ABORT
|
||||
ABORT WORK;
|
||||
ABORT TRANSACTION;
|
||||
ABORT AND NO CHAIN;
|
||||
|
||||
-- ANALYZE
|
||||
ANALYZE VERBOSE table_name ( column_name, column_name2);
|
||||
ANALYZE VERBOSE;
|
||||
ANALYZE (VERBOSE false);
|
||||
ANALYZE (SKIP_LOCKED true);
|
||||
ANALYZE (BUFFER_USAGE_LIMIT 4);
|
||||
ANALYZE (SKIP_LOCKED false, SKIP_LOCKED false, BUFFER_USAGE_LIMIT '4KB');
|
||||
ANALYZE (SKIP_LOCKED false, SKIP_LOCKED false, BUFFER_USAGE_LIMIT '4KB') table_name ( column_name, column_name2);
|
||||
ANALYZE;
|
||||
|
||||
-- BEGIN
|
||||
BEGIN WORK ISOLATION LEVEL READ UNCOMMITTED
|
||||
READ WRITE
|
||||
NOT DEFERRABLE;
|
||||
|
||||
-- CALL
|
||||
CALL name (name => value);
|
||||
CALL name;
|
||||
|
||||
-- CHECKPOINT
|
||||
CHECKPOINT;
|
||||
|
||||
-- CLUSTER
|
||||
CLUSTER VERBOSE table_name USING index_name;
|
||||
CLUSTER (VERBOSE, VERBOSE TRUE) table_name USING index_name;
|
||||
CLUSTER VERBOSE;
|
||||
CLUSTER;
|
||||
|
||||
-- CLOSE
|
||||
CLOSE ALL;
|
||||
CLOSE name_2;
|
||||
|
||||
-- COMMENT
|
||||
COMMENT ON
|
||||
ACCESS METHOD object_name IS 'text';
|
||||
COMMENT ON
|
||||
AGGREGATE agg_name (agg_type, agg_type2) IS 'text';
|
||||
COMMENT ON CAST (source_type AS target_type) IS 'text';
|
||||
COMMENT ON COLLATION object_name IS 'text';
|
||||
COMMENT ON COLUMN relation_name.column_name IS 'text';
|
||||
COMMENT ON CONSTRAINT constraint_name ON table_name IS 'text';
|
||||
COMMENT ON CONSTRAINT constraint_name ON DOMAIN domain_name IS 'text';
|
||||
COMMENT ON CONVERSION object_name IS 'text';
|
||||
COMMENT ON CONSTRAINT constraint_name ON table_name IS 'text';
|
||||
COMMENT ON DATABASE object_name IS 'text';
|
||||
COMMENT ON DOMAIN object_name IS 'text';
|
||||
COMMENT ON EXTENSION object_name IS 'text';
|
||||
COMMENT ON EVENT TRIGGER object_name IS 'text';
|
||||
COMMENT ON FOREIGN DATA WRAPPER object_name IS 'text';
|
||||
COMMENT ON FOREIGN TABLE object_name IS 'text';
|
||||
COMMENT ON FUNCTION function_name ( INOUT argname timestamp) IS 'text';
|
||||
COMMENT ON INDEX object_name IS 'text';
|
||||
COMMENT ON LARGE OBJECT 346344 IS 'text';
|
||||
COMMENT ON MATERIALIZED VIEW object_name IS 'text';
|
||||
COMMENT ON OPERATOR -(int, NONE) IS 'text';
|
||||
COMMENT ON OPERATOR CLASS object_name USING index_method IS 'text';
|
||||
COMMENT ON OPERATOR FAMILY object_name USING index_method IS 'text';
|
||||
COMMENT ON POLICY policy_name ON table_name IS 'text';
|
||||
COMMENT ON PROCEDURAL LANGUAGE object_name IS 'text';
|
||||
COMMENT ON PROCEDURE procedure_name IS 'text';
|
||||
COMMENT ON PUBLICATION object_name IS 'text';
|
||||
COMMENT ON ROLE object_name IS 'text';
|
||||
COMMENT ON ROUTINE routine_name IS 'text';
|
||||
COMMENT ON RULE rule_name ON table_name IS 'text';
|
||||
COMMENT ON SCHEMA object_name IS 'text';
|
||||
COMMENT ON SEQUENCE object_name IS 'text';
|
||||
COMMENT ON SERVER object_name IS 'text';
|
||||
COMMENT ON STATISTICS object_name IS 'text';
|
||||
COMMENT ON SUBSCRIPTION object_name IS 'text';
|
||||
COMMENT ON TABLE object_name IS 'text';
|
||||
COMMENT ON TABLESPACE object_name IS 'text';
|
||||
COMMENT ON TEXT SEARCH CONFIGURATION object_name IS 'text';
|
||||
COMMENT ON TEXT SEARCH DICTIONARY object_name IS 'text';
|
||||
COMMENT ON TEXT SEARCH PARSER object_name IS 'text';
|
||||
COMMENT ON TEXT SEARCH TEMPLATE object_name IS 'text';
|
||||
COMMENT ON TRIGGER trigger_name ON table_name IS 'text';
|
||||
COMMENT ON TYPE object_name IS 'text';
|
||||
COMMENT ON VIEW object_name IS 'text';
|
||||
|
||||
-- COMMIT
|
||||
COMMIT TRANSACTION;
|
||||
COMMIT WORK;
|
||||
COMMIT AND NO CHAIN;
|
||||
|
||||
-- COMMIT PREPARED
|
||||
COMMIT PREPARED 'foobar';
|
||||
|
||||
-- COPY
|
||||
COPY table_name ( column_name, column_name2)
|
||||
FROM PROGRAM 'command'
|
||||
WITH ( FORMAT format_name);
|
||||
COPY (SELECT * FROM td)
|
||||
TO STDOUT
|
||||
WITH (DELIMITER 'delimiter_character');
|
||||
|
||||
|
||||
-- DEALLOCATE
|
||||
DEALLOCATE PREPARE name;
|
||||
DEALLOCATE PREPARE ALL;
|
||||
|
||||
-- DECLARE
|
||||
DECLARE name BINARY INSENSITIVE NO SCROLL CURSOR WITH HOLD FOR
|
||||
SELECT
|
||||
*
|
||||
FROM
|
||||
tb;
|
||||
|
||||
DECLARE name CURSOR FOR
|
||||
SELECT
|
||||
*
|
||||
FROM
|
||||
abs;
|
||||
|
||||
-- DISCARD
|
||||
DISCARD TEMPORARY;
|
||||
DISCARD PLANS;
|
||||
DISCARD ALL;
|
||||
DISCARD TEMP;
|
||||
|
||||
-- DO
|
||||
DO LANGUAGE lang_name '$$DECLARE';
|
||||
DO '$$DECLARE';
|
||||
|
||||
-- END
|
||||
END TRANSACTION;
|
||||
END WORK;
|
||||
|
||||
-- EXECUTE
|
||||
EXECUTE name ( parameter, parameter2);
|
||||
|
||||
-- EXPLAIN
|
||||
EXPLAIN ( ANALYZE 'true',VERBOSE true, COSTS TRUE, FORMAT TEXT) SELECT * FROM no_nw;
|
||||
EXPLAIN ANALYZE VERBOSE SELECT * FROM no_nw;
|
||||
EXPLAIN SELECT * FROM no_nw;
|
||||
|
||||
-- FETCH
|
||||
FETCH NEXT FROM cursor_name;
|
||||
|
||||
-- GRANT
|
||||
GRANT SELECT, INSERT, UPDATE, DELETE, TRUNCATE, REFERENCES, TRIGGER
|
||||
ON TABLE table_name, table_name2
|
||||
TO GROUP role_name,PUBLIC WITH GRANT OPTION;
|
||||
GRANT ALL PRIVILEGES ON ALL TABLES IN SCHEMA schema_name, schema_name2 TO PUBLIC;
|
||||
GRANT SELECT( column_name, column_name2), INSERT( column_name, column_name2), UPDATE( column_name, column_name2), REFERENCES ( column_name, column_name2)
|
||||
ON TABLE table_name
|
||||
TO GROUP role_name WITH GRANT OPTION;
|
||||
GRANT ALL PRIVILEGES ( column_name, column_name2) ON table_name TO role_name;
|
||||
GRANT USAGE, SELECT, UPDATE
|
||||
ON SEQUENCE sequence_name
|
||||
TO GROUP role_name, PUBLIC WITH GRANT OPTION;
|
||||
GRANT ALL PRIVILEGES
|
||||
ON ALL SEQUENCES IN SCHEMA schema_name
|
||||
TO PUBLIC WITH GRANT OPTION;
|
||||
GRANT CREATE, CONNECT, TEMPORARY, TEMP
|
||||
ON DATABASE database_name
|
||||
TO GROUP role_name, PUBLIC WITH GRANT OPTION;
|
||||
GRANT role_name TO role_name;
|
||||
|
||||
-- IMPORT FOREIGN SCHEMA
|
||||
IMPORT FOREIGN SCHEMA remote_schema
|
||||
LIMIT TO ( table_name)
|
||||
FROM SERVER server_name
|
||||
INTO local_schema
|
||||
OPTIONS ( option 'value');
|
||||
|
||||
-- LISTEN
|
||||
LISTEN channel;
|
||||
|
||||
-- LOAD
|
||||
LOAD 'filename';
|
||||
|
||||
-- LOCK
|
||||
LOCK TABLE ONLY name * IN ACCESS SHARE MODE NOWAIT;
|
||||
|
||||
-- MOVE
|
||||
MOVE NEXT FROM cursor_name;
|
||||
|
||||
-- MERGE
|
||||
WITH query_name (id) AS (SELECT id FROM table_expression)
|
||||
MERGE INTO ONLY target_table_name * AS target_alias
|
||||
USING ONLY source_table_name * ON s.winename = w.winename
|
||||
WHEN MATCHED AND s.winename = w.winename THEN UPDATE SET column_name = stock + 3
|
||||
WHEN NOT MATCHED AND stock_delta + stock > 0 THEN INSERT ( column_name) OVERRIDING SYSTEM VALUE VALUES (s.winename)
|
||||
WHEN MATCHED THEN DELETE;
|
||||
|
||||
-- NOTIFY
|
||||
NOTIFY virtual, 'This is the payload';
|
||||
|
||||
-- PREPARE
|
||||
PREPARE name ( int, numeric) AS INSERT INTO foo VALUES($1, $2, $3, $4);
|
||||
|
||||
-- PREPARE TRANSACTION
|
||||
PREPARE TRANSACTION 'foobar';
|
||||
|
||||
-- REASSIGN OWNED
|
||||
REASSIGN OWNED BY old_role TO new_role;
|
||||
|
||||
-- REFRESH MATERIALIZED VIEW
|
||||
REFRESH MATERIALIZED VIEW name WITH NO DATA;
|
||||
|
||||
-- REINDEX
|
||||
REINDEX DATABASE CONCURRENTLY dbname;
|
||||
REINDEX TABLE tbname;
|
||||
REINDEX INDEX indexname;
|
||||
REINDEX SYSTEM sysname;
|
||||
|
||||
-- RELEASE SAVEPOINT
|
||||
RELEASE SAVEPOINT savepoint_name;
|
||||
|
||||
-- RESET
|
||||
RESET configuration_parameter;
|
||||
RESET ALL;
|
||||
|
||||
-- REVOKE
|
||||
REVOKE GRANT OPTION FOR
|
||||
REFERENCES, CREATE
|
||||
ON TABLE table_name
|
||||
FROM GROUP role_name, PUBLIC, SESSION_USER
|
||||
RESTRICT;
|
||||
REVOKE ALL PRIVILEGES ON accounts FROM CURRENT_USER;
|
||||
REVOKE CREATE ON SCHEMA public_name FROM CURRENT_ROLE;
|
||||
|
||||
-- ROLLBACK
|
||||
ROLLBACK TRANSACTION AND NO CHAIN;
|
||||
ROLLBACK WORK;
|
||||
|
||||
-- ROLLBACK PREPARED
|
||||
ROLLBACK PREPARED 'foobar';
|
||||
|
||||
-- ROLLBACK TO SAVEPOINT
|
||||
ROLLBACK TRANSACTION TO SAVEPOINT savepoint_name;
|
||||
ROLLBACK WORK TO SAVEPOINT savepoint_name;
|
||||
ROLLBACK TO savepoint_name;
|
||||
|
||||
-- SAVEPOINT
|
||||
SAVEPOINT savepoint_name;
|
||||
|
||||
-- SECURITY LABEL
|
||||
SECURITY LABEL FOR provider ON TABLE object_name IS 'label';
|
||||
SECURITY LABEL FOR provider ON COLUMN table_name.column_name IS 'label';
|
||||
SECURITY LABEL FOR provider ON AGGREGATE agg_name (agg_type, agg_type2) IS 'label';
|
||||
SECURITY LABEL FOR provider ON DATABASE object_name IS 'label';
|
||||
SECURITY LABEL FOR provider ON DOMAIN object_name IS 'label';
|
||||
SECURITY LABEL FOR provider ON EVENT TRIGGER object_name IS 'label';
|
||||
SECURITY LABEL FOR provider ON FOREIGN TABLE object_name IS 'label';
|
||||
SECURITY LABEL FOR provider ON FUNCTION function_name ( VARIADIC arg_name timestamp) IS 'label';
|
||||
SECURITY LABEL FOR provider ON LARGE OBJECT 2432 IS 'label';
|
||||
SECURITY LABEL FOR provider ON MATERIALIZED VIEW object_name IS 'label';
|
||||
SECURITY LABEL FOR provider ON PROCEDURAL LANGUAGE object_name IS 'label';
|
||||
SECURITY LABEL FOR provider ON ROLE object_name IS 'label';
|
||||
SECURITY LABEL FOR provider ON SCHEMA object_name IS 'label';
|
||||
SECURITY LABEL FOR provider ON SEQUENCE object_name IS 'label';
|
||||
SECURITY LABEL FOR provider ON TABLESPACE object_name IS 'label';
|
||||
SECURITY LABEL FOR provider ON TYPE object_name IS 'label';
|
||||
SECURITY LABEL FOR provider ON VIEW object_name IS 'label';
|
||||
|
||||
-- SET
|
||||
SET SESSION configuration_parameter TO DEFAULT;
|
||||
SET LOCAL TIME ZONE LOCAL;
|
||||
|
||||
-- SET CONSTRAINTS
|
||||
SET CONSTRAINTS ALL IMMEDIATE;
|
||||
SET CONSTRAINTS name1, name2 DEFERRED;
|
||||
|
||||
-- SET ROLE
|
||||
SET SESSION ROLE role_name;
|
||||
SET LOCAL ROLE NONE;
|
||||
RESET ROLE;
|
||||
|
||||
-- SET SESSION AUTHORIZATION
|
||||
SET SESSION SESSION AUTHORIZATION user_name;
|
||||
SET LOCAL SESSION AUTHORIZATION DEFAULT;
|
||||
RESET SESSION AUTHORIZATION;
|
||||
|
||||
-- SET TRANSACTION
|
||||
SET TRANSACTION ISOLATION LEVEL SERIALIZABLE
|
||||
READ WRITE
|
||||
NOT DEFERRABLE;
|
||||
SET TRANSACTION SNAPSHOT '000003A1-1';
|
||||
SET SESSION CHARACTERISTICS AS TRANSACTION ISOLATION LEVEL REPEATABLE READ READ ONLY DEFERRABLE;
|
||||
|
||||
-- SHOW
|
||||
SHOW name;
|
||||
SHOW ALL;
|
||||
|
||||
-- START TRANSACTION
|
||||
START TRANSACTION ISOLATION LEVEL SERIALIZABLE
|
||||
READ WRITE
|
||||
NOT DEFERRABLE, ISOLATION LEVEL REPEATABLE READ READ ONLY DEFERRABLE;
|
||||
START TRANSACTION;
|
||||
|
||||
-- TRUNCATE
|
||||
TRUNCATE TABLE ONLY name
|
||||
RESTART IDENTITY CASCADE;
|
||||
TRUNCATE TABLE ONLY name
|
||||
CONTINUE IDENTITY RESTRICT;
|
||||
TRUNCATE name;
|
||||
|
||||
-- UNLISTEN
|
||||
UNLISTEN *;
|
||||
UNLISTEN channel;
|
||||
|
||||
-- VACUUM
|
||||
VACUUM ( FULL, FREEZE, VERBOSE, ANALYZE, DISABLE_PAGE_SKIPPING, SKIP_LOCKED, INDEX_CLEANUP, PROCESS_MAIN, PROCESS_TOAST, TRUNCATE, PARALLEL 4,SKIP_DATABASE_STATS, ONLY_DATABASE_STATS, BUFFER_USAGE_LIMIT) table_name (column_name, column_name2);
|
||||
VACUUM FULL FREEZE VERBOSE table_name;
|
||||
VACUUM FULL FREEZE VERBOSE ANALYZE table_name (column_name,column_name2);
|
||||
VACUUM ANALYZE;
|
||||
ANALYZE;
|
||||
|
||||
-- VALUES
|
||||
VALUES (1, '3'), (3, 'sdsd')
|
||||
ORDER BY sort_expression ASC
|
||||
LIMIT 20
|
||||
OFFSET 324 ROWS;
|
||||
VALUES (1, '3'), (3, 'sdsd');
|
||||
|
189
test/parser/postgresql/syntax/fixtures/select.sql
Normal file
@ -0,0 +1,189 @@
|
||||
-- SELECT
|
||||
WITH RECURSIVE query_name (id) AS (SELECT id FROM table_expression)
|
||||
SELECT DISTINCT ON (col1,col2) random() AS name1 FROM table_expression
|
||||
WHERE name1=name1
|
||||
GROUP BY DISTINCT id
|
||||
HAVING sum(len) < interval '5 hours'
|
||||
WINDOW w AS (PARTITION BY depname ORDER BY salary DESC)
|
||||
UNION ALL (SELECT * FROM others)
|
||||
ORDER BY salary DESC
|
||||
FETCH NEXT ROWS ONLY
|
||||
OFFSET start ROWS
|
||||
FOR UPDATE OF table_name, table_name2 NOWAIT;
|
||||
SELECT;
|
||||
|
||||
SELECT * FROM db.tbs GROUP BY (col1 > 3, col2 < 8) ORDER BY col3 > 9;
|
||||
|
||||
WITH query_name (id) AS (SELECT id FROM table_expression) SELECT DISTINCT random() AS name1 FROM table_expression WHERE name1=name1 GROUP BY id HAVING sum(len) < interval '5 hours' WINDOW w AS (PARTITION BY depname ORDER BY salary DESC) INTERSECT DISTINCT (SELECT * FROM others) ORDER BY salary ASC OFFSET start FETCH NEXT ROW ONLY FOR NO KEY UPDATE;
|
||||
|
||||
WITH query_name (id) AS (SELECT id FROM table_expression) SELECT DISTINCT ON (col1) random() AS name1 FROM table_expression WHERE name1=name1 GROUP BY id HAVING sum(len) < interval '5 hours' WINDOW w AS (PARTITION BY depname ORDER BY salary DESC) EXCEPT (SELECT * FROM others) ORDER BY salary USING > NULLS FIRST OFFSET start FETCH NEXT ROW ONLY FOR SHARE;
|
||||
|
||||
WITH query_name (id) AS (SELECT id FROM table_expression) SELECT DISTINCT ON (col1) random() AS name1 FROM table_expression WHERE name1=name1 GROUP BY id HAVING sum(len) < interval '5 hours' WINDOW w AS (PARTITION BY depname ORDER BY salary DESC) EXCEPT (SELECT * FROM others) ORDER BY salary USING > NULLS FIRST OFFSET start FETCH NEXT ROW ONLY FOR KEY SHARE OF table_name NOWAIT;
|
||||
|
||||
-- SELECT INTO
|
||||
WITH RECURSIVE query_name (id) AS (SELECT id FROM table_expression)
|
||||
SELECT DISTINCT ON (col2, col3)
|
||||
INTO TEMPORARY TABLE new_table
|
||||
FROM from_item
|
||||
WHERE name2=name1
|
||||
GROUP BY name
|
||||
HAVING s > 8
|
||||
WINDOW window_name AS ( RANGE UNBOUNDED PRECEDING )
|
||||
INTERSECT DISTINCT (SELECT * FROM tb)
|
||||
ORDER BY expression_1 USING > NULLS FIRST
|
||||
LIMIT ALL
|
||||
OFFSET start ROW
|
||||
FOR UPDATE OF table_name NOWAIT;
|
||||
SELECT INTO new_table;
|
||||
|
||||
-- The Most Easy
|
||||
SELECT * ;
|
||||
|
||||
-- Querying a Table
|
||||
|
||||
SELECT city, temp_lo, temp_hi, prcp, date FROM weather;
|
||||
|
||||
SELECT city, (temp_hi+temp_lo)/2 AS temp_avg, date FROM weather;
|
||||
|
||||
SELECT * FROM weather
|
||||
WHERE city = 'San Francisco' AND prcp > 0.0;
|
||||
|
||||
SELECT * FROM weather
|
||||
ORDER BY city;
|
||||
|
||||
SELECT * FROM weather
|
||||
ORDER BY city, temp_lo;
|
||||
|
||||
SELECT DISTINCT city
|
||||
FROM weather;
|
||||
|
||||
SELECT DISTINCT city
|
||||
FROM weather
|
||||
ORDER BY city;
|
||||
|
||||
-- Joins Between Tables
|
||||
SELECT weather.city, weather.temp_lo, weather.temp_hi,
|
||||
weather.prcp, weather.date, cities.location
|
||||
FROM weather NATURAL LEFT OUTER JOIN cities
|
||||
WHERE cities.name = weather.city;
|
||||
|
||||
SELECT W1.city, W1.temp_lo AS low, W1.temp_hi AS high,
|
||||
W2.city, W2.temp_lo AS low, W2.temp_hi AS high
|
||||
FROM weather W1 NATURAL RIGHT OUTER JOIN weather W2
|
||||
WHERE W1.temp_lo < W2.temp_lo
|
||||
AND W1.temp_hi > W2.temp_hi;
|
||||
|
||||
SELECT *
|
||||
FROM weather w NATURAL FULL OUTER JOIN cities c
|
||||
WHERE w.city = c.name;
|
||||
|
||||
SELECT *
|
||||
FROM weather w CROSS JOIN cities c
|
||||
WHERE w.city = c.name;
|
||||
|
||||
-- Aggregate Functions
|
||||
|
||||
SELECT city, max(temp_lo)
|
||||
FROM weather
|
||||
WHERE city LIKE 'S%'
|
||||
GROUP BY city
|
||||
HAVING max(temp_lo) < 40;
|
||||
|
||||
-- Window Functions
|
||||
|
||||
SELECT depname, empno, salary, enroll_date
|
||||
FROM
|
||||
(SELECT depname, empno, salary, enroll_date,
|
||||
rank() OVER (PARTITION BY depname ORDER BY salary DESC, empno) AS pos
|
||||
FROM empsalary
|
||||
) AS ss
|
||||
WHERE pos < 3;
|
||||
|
||||
SELECT sum(salary) OVER w, avg(salary) OVER w
|
||||
FROM empsalary
|
||||
WINDOW w AS (PARTITION BY depname ORDER BY salary DESC);
|
||||
|
||||
SELECT name, altitude
|
||||
FROM ONLY cities
|
||||
WHERE altitude > 500;
|
||||
|
||||
SELECT name, altitude
|
||||
FROM cities*
|
||||
WHERE altitude > 500;
|
||||
|
||||
SELECT c.tableoid, c.name, c.altitude
|
||||
FROM cities c
|
||||
WHERE c.altitude > 500;
|
||||
|
||||
-- Overview
|
||||
WITH query_name (id) AS (SELECT id FROM table_expression) SELECT random() FROM table_expression ORDER BY salary DESC;
|
||||
|
||||
-- Partitioning and Constraint Exclusion
|
||||
SET constraint_exclusion = on;
|
||||
SELECT count(*) FROM measurement WHERE logdate >= DATE '2008-01-01';
|
||||
|
||||
-- Table Functions
|
||||
|
||||
SELECT * FROM foo
|
||||
WHERE foosubid IN (
|
||||
SELECT foosubid
|
||||
FROM getfoo(foo.fooid) z
|
||||
WHERE z.fooid = foo.fooid
|
||||
);
|
||||
|
||||
SELECT *
|
||||
FROM dblink('dbname=mydb', 'SELECT proname, prosrc FROM pg_proc')
|
||||
AS t1(proname name, prosrc text)
|
||||
WHERE proname LIKE 'bytea%';
|
||||
|
||||
-- The Schema Search Path
|
||||
SELECT 3 OPERATOR(pg_catalog.+) 4;
|
||||
|
||||
|
||||
-- Monetary Types
|
||||
SELECT '12.34'::float8::numeric::money;
|
||||
|
||||
-- Binary Data Types
|
||||
SELECT '\xDEADBEEF';
|
||||
|
||||
-- bytea Escape Format
|
||||
SELECT 'abc \153\154\155 \052\251\124'::bytea;
|
||||
|
||||
-- Date/Time Types Interval Input
|
||||
SELECT EXTRACT(days from '80 hours'::interval);
|
||||
|
||||
-- Text Search Types
|
||||
SELECT 'a fat cat sat on a mat and ate a fat rat'::tsvector;
|
||||
|
||||
SELECT $$the lexeme ' ' contains spaces$$::tsvector;
|
||||
|
||||
SELECT $$the lexeme 'Joe''s' contains a quote$$::tsvector;
|
||||
|
||||
SELECT 'a:1 fat:2 cat:3 sat:4 on:5 a:6 mat:7 and:8 ate:9 a:10 fat:11 rat:12'::tsvector;
|
||||
|
||||
SELECT to_tsvector('english', 'The Fat Rats');
|
||||
|
||||
SELECT 'fat & rat & ! cat'::tsquery;
|
||||
|
||||
SELECT to_tsquery('Fat:ab & Cats');
|
||||
|
||||
-- Arrays --Accessing Arrays
|
||||
SELECT name FROM sal_emp WHERE pay_by_quarter[1] <> pay_by_quarter[2];
|
||||
|
||||
SELECT schedule[1:2][2] FROM sal_emp WHERE name = 'Bill';
|
||||
|
||||
-- Arrays -- Searching in Arrays
|
||||
SELECT * FROM sal_emp WHERE pay_by_quarter[1] = 10000 OR
|
||||
pay_by_quarter[2] = 10000 OR
|
||||
pay_by_quarter[3] = 10000 OR
|
||||
pay_by_quarter[4] = 10000;
|
||||
|
||||
-- Array Input and Output Syntax
|
||||
SELECT f1[1][-2][3] AS e1, f1[1][-1][5] AS e2
|
||||
FROM (SELECT '[1:1][-2:-1][3:5]={{{1,2,3},{4,5,6}}}'::int[] AS f1) AS ss;
|
||||
|
||||
-- Containment
|
||||
SELECT int4range(10, 20) @> 3;
|
||||
|
||||
-- Object Identifier Types
|
||||
SELECT * FROM pg_attribute WHERE attrelid = 'mytable'::regclass;
|
38
test/parser/postgresql/syntax/fixtures/update.sql
Normal file
@ -0,0 +1,38 @@
UPDATE MY_TABLE SET A = 5;

UPDATE weather
SET temp_hi = temp_hi - 2, temp_lo = temp_lo - 2
WHERE date > '1994-11-28';

-- Transactions
UPDATE accounts SET balance = balance - 100.00
WHERE name = 'Alice';
UPDATE branches SET balance = balance - 100.00
WHERE name = (SELECT branch_name FROM accounts WHERE name = 'Alice');
UPDATE accounts SET balance = balance + 100.00
WHERE name = 'Bob';
UPDATE branches SET balance = balance + 100.00
WHERE name = (SELECT branch_name FROM accounts WHERE name = 'Bob');

UPDATE accounts SET balance = balance - 100.00
WHERE name = 'Alice';

UPDATE mytable SET a = 5, b = b * 3, c = c + 1 WHERE a > 0;

-- Arrays -- Modifying Arrays
UPDATE sal_emp SET pay_by_quarter = ARRAY[25000,25000,27000,27000]
WHERE name = 'Carol';

UPDATE sal_emp SET pay_by_quarter[1:2] = '{27000,27000}'
WHERE name = 'Carol';

--

-- UPDATE
WITH RECURSIVE query_name (id) AS (SELECT id FROM table_expression)
UPDATE ONLY table_name * AS alias
SET column_name = DEFAULT, (column_name, column_nam2) = ROW ( a+1,DEFAULT)
FROM from_list
WHERE a=b
RETURNING column_name AS output_name;
UPDATE table_name SET column_name = a + 3;
16
test/parser/postgresql/syntax/insertStatement.test.ts
Normal file
@ -0,0 +1,16 @@
import { PostgreSQL } from 'src/parser/postgresql';
import { readSQL } from 'test/helper';

const postgresql = new PostgreSQL();

const features = {
    inserts: readSQL(__dirname, 'insert.sql'),
};

describe('PgSQL Insert Syntax Tests', () => {
    features.inserts.forEach((insertItem) => {
        it(insertItem, () => {
            expect(postgresql.validate(insertItem).length).toBe(0);
        });
    });
});
16
test/parser/postgresql/syntax/others.test.ts
Normal file
@ -0,0 +1,16 @@
import { PostgreSQL } from 'src/parser/postgresql';
import { readSQL } from 'test/helper';

const postgresql = new PostgreSQL();

const features = {
    others: readSQL(__dirname, 'others.sql'),
};

describe('PgSQL Other SQL Syntax Tests', () => {
    features.others.forEach((other) => {
        it(other, () => {
            expect(postgresql.validate(other).length).toBe(0);
        });
    });
});
16
test/parser/postgresql/syntax/selectStatement.test.ts
Normal file
@ -0,0 +1,16 @@
import { PostgreSQL } from 'src/parser/postgresql';
import { readSQL } from 'test/helper';

const postgresql = new PostgreSQL();

const features = {
    selects: readSQL(__dirname, 'select.sql'),
};

describe('PgSQL Select Syntax Tests', () => {
    features.selects.forEach((select) => {
        it(select, () => {
            expect(postgresql.validate(select).length).toBe(0);
        });
    });
});
16
test/parser/postgresql/syntax/updateStatement.test.ts
Normal file
@ -0,0 +1,16 @@
import { PostgreSQL } from 'src/parser/postgresql';
import { readSQL } from 'test/helper';

const postgresql = new PostgreSQL();

const features = {
    updates: readSQL(__dirname, 'update.sql'),
};

describe('PgSQL Update Syntax Tests', () => {
    features.updates.forEach((update) => {
        it(update, () => {
            expect(postgresql.validate(update).length).toBe(0);
        });
    });
});
16
test/parser/postgresql/validateInvalidSql.test.ts
Normal file
@ -0,0 +1,16 @@
import { PostgreSQL } from 'src/parser/postgresql';

const randomText = `dhsdansdnkla ndjnsla ndnalks`;
const unCompleteSQL = `CREATE TABLE`;

describe('Postgres SQL validate invalid sql', () => {
    const postgresql = new PostgreSQL();

    test('validate random text', () => {
        expect(postgresql.validate(randomText).length).not.toBe(0);
    });

    test('validate unComplete sql', () => {
        expect(postgresql.validate(unCompleteSQL).length).not.toBe(0);
    });
});
33
test/parser/postgresql/visitor.test.ts
Normal file
@ -0,0 +1,33 @@
import { PostgreSQL } from 'src/parser/postgresql';
import { AbstractParseTreeVisitor } from 'antlr4ng';
import { PostgreSqlParserVisitor } from 'src/lib/postgresql/PostgreSqlParserVisitor';

describe('PostgreSQL Visitor Tests', () => {
    const expectTableName = 'user1';
    const sql = `select id,name,sex from ${expectTableName};`;
    const postgresql = new PostgreSQL();

    const parseTree = postgresql.parse(sql, (error) => {
        console.error('Parse error:', error);
    });

    test('Visitor visitTableName', () => {
        let result = '';
        class MyVisitor
            extends AbstractParseTreeVisitor<any>
            implements PostgreSqlParserVisitor<any>
        {
            protected defaultResult() {
                return result;
            }

            visitTable_ref(ctx) {
                result = ctx.getText().toLowerCase();
            }
        }
        const visitor: any = new MyVisitor();
        visitor.visit(parseTree);

        expect(result).toBe(expectTableName);
    });
});