feat: support trino(presto) sql language (#105)

* feat(trino): integrate prestoGrammar into dt-sql-parser

* feat(trino): add trinoSQL test framework

* feat(trino): integrate test files for trinoSQL

* test(trino): support alterStatement test

* test(trino): support alter table set authorization statement

* feat(trino): complete trinosql alter unit tests

* test(trino): complete dropStatement unit cases for trinosql

* test(trino): complete create statement unit cases for trinoSQL

* test(trino): complete insertStatement unit cases for trinoSQL

* test(trino): split dropStatement test files into shorter lines

* test(trino): complete selectStatement unit cases and grammar check

* test(trino): complete commentStatement unit case for trinoSQL

* test(trino): complete analyze, commit, and call clause unit cases

* test(trino): complete delete, deny, and describe statement unit cases

* test(trino): complete explain, execute, and grant statement unit cases

* feat(trino): improve GRANT Role grammar

* test(trino): complete show statement unit case

* test(trino): complete truncateTable, startTransaction, update, and values statement unit cases

* test(trino): improve update statement test cases

* test(trino): complete revoke, revoke roles, and rollback statement unit cases

* test(trino): add set statement test case

* feat: generate new trino parser and lexer files

* feat(trino): improve alter statement grammar

* test(trino): complete alter statement unit cases

* feat(trino): support case-insensitive lexers

* fix(trino): remove useless generated files

* test(trino): complete merge and reset session statement unit cases

* test(trino): complete merge and use statement unit cases

* test(trino): complete prepare and refresh materialized view statement unit cases

* test(trino): improve statement unit cases

* test(trino): complete match recognize statement unit cases

* test(trino): complete window with row pattern recognition statement unit cases
mumiao 2023-05-24 15:07:02 +08:00 committed by GitHub
parent 0924acf730
commit 9c82a5d248
119 changed files with 35039 additions and 2 deletions
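
For reference, the new Trino parser entry point added in this commit is exercised by the test specs in the diff roughly as follows (a minimal sketch; the relative import path mirrors the unit tests below and would differ for package consumers):

import trinoSQL from '../../../src/parser/trinosql';

const parser = new trinoSQL();

// Syntax check: validate() returns a list of errors, empty when the SQL parses cleanly.
const errors = parser.validate('SELECT * FROM orders LIMIT 10;');
console.log(errors.length === 0 ? 'valid' : errors);

// Tokenize a statement, as the lexer spec in this diff does.
const tokens = parser.getAllTokens('SELECT * FROM table1');
console.log(tokens.length);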

View File

@ -13,6 +13,7 @@ const entry = [
'plsql',
'spark',
'flinksql',
+ 'trinosql',
];
function compile(language) {

View File

@ -182,7 +182,7 @@ module.exports = {
},
// An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
transformIgnorePatterns: ["/node_modules/(?!antlr4)"],
transformIgnorePatterns: ["/node_modules/.pnpm/(?!antlr4)"],
// An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
// unmockedModulePathPatterns: undefined,
@ -196,6 +196,6 @@ module.exports = {
// Whether to use watchman for file crawling
// watchman: true,
moduleNameMapper: {
"^antlr4$": "<rootDir>/node_modules/antlr4/src/antlr4/index.web.js",
"^antlr4$": "<rootDir>/node_modules/antlr4/dist/antlr4.web.js",
},
};

File diff suppressed because it is too large

View File

@ -12,3 +12,5 @@ export * from './lib/spark/SparkSqlVisitor';
export * from './lib/spark/SparkSqlListener';
export * from './lib/pgsql/PostgreSQLParserListener';
export * from './lib/pgsql/PostgreSQLParserVisitor';
+ export * from './lib/trinosql/trinoSqlParserListener';
+ export * from './lib/trinosql/trinoSqlParserVisitor';

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,538 @@
T__0=1
T__1=2
T__2=3
T__3=4
T__4=5
T__5=6
T__6=7
T__7=8
T__8=9
T__9=10
T__10=11
T__11=12
T__12=13
T__13=14
T__14=15
T__15=16
ADD=17
ADMIN=18
AFTER=19
ALL=20
ALTER=21
ANALYZE=22
AND=23
ANY=24
ARRAY=25
AS=26
ASC=27
AT=28
AUTHORIZATION=29
BERNOULLI=30
BETWEEN=31
BY=32
CALL=33
CASCADE=34
CASE=35
CAST=36
CATALOGS=37
COLUMN=38
COLUMNS=39
COMMENT=40
COMMIT=41
COMMITTED=42
CONSTRAINT=43
CREATE=44
CROSS=45
CUBE=46
CURRENT=47
CURRENT_CATALOG=48
CURRENT_DATE=49
CURRENT_PATH=50
CURRENT_ROLE=51
CURRENT_SCHEMA=52
CURRENT_TIME=53
CURRENT_TIMESTAMP=54
CURRENT_USER=55
DATA=56
DATE=57
DAY=58
DEFAULT=59
DEALLOCATE=60
DEFINER=61
DELETE=62
DESC=63
DESCRIBE=64
DEFINE=65
DISTINCT=66
DISTRIBUTED=67
DOUBLE=68
DROP=69
ELSE=70
EMPTY=71
END=72
ESCAPE=73
EXCEPT=74
EXCLUDING=75
EXECUTE=76
EXISTS=77
EXPLAIN=78
EXTRACT=79
FALSE=80
FETCH=81
FILTER=82
FINAL=83
FIRST=84
FOLLOWING=85
FOR=86
FORMAT=87
FROM=88
FULL=89
FUNCTIONS=90
GRANT=91
GRANTED=92
GRANTS=93
DENY=94
GRAPHVIZ=95
GROUP=96
GROUPING=97
GROUPS=98
HAVING=99
HOUR=100
IF=101
IGNORE=102
IN=103
INCLUDING=104
INITIAL=105
INNER=106
INPUT=107
INSERT=108
INTERSECT=109
INTERVAL=110
INTO=111
INVOKER=112
IO=113
IS=114
ISOLATION=115
JOIN=116
JSON=117
LAST=118
LATERAL=119
LEFT=120
LEVEL=121
LIKE=122
LIMIT=123
LOCAL=124
LOCALTIME=125
LOCALTIMESTAMP=126
LOGICAL=127
MAP=128
MATCH=129
MATCHED=130
MATCHES=131
MATCH_RECOGNIZE=132
MATERIALIZED=133
MEASURES=134
MERGE=135
MINUTE=136
MONTH=137
NATURAL=138
NEXT=139
NFC=140
NFD=141
NFKC=142
NFKD=143
NO=144
NONE=145
NORMALIZE=146
NOT=147
NULL=148
NULLIF=149
NULLS=150
OFFSET=151
OMIT=152
ON=153
ONE=154
ONLY=155
OPTION=156
OR=157
ORDER=158
ORDINALITY=159
OUTER=160
OUTPUT=161
OVER=162
PARTITION=163
PARTITIONS=164
PAST=165
PATH=166
PATTERN=167
PER=168
PERMUTE=169
POSITION=170
PRECEDING=171
PRECISION=172
PREPARE=173
PRIVILEGES=174
PROPERTIES=175
RANGE=176
READ=177
RECURSIVE=178
REFRESH=179
RENAME=180
REPEATABLE=181
REPLACE=182
RESET=183
RESPECT=184
RESTRICT=185
REVOKE=186
RIGHT=187
ROLE=188
ROLES=189
ROLLBACK=190
ROLLUP=191
ROW=192
ROWS=193
RUNNING=194
SCHEMA=195
SCHEMAS=196
SECOND=197
SECURITY=198
SEEK=199
SELECT=200
SERIALIZABLE=201
SESSION=202
SET=203
SETS=204
SHOW=205
SOME=206
START=207
STATS=208
SUBSET=209
SUBSTRING=210
SYSTEM=211
TABLE=212
TABLES=213
TABLESAMPLE=214
TEXT=215
THEN=216
TIES=217
TIME=218
TIMESTAMP=219
TO=220
TRANSACTION=221
TRUNCATE=222
TRUE=223
TRY_CAST=224
TYPE=225
UESCAPE=226
UNBOUNDED=227
UNCOMMITTED=228
UNION=229
UNMATCHED=230
UNNEST=231
UPDATE=232
USE=233
USER=234
USING=235
VALIDATE=236
VALUES=237
VERBOSE=238
VIEW=239
WHEN=240
WHERE=241
WINDOW=242
WITH=243
WITHOUT=244
WORK=245
WRITE=246
YEAR=247
ZONE=248
EQ=249
NEQ=250
LT=251
LTE=252
GT=253
GTE=254
PLUS=255
MINUS=256
ASTERISK=257
SLASH=258
PERCENT=259
CONCAT=260
QUESTION_MARK=261
STRING=262
UNICODE_STRING=263
BINARY_LITERAL=264
INTEGER_VALUE=265
DECIMAL_VALUE=266
DOUBLE_VALUE=267
IDENTIFIER=268
DIGIT_IDENTIFIER=269
QUOTED_IDENTIFIER=270
BACKQUOTED_IDENTIFIER=271
SEMICOLON=272
SIMPLE_COMMENT=273
BRACKETED_COMMENT=274
WS=275
UNRECOGNIZED=276
DELIMITER=277
'.'=1
'('=2
')'=3
','=4
'SKIP'=5
'->'=6
'['=7
']'=8
'|'=9
'^'=10
'$'=11
'{-'=12
'-}'=13
'{'=14
'}'=15
'=>'=16
'ADD'=17
'ADMIN'=18
'AFTER'=19
'ALL'=20
'ALTER'=21
'ANALYZE'=22
'AND'=23
'ANY'=24
'ARRAY'=25
'AS'=26
'ASC'=27
'AT'=28
'AUTHORIZATION'=29
'BERNOULLI'=30
'BETWEEN'=31
'BY'=32
'CALL'=33
'CASCADE'=34
'CASE'=35
'CAST'=36
'CATALOGS'=37
'COLUMN'=38
'COLUMNS'=39
'COMMENT'=40
'COMMIT'=41
'COMMITTED'=42
'CONSTRAINT'=43
'CREATE'=44
'CROSS'=45
'CUBE'=46
'CURRENT'=47
'CURRENT_CATALOG'=48
'CURRENT_DATE'=49
'CURRENT_PATH'=50
'CURRENT_ROLE'=51
'CURRENT_SCHEMA'=52
'CURRENT_TIME'=53
'CURRENT_TIMESTAMP'=54
'CURRENT_USER'=55
'DATA'=56
'DATE'=57
'DAY'=58
'DEFAULT'=59
'DEALLOCATE'=60
'DEFINER'=61
'DELETE'=62
'DESC'=63
'DESCRIBE'=64
'DEFINE'=65
'DISTINCT'=66
'DISTRIBUTED'=67
'DOUBLE'=68
'DROP'=69
'ELSE'=70
'EMPTY'=71
'END'=72
'ESCAPE'=73
'EXCEPT'=74
'EXCLUDING'=75
'EXECUTE'=76
'EXISTS'=77
'EXPLAIN'=78
'EXTRACT'=79
'FALSE'=80
'FETCH'=81
'FILTER'=82
'FINAL'=83
'FIRST'=84
'FOLLOWING'=85
'FOR'=86
'FORMAT'=87
'FROM'=88
'FULL'=89
'FUNCTIONS'=90
'GRANT'=91
'GRANTED'=92
'GRANTS'=93
'DENY'=94
'GRAPHVIZ'=95
'GROUP'=96
'GROUPING'=97
'GROUPS'=98
'HAVING'=99
'HOUR'=100
'IF'=101
'IGNORE'=102
'IN'=103
'INCLUDING'=104
'INITIAL'=105
'INNER'=106
'INPUT'=107
'INSERT'=108
'INTERSECT'=109
'INTERVAL'=110
'INTO'=111
'INVOKER'=112
'IO'=113
'IS'=114
'ISOLATION'=115
'JOIN'=116
'JSON'=117
'LAST'=118
'LATERAL'=119
'LEFT'=120
'LEVEL'=121
'LIKE'=122
'LIMIT'=123
'LOCAL'=124
'LOCALTIME'=125
'LOCALTIMESTAMP'=126
'LOGICAL'=127
'MAP'=128
'MATCH'=129
'MATCHED'=130
'MATCHES'=131
'MATCH_RECOGNIZE'=132
'MATERIALIZED'=133
'MEASURES'=134
'MERGE'=135
'MINUTE'=136
'MONTH'=137
'NATURAL'=138
'NEXT'=139
'NFC'=140
'NFD'=141
'NFKC'=142
'NFKD'=143
'NO'=144
'NONE'=145
'NORMALIZE'=146
'NOT'=147
'NULL'=148
'NULLIF'=149
'NULLS'=150
'OFFSET'=151
'OMIT'=152
'ON'=153
'ONE'=154
'ONLY'=155
'OPTION'=156
'OR'=157
'ORDER'=158
'ORDINALITY'=159
'OUTER'=160
'OUTPUT'=161
'OVER'=162
'PARTITION'=163
'PARTITIONS'=164
'PAST'=165
'PATH'=166
'PATTERN'=167
'PER'=168
'PERMUTE'=169
'POSITION'=170
'PRECEDING'=171
'PRECISION'=172
'PREPARE'=173
'PRIVILEGES'=174
'PROPERTIES'=175
'RANGE'=176
'READ'=177
'RECURSIVE'=178
'REFRESH'=179
'RENAME'=180
'REPEATABLE'=181
'REPLACE'=182
'RESET'=183
'RESPECT'=184
'RESTRICT'=185
'REVOKE'=186
'RIGHT'=187
'ROLE'=188
'ROLES'=189
'ROLLBACK'=190
'ROLLUP'=191
'ROW'=192
'ROWS'=193
'RUNNING'=194
'SCHEMA'=195
'SCHEMAS'=196
'SECOND'=197
'SECURITY'=198
'SEEK'=199
'SELECT'=200
'SERIALIZABLE'=201
'SESSION'=202
'SET'=203
'SETS'=204
'SHOW'=205
'SOME'=206
'START'=207
'STATS'=208
'SUBSET'=209
'SUBSTRING'=210
'SYSTEM'=211
'TABLE'=212
'TABLES'=213
'TABLESAMPLE'=214
'TEXT'=215
'THEN'=216
'TIES'=217
'TIME'=218
'TIMESTAMP'=219
'TO'=220
'TRANSACTION'=221
'TRUNCATE'=222
'TRUE'=223
'TRY_CAST'=224
'TYPE'=225
'UESCAPE'=226
'UNBOUNDED'=227
'UNCOMMITTED'=228
'UNION'=229
'UNMATCHED'=230
'UNNEST'=231
'UPDATE'=232
'USE'=233
'USER'=234
'USING'=235
'VALIDATE'=236
'VALUES'=237
'VERBOSE'=238
'VIEW'=239
'WHEN'=240
'WHERE'=241
'WINDOW'=242
'WITH'=243
'WITHOUT'=244
'WORK'=245
'WRITE'=246
'YEAR'=247
'ZONE'=248
'='=249
'<'=251
'<='=252
'>'=253
'>='=254
'+'=255
'-'=256
'*'=257
'/'=258
'%'=259
'||'=260
'?'=261
';'=272

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,537 @@
T__0=1
T__1=2
T__2=3
T__3=4
T__4=5
T__5=6
T__6=7
T__7=8
T__8=9
T__9=10
T__10=11
T__11=12
T__12=13
T__13=14
T__14=15
T__15=16
ADD=17
ADMIN=18
AFTER=19
ALL=20
ALTER=21
ANALYZE=22
AND=23
ANY=24
ARRAY=25
AS=26
ASC=27
AT=28
AUTHORIZATION=29
BERNOULLI=30
BETWEEN=31
BY=32
CALL=33
CASCADE=34
CASE=35
CAST=36
CATALOGS=37
COLUMN=38
COLUMNS=39
COMMENT=40
COMMIT=41
COMMITTED=42
CONSTRAINT=43
CREATE=44
CROSS=45
CUBE=46
CURRENT=47
CURRENT_CATALOG=48
CURRENT_DATE=49
CURRENT_PATH=50
CURRENT_ROLE=51
CURRENT_SCHEMA=52
CURRENT_TIME=53
CURRENT_TIMESTAMP=54
CURRENT_USER=55
DATA=56
DATE=57
DAY=58
DEFAULT=59
DEALLOCATE=60
DEFINER=61
DELETE=62
DESC=63
DESCRIBE=64
DEFINE=65
DISTINCT=66
DISTRIBUTED=67
DOUBLE=68
DROP=69
ELSE=70
EMPTY=71
END=72
ESCAPE=73
EXCEPT=74
EXCLUDING=75
EXECUTE=76
EXISTS=77
EXPLAIN=78
EXTRACT=79
FALSE=80
FETCH=81
FILTER=82
FINAL=83
FIRST=84
FOLLOWING=85
FOR=86
FORMAT=87
FROM=88
FULL=89
FUNCTIONS=90
GRANT=91
GRANTED=92
GRANTS=93
DENY=94
GRAPHVIZ=95
GROUP=96
GROUPING=97
GROUPS=98
HAVING=99
HOUR=100
IF=101
IGNORE=102
IN=103
INCLUDING=104
INITIAL=105
INNER=106
INPUT=107
INSERT=108
INTERSECT=109
INTERVAL=110
INTO=111
INVOKER=112
IO=113
IS=114
ISOLATION=115
JOIN=116
JSON=117
LAST=118
LATERAL=119
LEFT=120
LEVEL=121
LIKE=122
LIMIT=123
LOCAL=124
LOCALTIME=125
LOCALTIMESTAMP=126
LOGICAL=127
MAP=128
MATCH=129
MATCHED=130
MATCHES=131
MATCH_RECOGNIZE=132
MATERIALIZED=133
MEASURES=134
MERGE=135
MINUTE=136
MONTH=137
NATURAL=138
NEXT=139
NFC=140
NFD=141
NFKC=142
NFKD=143
NO=144
NONE=145
NORMALIZE=146
NOT=147
NULL=148
NULLIF=149
NULLS=150
OFFSET=151
OMIT=152
ON=153
ONE=154
ONLY=155
OPTION=156
OR=157
ORDER=158
ORDINALITY=159
OUTER=160
OUTPUT=161
OVER=162
PARTITION=163
PARTITIONS=164
PAST=165
PATH=166
PATTERN=167
PER=168
PERMUTE=169
POSITION=170
PRECEDING=171
PRECISION=172
PREPARE=173
PRIVILEGES=174
PROPERTIES=175
RANGE=176
READ=177
RECURSIVE=178
REFRESH=179
RENAME=180
REPEATABLE=181
REPLACE=182
RESET=183
RESPECT=184
RESTRICT=185
REVOKE=186
RIGHT=187
ROLE=188
ROLES=189
ROLLBACK=190
ROLLUP=191
ROW=192
ROWS=193
RUNNING=194
SCHEMA=195
SCHEMAS=196
SECOND=197
SECURITY=198
SEEK=199
SELECT=200
SERIALIZABLE=201
SESSION=202
SET=203
SETS=204
SHOW=205
SOME=206
START=207
STATS=208
SUBSET=209
SUBSTRING=210
SYSTEM=211
TABLE=212
TABLES=213
TABLESAMPLE=214
TEXT=215
THEN=216
TIES=217
TIME=218
TIMESTAMP=219
TO=220
TRANSACTION=221
TRUNCATE=222
TRUE=223
TRY_CAST=224
TYPE=225
UESCAPE=226
UNBOUNDED=227
UNCOMMITTED=228
UNION=229
UNMATCHED=230
UNNEST=231
UPDATE=232
USE=233
USER=234
USING=235
VALIDATE=236
VALUES=237
VERBOSE=238
VIEW=239
WHEN=240
WHERE=241
WINDOW=242
WITH=243
WITHOUT=244
WORK=245
WRITE=246
YEAR=247
ZONE=248
EQ=249
NEQ=250
LT=251
LTE=252
GT=253
GTE=254
PLUS=255
MINUS=256
ASTERISK=257
SLASH=258
PERCENT=259
CONCAT=260
QUESTION_MARK=261
STRING=262
UNICODE_STRING=263
BINARY_LITERAL=264
INTEGER_VALUE=265
DECIMAL_VALUE=266
DOUBLE_VALUE=267
IDENTIFIER=268
DIGIT_IDENTIFIER=269
QUOTED_IDENTIFIER=270
BACKQUOTED_IDENTIFIER=271
SEMICOLON=272
SIMPLE_COMMENT=273
BRACKETED_COMMENT=274
WS=275
UNRECOGNIZED=276
'.'=1
'('=2
')'=3
','=4
'SKIP'=5
'->'=6
'['=7
']'=8
'|'=9
'^'=10
'$'=11
'{-'=12
'-}'=13
'{'=14
'}'=15
'=>'=16
'ADD'=17
'ADMIN'=18
'AFTER'=19
'ALL'=20
'ALTER'=21
'ANALYZE'=22
'AND'=23
'ANY'=24
'ARRAY'=25
'AS'=26
'ASC'=27
'AT'=28
'AUTHORIZATION'=29
'BERNOULLI'=30
'BETWEEN'=31
'BY'=32
'CALL'=33
'CASCADE'=34
'CASE'=35
'CAST'=36
'CATALOGS'=37
'COLUMN'=38
'COLUMNS'=39
'COMMENT'=40
'COMMIT'=41
'COMMITTED'=42
'CONSTRAINT'=43
'CREATE'=44
'CROSS'=45
'CUBE'=46
'CURRENT'=47
'CURRENT_CATALOG'=48
'CURRENT_DATE'=49
'CURRENT_PATH'=50
'CURRENT_ROLE'=51
'CURRENT_SCHEMA'=52
'CURRENT_TIME'=53
'CURRENT_TIMESTAMP'=54
'CURRENT_USER'=55
'DATA'=56
'DATE'=57
'DAY'=58
'DEFAULT'=59
'DEALLOCATE'=60
'DEFINER'=61
'DELETE'=62
'DESC'=63
'DESCRIBE'=64
'DEFINE'=65
'DISTINCT'=66
'DISTRIBUTED'=67
'DOUBLE'=68
'DROP'=69
'ELSE'=70
'EMPTY'=71
'END'=72
'ESCAPE'=73
'EXCEPT'=74
'EXCLUDING'=75
'EXECUTE'=76
'EXISTS'=77
'EXPLAIN'=78
'EXTRACT'=79
'FALSE'=80
'FETCH'=81
'FILTER'=82
'FINAL'=83
'FIRST'=84
'FOLLOWING'=85
'FOR'=86
'FORMAT'=87
'FROM'=88
'FULL'=89
'FUNCTIONS'=90
'GRANT'=91
'GRANTED'=92
'GRANTS'=93
'DENY'=94
'GRAPHVIZ'=95
'GROUP'=96
'GROUPING'=97
'GROUPS'=98
'HAVING'=99
'HOUR'=100
'IF'=101
'IGNORE'=102
'IN'=103
'INCLUDING'=104
'INITIAL'=105
'INNER'=106
'INPUT'=107
'INSERT'=108
'INTERSECT'=109
'INTERVAL'=110
'INTO'=111
'INVOKER'=112
'IO'=113
'IS'=114
'ISOLATION'=115
'JOIN'=116
'JSON'=117
'LAST'=118
'LATERAL'=119
'LEFT'=120
'LEVEL'=121
'LIKE'=122
'LIMIT'=123
'LOCAL'=124
'LOCALTIME'=125
'LOCALTIMESTAMP'=126
'LOGICAL'=127
'MAP'=128
'MATCH'=129
'MATCHED'=130
'MATCHES'=131
'MATCH_RECOGNIZE'=132
'MATERIALIZED'=133
'MEASURES'=134
'MERGE'=135
'MINUTE'=136
'MONTH'=137
'NATURAL'=138
'NEXT'=139
'NFC'=140
'NFD'=141
'NFKC'=142
'NFKD'=143
'NO'=144
'NONE'=145
'NORMALIZE'=146
'NOT'=147
'NULL'=148
'NULLIF'=149
'NULLS'=150
'OFFSET'=151
'OMIT'=152
'ON'=153
'ONE'=154
'ONLY'=155
'OPTION'=156
'OR'=157
'ORDER'=158
'ORDINALITY'=159
'OUTER'=160
'OUTPUT'=161
'OVER'=162
'PARTITION'=163
'PARTITIONS'=164
'PAST'=165
'PATH'=166
'PATTERN'=167
'PER'=168
'PERMUTE'=169
'POSITION'=170
'PRECEDING'=171
'PRECISION'=172
'PREPARE'=173
'PRIVILEGES'=174
'PROPERTIES'=175
'RANGE'=176
'READ'=177
'RECURSIVE'=178
'REFRESH'=179
'RENAME'=180
'REPEATABLE'=181
'REPLACE'=182
'RESET'=183
'RESPECT'=184
'RESTRICT'=185
'REVOKE'=186
'RIGHT'=187
'ROLE'=188
'ROLES'=189
'ROLLBACK'=190
'ROLLUP'=191
'ROW'=192
'ROWS'=193
'RUNNING'=194
'SCHEMA'=195
'SCHEMAS'=196
'SECOND'=197
'SECURITY'=198
'SEEK'=199
'SELECT'=200
'SERIALIZABLE'=201
'SESSION'=202
'SET'=203
'SETS'=204
'SHOW'=205
'SOME'=206
'START'=207
'STATS'=208
'SUBSET'=209
'SUBSTRING'=210
'SYSTEM'=211
'TABLE'=212
'TABLES'=213
'TABLESAMPLE'=214
'TEXT'=215
'THEN'=216
'TIES'=217
'TIME'=218
'TIMESTAMP'=219
'TO'=220
'TRANSACTION'=221
'TRUNCATE'=222
'TRUE'=223
'TRY_CAST'=224
'TYPE'=225
'UESCAPE'=226
'UNBOUNDED'=227
'UNCOMMITTED'=228
'UNION'=229
'UNMATCHED'=230
'UNNEST'=231
'UPDATE'=232
'USE'=233
'USER'=234
'USING'=235
'VALIDATE'=236
'VALUES'=237
'VERBOSE'=238
'VIEW'=239
'WHEN'=240
'WHERE'=241
'WINDOW'=242
'WITH'=243
'WITHOUT'=244
'WORK'=245
'WRITE'=246
'YEAR'=247
'ZONE'=248
'='=249
'<'=251
'<='=252
'>'=253
'>='=254
'+'=255
'-'=256
'*'=257
'/'=258
'%'=259
'||'=260
'?'=261
';'=272

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

src/parser/trinosql.ts (new file, 17 lines)
View File

@ -0,0 +1,17 @@
import { CharStream, CommonTokenStream, Lexer } from 'antlr4';
import trinoSqlLexer from '../lib/trinosql/trinoSqlParserLexer';
import trinoSqlParser from '../lib/trinosql/trinoSqlParserParser';
import BasicParser from './common/basicParser';
export default class trinoSQL extends BasicParser {
public createLexer(input: string): trinoSqlLexer {
const chars = new CharStream(input.toUpperCase()); // some lexers only support uppercase tokens, so the input is transformed here
const lexer = new trinoSqlLexer(chars);
return lexer;
}
public createParserFromLexer(lexer: Lexer): trinoSqlParser {
const tokens = new CommonTokenStream(lexer);
const parser = new trinoSqlParser(tokens);
return parser;
}
}
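
As the file stands in this diff, createLexer() upper-cases the whole input before lexing, so token text comes back upper-cased no matter how the SQL was written. A small illustration using the same getAllTokens() helper that the lexer spec below relies on (a sketch, not part of the commit):

import trinoSQL from '../../../src/parser/trinosql';

const parser = new trinoSQL();
// Both spellings yield the same token stream, because the input is
// upper-cased inside createLexer() before it reaches the generated lexer.
const lower = parser.getAllTokens('select * from table1');
const upper = parser.getAllTokens('SELECT * FROM TABLE1');
console.log(lower.length === upper.length); // true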

View File

@ -0,0 +1,12 @@
import trinoSQL from '../../../src/parser/trinosql';
describe('trinoSQL Lexer tests', () => {
const parser = new trinoSQL();
const sql = 'SELECT * FROM table1';
const tokens = parser.getAllTokens(sql);
test('token counts', () => {
expect(tokens.length - 1).toBe(7);
});
});

View File

@ -0,0 +1,29 @@
import trinoSQL from '../../../src/parser/trinosql';
import trinoSqlParserListener from '../../../src/lib/trinosql/trinoSqlParserListener';
import { TableExpressionContext } from '../../../src/lib/trinosql/trinoSqlParser';
describe('trino SQL Listener Tests', () => {
const expectTableName = 'user1';
const sql = `select id,name,sex from ${expectTableName};`;
const parser = new trinoSQL();
const parserTree = parser.parse(sql);
test('Listener enterTableName', async () => {
let result = '';
class MyListener extends trinoSqlParserListener {
constructor() {
super()
}
enterTableExpression = (ctx: TableExpressionContext): void => {
result = ctx.getText().toLowerCase();
}
}
const listenTableName = new MyListener();
await parser.listen(listenTableName, parserTree);
expect(result).toBe(expectTableName);
});
});

View File

@ -0,0 +1,34 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
const features = {
table: readSQL(__dirname, 'alter_table.sql'),
view: readSQL(__dirname, 'alter_view.sql'),
schema: readSQL(__dirname, 'alter_schema.sql'),
materializedView: readSQL(__dirname, 'alter_materialized_view.sql')
};
describe('TrinoSQL Alter Statements Syntax Tests', () => {
const parser = new TrinoSQL();
features.table.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.view.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.schema.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.materializedView.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,18 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
const features = {
analyze: readSQL(__dirname, 'analyze.sql'),
};
describe('TrinoSQL Analyze Statements Syntax Tests', () => {
const parser = new TrinoSQL();
// analyze statements
features.analyze.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,18 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
const features = {
call: readSQL(__dirname, 'call.sql'),
};
describe('TrinoSQL Call Statements Syntax Tests', () => {
const parser = new TrinoSQL();
// call statements
features.call.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,18 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
const features = {
comment: readSQL(__dirname, 'comment.sql'),
};
describe('TrinoSQL Comment Statements Syntax Tests', () => {
const parser = new TrinoSQL();
// Comment statements
features.comment.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,17 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
const features = {
commit: readSQL(__dirname, 'commit.sql'),
};
describe('TrinoSQL Commit Statements Syntax Tests', () => {
const parser = new TrinoSQL();
// commit statements
features.commit.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,48 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
const features = {
table: readSQL(__dirname, 'create_table.sql'),
view: readSQL(__dirname, 'create_view.sql'),
schema: readSQL(__dirname, 'create_schema.sql'),
role: readSQL(__dirname, 'create_role.sql'),
tableAsSelect: readSQL(__dirname, 'create_table_as_select.sql'),
materializedView: readSQL(__dirname, 'create_materialized_view.sql'),
};
describe('TrinoSQL Create Statements Syntax Tests', () => {
const parser = new TrinoSQL();
features.table.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.view.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.schema.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.tableAsSelect.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.role.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.materializedView.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,17 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
const features = {
deallocatePrepare: readSQL(__dirname, 'deallocate_prepare.sql'),
};
describe('TrinoSQL deallocatePrepare Statements Syntax Tests', () => {
const parser = new TrinoSQL();
// deallocate_prepare statements
features.deallocatePrepare.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,17 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
const features = {
delete: readSQL(__dirname, 'delete.sql'),
};
describe('TrinoSQL Delete Statements Syntax Tests', () => {
const parser = new TrinoSQL();
// delete statements
features.delete.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,17 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
const features = {
deny: readSQL(__dirname, 'deny.sql'),
};
describe('TrinoSQL Deny Statements Syntax Tests', () => {
const parser = new TrinoSQL();
// deny statements
features.deny.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,17 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
const features = {
describe: readSQL(__dirname, 'describe.sql'),
};
describe('TrinoSQL Describe Statements Syntax Tests', () => {
const parser = new TrinoSQL();
// describe statements
features.describe.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,48 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
const features = {
table: readSQL(__dirname, 'drop_table.sql'),
view: readSQL(__dirname, 'drop_view.sql'),
schema: readSQL(__dirname, 'drop_schema.sql'),
role: readSQL(__dirname, 'drop_role.sql'),
column: readSQL(__dirname, 'drop_column.sql'),
materializedView: readSQL(__dirname, 'drop_materialized_view.sql'),
};
describe('TrinoSQL Drop Statements Syntax Tests', () => {
const parser = new TrinoSQL();
features.table.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.view.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.schema.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.column.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.role.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.materializedView.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,17 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
const features = {
execute: readSQL(__dirname, 'execute.sql'),
};
describe('TrinoSQL Execute Statements Syntax Tests', () => {
const parser = new TrinoSQL();
// execute statements
features.execute.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,17 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
const features = {
explain: readSQL(__dirname, 'explain.sql'),
};
describe('TrinoSQL Explain Statements Syntax Tests', () => {
const parser = new TrinoSQL();
// explain statements
features.explain.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -0,0 +1,5 @@
ALTER MATERIALIZED VIEW people RENAME TO users;
ALTER MATERIALIZED VIEW IF EXISTS people RENAME TO users;
ALTER MATERIALIZED VIEW people SET PROPERTIES x = 'y';
ALTER MATERIALIZED VIEW people SET PROPERTIES foo = 123, bar = 456;
ALTER MATERIALIZED VIEW people SET PROPERTIES x = DEFAULT;

View File

@ -0,0 +1,7 @@
ALTER SCHEMA foo RENAME TO bar;
ALTER SCHEMA foo.bar RENAME TO baz;
ALTER SCHEMA "awesome schema"."awesome table" RENAME TO "even more awesome table";
ALTER SCHEMA web SET AUTHORIZATION alice;
ALTER SCHEMA web SET AUTHORIZATION ROLE alice;
ALTER SCHEMA web SET AUTHORIZATION USER alice;

View File

@ -0,0 +1,15 @@
ALTER TABLE users RENAME TO people;
ALTER TABLE IF EXISTS users RENAME TO people;
ALTER TABLE users ADD COLUMN zip varchar;
ALTER TABLE IF EXISTS users ADD COLUMN IF NOT EXISTS zip varchar;
ALTER TABLE users DROP COLUMN zip;
ALTER TABLE IF EXISTS users DROP COLUMN IF EXISTS zip;
ALTER TABLE users RENAME COLUMN id TO user_id;
ALTER TABLE IF EXISTS users RENAME column IF EXISTS id to user_id;
ALTER TABLE people SET AUTHORIZATION alice;
ALTER TABLE people SET AUTHORIZATION ROLE PUBLIC;
ALTER TABLE people SET PROPERTIES x = 'y';
ALTER TABLE people SET PROPERTIES foo = 123, "foo bar" = 456;
ALTER TABLE people SET PROPERTIES x = DEFAULT;
ALTER TABLE hive.schema.test_table EXECUTE optimize(file_size_threshold => '10MB');

View File

@ -0,0 +1,4 @@
ALTER VIEW people RENAME TO users;
ALTER VIEW people SET AUTHORIZATION alice;
ALTER VIEW people SET AUTHORIZATION USER alice;
ALTER VIEW people SET AUTHORIZATION ROLE alice;

View File

@ -0,0 +1,4 @@
ANALYZE foo;
ANALYZE foo WITH ( "string" = 'bar', "long" = 42, computed = concat('ban', 'ana'), a = ARRAY[ 'v1', 'v2' ] );
EXPLAIN ANALYZE foo;
EXPLAIN ANALYZE ANALYZE foo;

View File

@ -0,0 +1,3 @@
CALL foo();
CALL foo(123, a => 1, b => 'go', 456);
CALL catalog.schema.test();

View File

@ -0,0 +1,7 @@
COMMENT ON TABLE users IS 'master table';
COMMENT ON COLUMN users.name IS 'full name';
SHOW COMMENT ON COLUMN column1;
SHOW COMMENT ON TABLE table1;

View File

@ -0,0 +1,2 @@
COMMIT;
COMMIT WORK;

View File

@ -0,0 +1,5 @@
CREATE MATERIALIZED VIEW a AS SELECT * FROM t;
CREATE OR REPLACE MATERIALIZED VIEW catalog.schema.matview COMMENT 'A simple materialized view' AS SELECT * FROM catalog2.schema2.tab;
CREATE OR REPLACE MATERIALIZED VIEW catalog.schema.matview COMMENT 'A simple materialized view' AS SELECT * FROM catalog2.schema2.tab;
CREATE OR REPLACE MATERIALIZED VIEW catalog.schema.matview COMMENT 'A simple materialized view' WITH (partitioned_by = ARRAY ['dateint']) AS SELECT * FROM catalog2.schema2.tab;
CREATE OR REPLACE MATERIALIZED VIEW catalog.schema.matview COMMENT 'A partitioned materialized view' WITH (partitioned_by = ARRAY ['dateint']) AS WITH a (t, u) AS (SELECT * FROM x), b AS (SELECT * FROM a) TABLE b;

View File

@ -0,0 +1,10 @@
CREATE ROLE role;
CREATE ROLE role1 WITH ADMIN admin;
CREATE ROLE "role" WITH ADMIN "admin";
CREATE ROLE "ro le" WITH ADMIN "ad min";
CREATE ROLE "!@#$%^&*'" WITH ADMIN "ад""мін";
CREATE ROLE role2 WITH ADMIN USER admin1;
CREATE ROLE role2 WITH ADMIN ROLE role1;
CREATE ROLE role2 WITH ADMIN CURRENT_USER;
CREATE ROLE role2 WITH ADMIN CURRENT_ROLE;
CREATE ROLE role WITH ADMIN CURRENT_ROLE IN my_catalog;

View File

@ -0,0 +1,4 @@
CREATE SCHEMA test;
CREATE SCHEMA IF NOT EXISTS test;
CREATE SCHEMA test WITH (a = 'apple', b = 123);
CREATE SCHEMA "some name that contains space";

View File

@ -0,0 +1,2 @@
CREATE TABLE IF NOT EXISTS bar (LIKE like_table);
CREATE TABLE IF NOT EXISTS bar (LIKE like_table INCLUDING PROPERTIES);

View File

@ -0,0 +1,19 @@
CREATE TABLE foo AS SELECT * FROM t;
CREATE TABLE foo(x) AS SELECT a FROM t;
CREATE TABLE foo(x,y) AS SELECT a,b FROM t;
CREATE TABLE IF NOT EXISTS foo AS SELECT * FROM t;
CREATE TABLE IF NOT EXISTS foo(x) AS SELECT a FROM t;
CREATE TABLE IF NOT EXISTS foo(x,y) AS SELECT a,b FROM t;
CREATE TABLE foo AS SELECT * FROM t WITH NO DATA;
CREATE TABLE foo(x) AS SELECT a FROM t WITH NO DATA;
CREATE TABLE foo(x,y) AS SELECT a,b FROM t WITH NO DATA;
CREATE TABLE foo WITH ( string = 'bar', long = 42, computed = 'ban' || 'ana', a = ARRAY[ 'v1', 'v2' ] ) AS SELECT * FROM t;
CREATE TABLE foo(x) WITH ( string = 'bar', long = 42, computed = 'ban' || 'ana', a = ARRAY[ 'v1', 'v2' ] ) AS SELECT a FROM t;
CREATE TABLE foo(x,y) WITH ( string = 'bar', long = 42, computed = 'ban' || 'ana', a = ARRAY[ 'v1', 'v2' ] ) AS SELECT a,b FROM t;
CREATE TABLE foo WITH ( string = 'bar', long = 42, computed = 'ban' || 'ana', a = ARRAY[ 'v1', 'v2' ] ) AS SELECT * FROM t WITH NO DATA;
CREATE TABLE foo(x) WITH ( string = 'bar', long = 42, computed = 'ban' || 'ana', a = ARRAY[ 'v1', 'v2' ] ) AS SELECT a FROM t WITH NO DATA;
CREATE TABLE foo(x,y) WITH ( string = 'bar', long = 42, computed = 'ban' || 'ana', a = ARRAY[ 'v1', 'v2' ] ) AS SELECT a,b FROM t WITH NO DATA;
CREATE TABLE foo COMMENT 'test' WITH ( string = 'bar', long = 42, computed = 'ban' || 'ana', a = ARRAY[ 'v1', 'v2' ] ) AS SELECT * FROM t WITH NO DATA;
CREATE TABLE foo(x) COMMENT 'test' WITH ( string = 'bar', long = 42, computed = 'ban' || 'ana', a = ARRAY[ 'v1', 'v2' ] ) AS SELECT a FROM t WITH NO DATA;
CREATE TABLE foo(x,y) COMMENT 'test' WITH ( string = 'bar', long = 42, computed = 'ban' || 'ana', a = ARRAY[ 'v1', 'v2' ] ) AS SELECT a,b FROM t WITH NO DATA;
CREATE TABLE foo(x,y) COMMENT 'test' WITH ( "string" = 'bar', "long" = 42, computed = 'ban' || 'ana', a = ARRAY[ 'v1', 'v2' ] ) AS SELECT a,b FROM t WITH NO DATA;

View File

@ -0,0 +1,11 @@
CREATE VIEW a AS SELECT * FROM t;
CREATE OR REPLACE VIEW a AS SELECT * FROM t;
CREATE VIEW a SECURITY DEFINER AS SELECT * FROM t;
CREATE VIEW a SECURITY INVOKER AS SELECT * FROM t;
CREATE VIEW a COMMENT 'comment' SECURITY DEFINER AS SELECT * FROM t;
CREATE VIEW a COMMENT '' SECURITY INVOKER AS SELECT * FROM t;
CREATE VIEW a COMMENT 'comment' AS SELECT * FROM t;
CREATE VIEW a COMMENT '' AS SELECT * FROM t;
CREATE VIEW bar.foo AS SELECT * FROM t;
CREATE VIEW "awesome view" AS SELECT * FROM t;
CREATE VIEW "awesome schema"."awesome view" AS SELECT * FROM t;

View File

@ -0,0 +1 @@
DEALLOCATE PREPARE my_query;

View File

@ -0,0 +1,5 @@
DELETE FROM t;
DELETE FROM "awesome table";
DELETE FROM t WHERE a = b;
DELETE FROM lineitem
WHERE orderkey IN (SELECT orderkey FROM orders WHERE priority = 'LOW');

View File

@ -0,0 +1,4 @@
DENY INSERT, DELETE ON t TO u;
DENY UPDATE ON t TO u;
DENY ALL PRIVILEGES ON TABLE t TO USER u;
DENY SELECT ON SCHEMA s TO USER u;

View File

@ -0,0 +1,6 @@
-- DESCRIBE INPUT
DESCRIBE INPUT myquery;
-- DESCRIBE OUTPUT
DESCRIBE OUTPUT myquery;
-- DESCRIBE table_name
DESCRIBE table_name;

View File

@ -0,0 +1,5 @@
ALTER TABLE foo.t DROP COLUMN c;
ALTER TABLE "t x" DROP COLUMN "c d";
ALTER TABLE IF EXISTS foo.t DROP COLUMN c;
ALTER TABLE foo.t DROP COLUMN IF EXISTS c;
ALTER TABLE IF EXISTS foo.t DROP COLUMN IF EXISTS c;

View File

@ -0,0 +1,6 @@
DROP MATERIALIZED VIEW a;
DROP MATERIALIZED VIEW a.b;
DROP MATERIALIZED VIEW a.b.c;
DROP MATERIALIZED VIEW IF EXISTS a;
DROP MATERIALIZED VIEW IF EXISTS a.b;
DROP MATERIALIZED VIEW IF EXISTS a.b.c;

View File

@ -0,0 +1,4 @@
DROP ROLE role;
DROP ROLE "role";
DROP ROLE "ro le";
DROP ROLE "!@#$%^&*'ад""мін";

View File

@ -0,0 +1,5 @@
DROP SCHEMA test;
DROP SCHEMA test CASCADE;
DROP SCHEMA IF EXISTS test;
DROP SCHEMA IF EXISTS test RESTRICT;
DROP SCHEMA "some schema that contains space";

View File

@ -0,0 +1,8 @@
DROP TABLE a;
DROP TABLE a.b;
DROP TABLE a.b.c;
DROP TABLE a."b/y".c;
DROP TABLE IF EXISTS a;
DROP TABLE IF EXISTS a.b;
DROP TABLE IF EXISTS a.b.c;
DROP TABLE IF EXISTS a."b/y".c;

View File

@ -0,0 +1,6 @@
DROP VIEW a;
DROP VIEW a.b;
DROP VIEW a.b.c;
DROP VIEW IF EXISTS a;
DROP VIEW IF EXISTS a.b;
DROP VIEW IF EXISTS a.b.c;

View File

@ -0,0 +1,11 @@
PREPARE my_select1 FROM
SELECT name FROM nation;
EXECUTE my_select1;
-- execute with using
PREPARE my_select2 FROM
SELECT name FROM nation WHERE regionkey = ? and nationkey < ?;
EXECUTE my_select2 USING 1, 3;

View File

@ -0,0 +1,6 @@
EXPLAIN SELECT * FROM t;
EXPLAIN (TYPE LOGICAL) SELECT * FROM t;
EXPLAIN (TYPE LOGICAL, FORMAT TEXT) SELECT * FROM t;
-- EXPLAIN ANALYZE
EXPLAIN ANALYZE SELECT * FROM t;
EXPLAIN ANALYZE VERBOSE SELECT * FROM t;

View File

@ -0,0 +1,15 @@
GRANT INSERT, DELETE ON t TO u;
GRANT UPDATE ON t TO u;
GRANT SELECT ON t TO ROLE PUBLIC WITH GRANT OPTION;
GRANT ALL PRIVILEGES ON TABLE t TO USER u;
GRANT DELETE ON "t" TO ROLE "public" WITH GRANT OPTION;
GRANT SELECT ON SCHEMA s TO USER u;
-- GRANT role
GRANT role1 TO user1;
GRANT role1, role2, role3 TO user1, USER user2, ROLE role4 WITH ADMIN OPTION;
GRANT role1 TO user1 WITH ADMIN OPTION GRANTED BY admin;
GRANT role1 TO USER user1 WITH ADMIN OPTION GRANTED BY USER admin;
GRANT role1 TO ROLE role2 WITH ADMIN OPTION GRANTED BY ROLE admin;
GRANT role1 TO ROLE role2 GRANTED BY ROLE admin;
GRANT "role1" TO ROLE "role2" GRANTED BY ROLE "admin";
GRANT role1 TO user1 IN my_catalog;

View File

@ -0,0 +1 @@
SELECT * FROM a, b;

View File

@ -0,0 +1,12 @@
INSERT INTO orders
SELECT * FROM new_orders;
INSERT INTO cities VALUES (1, 'San Francisco');
INSERT INTO cities VALUES (2, 'San Jose'), (3, 'Oakland');
INSERT INTO nation (nationkey, name, regionkey, comment)
VALUES (26, 'POLAND', 3, 'no comment');
INSERT INTO nation (nationkey, name, regionkey)
VALUES (26, 'POLAND', 3);

View File

@ -0,0 +1,16 @@
SELECT * FROM orders MATCH_RECOGNIZE(
PARTITION BY custkey
ORDER BY orderdate
MEASURES
A.totalprice AS starting_price,
LAST(B.totalprice) AS bottom_price,
LAST(U.totalprice) AS top_price
ONE ROW PER MATCH
AFTER MATCH SKIP PAST LAST ROW
PATTERN (A B+ C+ D+)
SUBSET U = (C, D)
DEFINE
B AS totalprice < PREV(totalprice),
C AS totalprice > PREV(totalprice) AND totalprice <= A.totalprice,
D AS totalprice > PREV(totalprice)
);

View File

@ -0,0 +1 @@
MERGE INTO inventory AS i USING changes AS c ON i.part = c.part WHEN MATCHED AND c.action = 'mod' THEN UPDATE SET qty = qty + c.qty , ts = CURRENT_TIMESTAMP WHEN MATCHED AND c.action = 'del' THEN DELETE WHEN NOT MATCHED AND c.action = 'new' THEN INSERT (part, qty) VALUES (c.part, c.qty);

View File

@ -0,0 +1,9 @@
PREPARE myquery FROM select * from foo;
PREPARE myquery FROM SELECT ?, ? FROM foo;
PREPARE myquery FROM SELECT * FROM foo LIMIT ?;
PREPARE myquery FROM SELECT ?, ? FROM foo LIMIT ?;
PREPARE myquery FROM SELECT ? FROM foo FETCH FIRST ? ROWS ONLY;
PREPARE myquery FROM SELECT ?, ? FROM foo FETCH NEXT ? ROWS WITH TIES;
PREPARE myquery FROM SELECT ?, ? FROM foo OFFSET ? ROWS;
PREPARE myquery FROM SELECT ? FROM foo OFFSET ? ROWS LIMIT ?;
PREPARE myquery FROM SELECT ? FROM foo OFFSET ? ROWS FETCH FIRST ? ROWS WITH TIES;

View File

@ -0,0 +1,2 @@
REFRESH MATERIALIZED VIEW test;
REFRESH MATERIALIZED VIEW "some name that contains space";

View File

@ -0,0 +1,2 @@
RESET SESSION foo.bar;
RESET SESSION foo;

View File

@ -0,0 +1,6 @@
REVOKE INSERT, DELETE ON t FROM u;
REVOKE UPDATE ON t FROM u;
REVOKE GRANT OPTION FOR SELECT ON t FROM ROLE PUBLIC;
REVOKE ALL PRIVILEGES ON TABLE t FROM USER u;
REVOKE DELETE ON TABLE "t" FROM "u";
REVOKE SELECT ON SCHEMA s FROM USER u;

View File

@ -0,0 +1,7 @@
REVOKE role1 FROM user1;
REVOKE ADMIN OPTION FOR role1, role2, role3 FROM user1, USER user2, ROLE role4;
REVOKE ADMIN OPTION FOR role1 FROM user1 GRANTED BY admin;
REVOKE ADMIN OPTION FOR role1 FROM USER user1 GRANTED BY USER admin;
REVOKE role1 FROM ROLE role2 GRANTED BY ROLE admin;
REVOKE "role1" FROM ROLE "role2" GRANTED BY ROLE "admin";
REVOKE role1 FROM user1 IN my_catalog;

View File

@ -0,0 +1,2 @@
ROLLBACK;
ROLLBACK WORK;

View File

@ -0,0 +1,117 @@
-- DOUBLE IN Query
SELECT 123.456E7 FROM DUAL;
-- GROUP BY
SELECT * FROM table1 GROUP BY a;
SELECT * FROM table1 GROUP BY a, b;
SELECT * FROM table1 GROUP BY ();
-- GROUP BY GROUPING SETS
SELECT * FROM table1 GROUP BY GROUPING SETS (a);
SELECT a, b, GROUPING(a, b) FROM table1 GROUP BY GROUPING SETS ((a), (b));
-- GROUP BY ROLLUP
SELECT * FROM table1 GROUP BY ALL GROUPING SETS ((a, b), (a), ()), CUBE (c), ROLLUP (d);
SELECT * FROM table1 GROUP BY DISTINCT GROUPING SETS ((a, b), (a), ()), CUBE (c), ROLLUP (d);
-- GROUP BY CUBE
SELECT origin_state, destination_state, sum(package_weight)
FROM shipping
GROUP BY CUBE (origin_state, destination_state);
-- GROUP BY Combining multiple grouping expressions
SELECT origin_state, destination_state, origin_zip, sum(package_weight)
FROM shipping
GROUP BY
GROUPING SETS ((origin_state, destination_state)),
ROLLUP (origin_zip);
SELECT origin_state, destination_state, origin_zip, sum(package_weight)
FROM shipping
GROUP BY GROUPING SETS (
(origin_state, destination_state, origin_zip),
(origin_state, destination_state)
);
SELECT origin_state, destination_state, origin_zip, sum(package_weight)
FROM shipping
GROUP BY
GROUPING SETS ((origin_state, destination_state)),
GROUPING SETS ((origin_zip), ());
-- GROUP BY ALL and DISTINCT quantifiers
SELECT origin_state, destination_state, origin_zip, sum(package_weight)
FROM shipping
GROUP BY ALL
CUBE (origin_state, destination_state),
ROLLUP (origin_state, origin_zip);
SELECT origin_state, destination_state, origin_zip, sum(package_weight)
FROM shipping
GROUP BY GROUPING SETS (
(origin_state, destination_state, origin_zip),
(origin_state, origin_zip),
(origin_state, destination_state, origin_zip),
(origin_state, origin_zip),
(origin_state, destination_state),
(origin_state),
(origin_state, destination_state),
(origin_state),
(origin_state, destination_state),
(origin_state),
(destination_state),
()
);
SELECT origin_state, destination_state, origin_zip, sum(package_weight)
FROM shipping
GROUP BY DISTINCT
CUBE (origin_state, destination_state),
ROLLUP (origin_state, origin_zip);
SELECT origin_state, destination_state, origin_zip, sum(package_weight)
FROM shipping
GROUP BY GROUPING SETS (
(origin_state, destination_state, origin_zip),
(origin_state, origin_zip),
(origin_state, destination_state),
(origin_state),
(destination_state),
()
);
-- GROUP BY GROUPING operation
SELECT origin_state, origin_zip, destination_state, sum(package_weight),
grouping(origin_state, origin_zip, destination_state)
FROM shipping
GROUP BY GROUPING SETS (
(origin_state),
(origin_state, origin_zip),
(destination_state)
);
-- ORDER BY
SELECT * FROM table1 ORDER BY a;
-- Select expressions
SELECT (CAST(ROW(1, true) AS ROW(field1 bigint, field2 boolean))).* AS (alias1, alias2);
SELECT (CAST(ROW(1, true) AS ROW(field1 bigint, field2 boolean))).*;
SELECT (ROW(1, true)).*;
-- LIMIT
SELECT * FROM table1 LIMIT 2;
SELECT * FROM table1 LIMIT ALL;
SELECT * FROM (VALUES (1, '1'), (2, '2')) LIMIT ALL;
-- HAVING
SELECT count(*), mktsegment, nationkey,
CAST(sum(acctbal) AS bigint) AS totalbal
FROM customer
GROUP BY mktsegment, nationkey
HAVING sum(acctbal) > 5700000
ORDER BY totalbal DESC;
-- WINDOW
SELECT orderkey, clerk, totalprice,
rank() OVER w AS rnk
FROM orders
WINDOW w AS (PARTITION BY clerk ORDER BY totalprice DESC)
ORDER BY count() OVER w, clerk, rnk;
-- AGGREGATION FILTER / ORDER BY
SELECT SUM(x) FILTER (WHERE x > 4);
SELECT array_agg(x ORDER BY t.y) FROM t;
-- INTERSECT
SELECT 123 INTERSECT DISTINCT SELECT 123 INTERSECT ALL SELECT 123;
-- substring_built_in_function
SELECT substring('string' FROM 2);
SELECT substring('string' FROM 2 FOR 3);

View File

@ -0,0 +1,56 @@
SELECT * FROM UNNEST(ARRAY[1,2]) AS t(number);
SELECT * FROM UNNEST(
map_from_entries(
ARRAY[
('SQL',1974),
('Java', 1995)
]
)
) AS t(language, first_appeared_year);
SELECT *
FROM UNNEST(
ARRAY[
ROW('Java', 1995),
ROW('SQL' , 1974)],
ARRAY[
ROW(false),
ROW(true)]
) as t(language,first_appeared_year,declarative);
SELECT a, b, rownumber
FROM UNNEST (
ARRAY[2, 5],
ARRAY[7, 8, 9]
) WITH ORDINALITY AS t(a, b, rownumber);
SELECT * FROM UNNEST (ARRAY[]) AS t(value);
SELECT * FROM UNNEST (CAST(null AS ARRAY(integer))) AS t(number);
SELECT student, score
FROM (
VALUES
('John', ARRAY[7, 10, 9]),
('Mary', ARRAY[4, 8, 9])
) AS tests (student, scores)
CROSS JOIN UNNEST(scores) AS t(score);
SELECT numbers, animals, n, a
FROM (
VALUES
(ARRAY[2, 5], ARRAY['dog', 'cat', 'bird']),
(ARRAY[7, 8, 9], ARRAY['cow', 'pig'])
) AS x (numbers, animals)
CROSS JOIN UNNEST(numbers, animals) AS t (n, a);
SELECT runner, checkpoint
FROM (
VALUES
('Joe', ARRAY[10, 20, 30, 42]),
('Roger', ARRAY[10]),
('Dave', ARRAY[]),
('Levi', NULL)
) AS marathon (runner, checkpoints)
LEFT JOIN UNNEST(checkpoints) AS t(checkpoint) ON TRUE;

View File

@ -0,0 +1,28 @@
SELECT a, b
FROM (
SELECT a, MAX(b) AS b FROM t GROUP BY a
) AS x;
WITH x AS (SELECT a, MAX(b) AS b FROM t GROUP BY a)
SELECT a, b FROM x;
WITH
t1 AS (SELECT a, MAX(b) AS b FROM x GROUP BY a),
t2 AS (SELECT a, AVG(d) AS d FROM y GROUP BY a)
SELECT t1.*, t2.*
FROM t1
JOIN t2 ON t1.a = t2.a;
WITH
x AS (SELECT a FROM t),
y AS (SELECT a AS b FROM x),
z AS (SELECT b AS c FROM y)
SELECT c FROM z;
WITH RECURSIVE t(n) AS (
VALUES (1)
UNION ALL
SELECT n + 1 FROM t WHERE n < 4
)
SELECT sum(n) FROM t;

View File

@ -0,0 +1,4 @@
SELECT EXISTS(SELECT 1);
SELECT EXISTS(SELECT 1) = EXISTS(SELECT 2);
SELECT NOT EXISTS(SELECT 1) = EXISTS(SELECT 2);
SELECT (NOT EXISTS(SELECT 1)) = EXISTS(SELECT 2);

View File

@ -0,0 +1,6 @@
SELECT * FROM table1 FETCH FIRST 2 ROWS ONLY;
SELECT * FROM table1 FETCH NEXT ROW ONLY;
SELECT * FROM (VALUES (1, '1'), (2, '2')) FETCH FIRST ROW ONLY;
SELECT * FROM (VALUES (1, '1'), (2, '2')) FETCH FIRST ROW WITH TIES;
SELECT * FROM table1 FETCH FIRST 2 ROWS WITH TIES;
SELECT * FROM table1 FETCH NEXT ROW WITH TIES;

View File

@ -0,0 +1,23 @@
SELECT * FROM users CROSS JOIN UNNEST(friends) WITH ordinality;
-- LATERAL
SELECT name, x, y
FROM nation
CROSS JOIN LATERAL (SELECT name || ' :-' AS x)
CROSS JOIN LATERAL (SELECT x || ')' AS y);
-- Qualifying column names
SELECT nation.name, region.name
FROM nation
CROSS JOIN region;
SELECT n.name, r.name
FROM nation AS n
CROSS JOIN region AS r;
SELECT n.name, r.name
FROM nation n
CROSS JOIN region r;
SELECT * FROM a CROSS JOIN b LEFT JOIN c ON true;
SELECT * FROM a CROSS JOIN b NATURAL JOIN c CROSS JOIN d NATURAL JOIN e;

View File

@ -0,0 +1,4 @@
SELECT * FROM table1 OFFSET 2 ROWS;
SELECT * FROM table1 OFFSET 2;
SELECT * FROM (VALUES (1, '1'), (2, '2')) OFFSET 2 ROWS;
SELECT * FROM (VALUES (1, '1'), (2, '2')) OFFSET 2;

View File

@ -0,0 +1,6 @@
SELECT col1.f1, col2, col3.f1.f2.f3 FROM table1;
SELECT col1.f1[0], col2, col3[2].f2.f3, col4[4] FROM table1;
SELECT CAST(ROW(11, 12) AS ROW(COL0 INTEGER, COL1 INTEGER)).col0;
-- ALL COLUMNS
SELECT ROW (1, 'a', true).*;
SELECT ROW (1, 'a', true).* AS (f1, f2, f3);

View File

@ -0,0 +1,16 @@
-- UNION
SELECT 13
UNION
SELECT 42;
SELECT 13
UNION
SELECT * FROM (VALUES 42, 13);
-- INTERSECT
SELECT * FROM (VALUES 13, 42)
INTERSECT
SELECT 13;
-- EXCEPT
SELECT * FROM (VALUES 13, 42)
EXCEPT
SELECT 13;

View File

@ -0,0 +1,20 @@
-- EXISTS
SELECT name
FROM nation
WHERE EXISTS (
SELECT *
FROM region
WHERE region.regionkey = nation.regionkey
);
-- IN
SELECT name
FROM nation
WHERE regionkey IN (
SELECT regionkey
FROM region
WHERE name = 'AMERICA' OR name = 'AFRICA'
);
-- Scalar subquery
SELECT name
FROM nation
WHERE regionkey = (SELECT max(regionkey) FROM region);

View File

@ -0,0 +1,10 @@
SELECT *
FROM users TABLESAMPLE BERNOULLI (50);
SELECT *
FROM users TABLESAMPLE SYSTEM (75);
SELECT o.*, i.*
FROM orders o TABLESAMPLE SYSTEM (10)
JOIN lineitem i TABLESAMPLE BERNOULLI (40)
ON o.orderkey = i.orderkey;

View File

@ -0,0 +1,2 @@
SELECT 123 UNION DISTINCT
SELECT 123 UNION ALL SELECT 123;

View File

@ -0,0 +1,2 @@
SET PATH iLikeToEat.apples, andBananas;
SET PATH "schemas,with"."grammar.in", "their!names";

View File

@ -0,0 +1,5 @@
SET ROLE ALL;
SET ROLE NONE;
SET ROLE role;
SET ROLE "role";
SET ROLE role IN my_catalog;

View File

@ -0,0 +1,4 @@
SET SESSION foo = 'bar';
SET SESSION foo.bar = 'baz';
SET SESSION foo.bar.boo = 'baz';
SET SESSION foo.bar = 'ban' || 'ana';

View File

@ -0,0 +1,10 @@
SET TIME ZONE LOCAL;
SET TIME ZONE '-08:00';
SET TIME ZONE INTERVAL '10' HOUR;
SET TIME ZONE INTERVAL -'08:00' HOUR TO MINUTE;
SET TIME ZONE 'America/Los_Angeles';
SET TIME ZONE concat_ws('/', 'America', 'Los_Angeles');

View File

@ -0,0 +1,3 @@
SHOW CATALOGS;
SHOW CATALOGS LIKE '%';
SHOW CATALOGS LIKE '%$_%' ESCAPE '$';

View File

@ -0,0 +1,5 @@
SHOW COLUMNS FROM a;
SHOW COLUMNS FROM a.b;
SHOW COLUMNS FROM "awesome table";
SHOW COLUMNS FROM "awesome schema"."awesome table";
SHOW COLUMNS FROM a.b LIKE '%$_%' ESCAPE '$';

View File

@ -0,0 +1,16 @@
SHOW CREATE TABLE sf1.orders;
SHOW CREATE SCHEMA IF NOT EXISTS traffic;
SHOW CREATE VIEW test AS
SELECT orderkey, orderstatus, totalprice / 2 AS half
FROM orders;
SHOW CREATE MATERIALIZED VIEW cancelled_orders
AS
SELECT orderkey, totalprice
FROM orders
WHERE orderstatus = 3;

View File

@ -0,0 +1,3 @@
SHOW FUNCTIONS;
SHOW FUNCTIONS LIKE '%';
SHOW FUNCTIONS LIKE '%' ESCAPE '$';

View File

@ -0,0 +1,3 @@
SHOW GRANTS ON TABLE t;
SHOW GRANTS ON t;
SHOW GRANTS;

View File

@ -0,0 +1,2 @@
SHOW ROLE GRANTS;
SHOW ROLE GRANTS FROM catalog;

View File

@ -0,0 +1,6 @@
SHOW ROLES;
SHOW ROLES FROM foo;
SHOW ROLES IN foo;
SHOW CURRENT ROLES;
SHOW CURRENT ROLES FROM foo;
SHOW CURRENT ROLES IN foo;

View File

@ -0,0 +1,4 @@
SHOW SCHEMAS;
SHOW SCHEMAS FROM foo;
SHOW SCHEMAS IN foo LIKE '%';
SHOW SCHEMAS IN foo LIKE '%$_%' ESCAPE '$';

View File

@ -0,0 +1,3 @@
SHOW SESSION;
SHOW SESSION LIKE '%';
SHOW SESSION LIKE '%' ESCAPE '$';

View File

@ -0,0 +1 @@
SHOW STATS FOR a;

View File

@ -0,0 +1,3 @@
SHOW STATS FOR (SELECT * FROM a);
SHOW STATS FOR (SELECT * FROM a WHERE field > 0);
SHOW STATS FOR (SELECT * FROM a WHERE field > 0 or field < 0);

View File

@ -0,0 +1,4 @@
SHOW TABLES;
SHOW TABLES FROM a;
SHOW TABLES FROM "awesome schema";
SHOW TABLES IN a LIKE '%$_%' ESCAPE '$';

View File

@ -0,0 +1,10 @@
START TRANSACTION;
START TRANSACTION ISOLATION LEVEL READ UNCOMMITTED;
START TRANSACTION ISOLATION LEVEL READ COMMITTED;
START TRANSACTION ISOLATION LEVEL REPEATABLE READ;
START TRANSACTION ISOLATION LEVEL SERIALIZABLE;
START TRANSACTION READ ONLY;
START TRANSACTION READ WRITE;
START TRANSACTION ISOLATION LEVEL READ COMMITTED, READ ONLY;
START TRANSACTION READ ONLY, ISOLATION LEVEL READ COMMITTED;
START TRANSACTION READ WRITE, ISOLATION LEVEL SERIALIZABLE;

View File

@ -0,0 +1,2 @@
SELECT substring('string' FROM 2);
SELECT substring('string' FROM 2 FOR 3);

View File

@ -0,0 +1,3 @@
TRUNCATE TABLE a;
TRUNCATE TABLE a.b;
TRUNCATE TABLE a.b.c;

View File

@ -0,0 +1,10 @@
UPDATE foo_tablen SET bar = 23, baz = 3.1415E0, bletch = 'barf' WHERE (nothing = 'fun');
UPDATE new_hires SET manager = (
SELECT
e.name
FROM
employees e
WHERE
e.employee_id = new_hires.manager_id
);

View File

@ -0,0 +1,2 @@
USE hive.finance;
USE information_schema;

View File

@ -0,0 +1,2 @@
VALUES ('a', 1, 2.2e0), ('b', 2, 3.3e0);
SELECT * FROM (VALUES ('a', 1, 2.2e0), ('b', 2, 3.3e0));

View File

@ -0,0 +1,14 @@
SELECT cust_key, value OVER w, label OVER w
FROM orders
WINDOW w AS (
PARTITION BY cust_key
ORDER BY order_date
MEASURES
RUNNING LAST(total_price) AS value,
CLASSIFIER() AS label
ROWS BETWEEN CURRENT ROW AND UNBOUNDED FOLLOWING
PATTERN (A B+ C+)
DEFINE
B AS B.value < PREV (B.value),
C AS C.value > PREV (C.value)
);

Some files were not shown because too many files have changed in this diff.