refactor: improve keyword definitions (#100)

* feat: improve keyword definitions and add the KW_ prefix to keyword tokens

* test: capitalize keywords in alter sql test cases

* test: capitalize keywords in create sql test cases

* feat: merge interval rule and timeIntervalExpression rule in flinksql grammar

* test: quote reserved keywords in insert sql test cases

* test: quote reserved keywords in select sql test cases

* feat: improve uid rule

* test: rename sum to _sum because it is a reserved keyword

* docs: improve comments in flinkSqlLexer
Hayden 2023-05-19 14:28:25 +08:00 committed by GitHub
parent a026ae0592
commit 40c911597b
19 changed files with 12652 additions and 11856 deletions
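
For context: after this commit every keyword is a KW_-prefixed lexer token (KW_SELECT, KW_DATA, ...), and the non-reserved ones are funnelled through the nonReservedKeywords rule so they can still be used as plain identifiers. A minimal TypeScript sketch of what that means for callers, assuming the FlinkSQL facade with a validate() method that the test spec further down uses (the package name, class name and return shape are assumptions, not part of this commit):

// Sketch only. Assumes dt-sql-parser exports a FlinkSQL class whose
// validate() returns a list of syntax errors, as the test spec below suggests.
import { FlinkSQL } from 'dt-sql-parser';

const parser = new FlinkSQL();

// DATA and TYPE are declared non-reserved (KW_DATA, KW_TYPE), so they should
// still be accepted as ordinary column names.
const nonReservedAsIdentifiers = 'SELECT data, type FROM t1;';

// SUM and USER are reserved; the updated tests either rename the alias (_sum)
// or backquote the identifier (`user`).
const reservedQuoted = 'SELECT `user`, SUM(amount) AS _sum FROM Orders GROUP BY `user`;';

console.log(parser.validate(nonReservedAsIdentifiers)); // expected: no errors
console.log(parser.validate(reservedQuoted));           // expected: no errors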

@ -9,320 +9,379 @@ LINE_COMMENT: (
| '--' ('\r'? '\n' | EOF)
) -> channel(HIDDEN);
// List of keywords that are not reserved.
// Please keep the keywords in alphabetical order when adding a new keyword.
// Please also add any new keyword to the nonReservedKeywords rule.
// Reference link https://github.com/apache/flink/blob/release-1.16/flink-table/flink-sql-parser/src/main/codegen/data/Parser.tdd
// Common Keywords
KW_ADD: 'ADD';
KW_ADMIN: 'ADMIN';
KW_AFTER: 'AFTER';
KW_ANALYZE: 'ANALYZE';
KW_ASC: 'ASC';
KW_BEFORE: 'BEFORE';
KW_BYTES: 'BYTES';
KW_CASCADE: 'CASCADE';
KW_CATALOG: 'CATALOG';
KW_CATALOGS: 'CATALOGS';
KW_CENTURY: 'CENTURY';
KW_CHAIN: 'CHAIN';
KW_CHANGELOG_MODE: 'CHANGELOG_MODE';
KW_CHARACTERS: 'CHARACTERS';
KW_COMMENT: 'COMMENT';
KW_COMPACT: 'COMPACT';
KW_COLUMNS: 'COLUMNS';
KW_CONSTRAINTS: 'CONSTRAINTS';
KW_CONSTRUCTOR: 'CONSTRUCTOR';
KW_CUMULATE: 'CUMULATE';
KW_DATA: 'DATA';
KW_DATABASE: 'DATABASE';
KW_DATABASES: 'DATABASES';
KW_DAYS: 'DAYS';
KW_DECADE: 'DECADE';
KW_DEFINED: 'DEFINED';
KW_DESC: 'DESC';
KW_DESCRIPTOR: 'DESCRIPTOR';
KW_DIV: 'DIV';
KW_ENCODING: 'ENCODING';
KW_ENFORCED: 'ENFORCED';
KW_ENGINE: 'ENGINE';
KW_ERROR: 'ERROR';
KW_ESTIMATED_COST: 'ESTIMATED_COST';
KW_EXCEPTION: 'EXCEPTION';
KW_EXCLUDE: 'EXCLUDE';
KW_EXCLUDING: 'EXCLUDING';
KW_EXTENDED: 'EXTENDED';
KW_FILE: 'FILE';
KW_FINAL: 'FINAL';
KW_FIRST: 'FIRST';
KW_FOLLOWING: 'FOLLOWING';
KW_FORMAT: 'FORMAT';
KW_FORTRAN: 'FORTRAN';
KW_FOUND: 'FOUND';
KW_FRAC_SECOND: 'FRAC_SECOND';
KW_FUNCTIONS: 'FUNCTIONS';
KW_GENERAL: 'GENERAL';
KW_GENERATED: 'GENERATED';
KW_GO: 'GO';
KW_GOTO: 'GOTO';
KW_GRANTED: 'GRANTED';
KW_HOP: 'HOP';
KW_HOURS: 'HOURS';
KW_IF: 'IF';
KW_IGNORE: 'IGNORE';
KW_INCREMENT: 'INCREMENT';
KW_INPUT: 'INPUT';
KW_INVOKER: 'INVOKER';
KW_JAR: 'JAR';
KW_JARS: 'JARS';
KW_JAVA: 'JAVA';
KW_JSON: 'JSON';
KW_JSON_EXECUTION_PLAN: 'JSON_EXECUTION_PLAN';
KW_KEY: 'KEY';
KW_KEY_MEMBER: 'KEY_MEMBER';
KW_KEY_TYPE: 'KEY_TYPE';
KW_LABEL: 'LABEL';
KW_LAST: 'LAST';
KW_LENGTH: 'LENGTH';
KW_LEVEL: 'LEVEL';
KW_LOAD: 'LOAD';
KW_MAP: 'MAP';
KW_MICROSECOND: 'MICROSECOND';
KW_MILLENNIUM: 'MILLENNIUM';
KW_MILLISECOND: 'MILLISECOND';
KW_MINUTES: 'MINUTES';
KW_MINVALUE: 'MINVALUE';
KW_MODIFY: 'MODIFY';
KW_MODULES: 'MODULES';
KW_MONTHS: 'MONTHS';
KW_NANOSECOND: 'NANOSECOND';
KW_NULLS: 'NULLS';
KW_NUMBER: 'NUMBER';
KW_OPTION: 'OPTION';
KW_OPTIONS: 'OPTIONS';
KW_ORDERING: 'ORDERING';
KW_OUTPUT: 'OUTPUT';
KW_OVERWRITE: 'OVERWRITE';
KW_OVERWRITING: 'OVERWRITING';
KW_PARTITIONED: 'PARTITIONED';
KW_PARTITIONS: 'PARTITIONS';
KW_PASSING: 'PASSING';
KW_PAST: 'PAST';
KW_PATH: 'PATH';
KW_PLACING: 'PLACING';
KW_PLAN: 'PLAN';
KW_PRECEDING: 'PRECEDING';
KW_PRESERVE: 'PRESERVE';
KW_PRIOR: 'PRIOR';
KW_PRIVILEGES: 'PRIVILEGES';
KW_PUBLIC: 'PUBLIC';
KW_PYTHON: 'PYTHON';
KW_PYTHON_FILES: 'PYTHON_FILES';
KW_PYTHON_REQUIREMENTS: 'PYTHON_REQUIREMENTS';
KW_PYTHON_DEPENDENCIES: 'PYTHON_DEPENDENCIES';
KW_PYTHON_JAR: 'PYTHON_JAR';
KW_PYTHON_ARCHIVES: 'PYTHON_ARCHIVES';
KW_PYTHON_PARAMETER: 'PYTHON_PARAMETER';
KW_QUARTER: 'QUARTER';
KW_RAW: 'RAW';
KW_READ: 'READ';
KW_RELATIVE: 'RELATIVE';
KW_REMOVE: 'REMOVE';
KW_RENAME: 'RENAME';
KW_REPLACE: 'REPLACE';
KW_RESPECT: 'RESPECT';
KW_RESTART: 'RESTART';
KW_RESTRICT: 'RESTRICT';
KW_ROLE: 'ROLE';
KW_ROW_COUNT: 'ROW_COUNT';
KW_SCALA: 'SCALA';
KW_SCALAR: 'SCALAR';
KW_SCALE: 'SCALE';
KW_SCHEMA: 'SCHEMA';
KW_SECONDS: 'SECONDS';
KW_SECTION: 'SECTION';
KW_SECURITY: 'SECURITY';
KW_SELF: 'SELF';
KW_SERVER: 'SERVER';
KW_SERVER_NAME: 'SERVER_NAME';
KW_SESSION: 'SESSION';
KW_SETS: 'SETS';
KW_SIMPLE: 'SIMPLE';
KW_SIZE: 'SIZE';
KW_SLIDE: 'SLIDE';
KW_SOURCE: 'SOURCE';
KW_SPACE: 'SPACE';
KW_STATE: 'STATE';
KW_STATEMENT: 'STATEMENT';
KW_STEP: 'STEP';
KW_STRING: 'STRING';
KW_STRUCTURE: 'STRUCTURE';
KW_STYLE: 'STYLE';
KW_TABLES: 'TABLES';
KW_TEMPORARY: 'TEMPORARY';
KW_TIMECOL: 'TIMECOL';
KW_TIMESTAMP_LTZ: 'TIMESTAMP_LTZ';
KW_TIMESTAMPADD: 'TIMESTAMPADD';
KW_TIMESTAMPDIFF: 'TIMESTAMPDIFF';
KW_TRANSFORM: 'TRANSFORM';
KW_TUMBLE: 'TUMBLE';
KW_TYPE: 'TYPE';
KW_UNDER: 'UNDER';
KW_UNLOAD: 'UNLOAD';
KW_USAGE: 'USAGE';
KW_USE: 'USE';
KW_UTF16: 'UTF16';
KW_UTF32: 'UTF32';
KW_UTF8: 'UTF8';
KW_VERSION: 'VERSION';
KW_VIEW: 'VIEW';
KW_VIEWS: 'VIEWS';
KW_VIRTUAL: 'VIRTUAL';
KW_WATERMARK: 'WATERMARK';
KW_WATERMARKS: 'WATERMARKS';
KW_WEEK: 'WEEK';
KW_WORK: 'WORK';
KW_WRAPPER: 'WRAPPER';
KW_YEARS: 'YEARS';
KW_ZONE: 'ZONE';
SELECT: 'SELECT';
FROM: 'FROM';
ADD: 'ADD';
AS: 'AS';
ALL: 'ALL';
ANY: 'ANY';
DISTINCT: 'DISTINCT';
WHERE: 'WHERE';
GROUP: 'GROUP';
BY: 'BY';
GROUPING: 'GROUPING';
SETS: 'SETS';
CUBE: 'CUBE';
ROLLUP: 'ROLLUP';
ORDER: 'ORDER';
HAVING: 'HAVING';
LIMIT: 'LIMIT';
AT: 'AT';
OR: 'OR';
AND: 'AND';
IN: 'IN';
NOT: 'NOT';
NO: 'NO';
EXISTS: 'EXISTS';
BETWEEN: 'BETWEEN';
LIKE: 'LIKE';
RLIKE: 'RLIKE';
IS: 'IS';
TRUE: 'TRUE';
FALSE: 'FALSE';
NULLS: 'NULLS';
ASC: 'ASC';
DESC: 'DESC';
FOR: 'FOR';
INTERVAL: 'INTERVAL';
CASE: 'CASE';
WHEN: 'WHEN';
THEN: 'THEN';
ELSE: 'ELSE';
BEGIN: 'BEGIN';
END: 'END';
JOIN: 'JOIN';
CROSS: 'CROSS';
OUTER: 'OUTER';
INNER: 'INNER';
LEFT: 'LEFT';
SEMI: 'SEMI';
RIGHT: 'RIGHT';
FULL: 'FULL';
USER: 'USER';
NATURAL: 'NATURAL';
ON: 'ON';
PIVOT: 'PIVOT';
LATERAL: 'LATERAL';
WINDOW: 'WINDOW';
OVER: 'OVER';
PARTITION: 'PARTITION';
RANGE: 'RANGE';
ROWS: 'ROWS';
UNBOUNDED: 'UNBOUNDED';
PRECEDING: 'PRECEDING';
FOLLOWING: 'FOLLOWING';
CURRENT: 'CURRENT';
FIRST: 'FIRST';
AFTER: 'AFTER';
LAST: 'LAST';
WITH: 'WITH';
WITHOUT: 'WITHOUT';
VALUES: 'VALUES';
CREATE: 'CREATE';
TABLE: 'TABLE';
DIRECTORY: 'DIRECTORY';
VIEW: 'VIEW';
REPLACE: 'REPLACE';
EXECUTE: 'EXECUTE';
STATEMENT: 'STATEMENT';
INSERT: 'INSERT';
DELETE: 'DELETE';
REMOVE: 'REMOVE';
INTO: 'INTO';
DESCRIBE: 'DESCRIBE';
EXPLAIN: 'EXPLAIN';
PLAN: 'PLAN';
CHANGELOG_MODE: 'CHANGELOG_MODE';
JSON_EXECUTION_PLAN: 'JSON_EXECUTION_PLAN';
ESTIMATED_COST: 'ESTIMATED_COST';
FORMAT: 'FORMAT';
LOGICAL: 'LOGICAL';
CODEGEN: 'CODEGEN';
COST: 'COST';
CAST: 'CAST';
SHOW: 'SHOW';
TABLES: 'TABLES';
COLUMNS: 'COLUMNS';
COLUMN: 'COLUMN';
USE: 'USE';
MODULE: 'MODULE';
MODULES: 'MODULES';
PARTITIONS: 'PARTITIONS';
FUNCTIONS: 'FUNCTIONS';
DROP: 'DROP';
UNION: 'UNION';
EXCEPT: 'EXCEPT';
SETMINUS: 'SETMINUS';
INTERSECT: 'INTERSECT';
TO: 'TO';
TABLESAMPLE: 'TABLESAMPLE';
STRATIFY: 'STRATIFY';
ALTER: 'ALTER';
RENAME: 'RENAME';
STRUCT: 'STRUCT';
COMMENT: 'COMMENT';
SET: 'SET';
RESET: 'RESET';
DATA: 'DATA';
START: 'START';
TRANSACTION: 'TRANSACTION';
COMMIT: 'COMMIT';
ROLLBACK: 'ROLLBACK';
MACRO: 'MACRO';
IGNORE: 'IGNORE';
BOTH: 'BOTH';
LEADING: 'LEADING';
TRAILING: 'TRAILING';
IF: 'IF';
POSITION: 'POSITION';
EXTRACT: 'EXTRACT';
MINUS: 'MINUS';
DIV: 'DIV';
PERCENTLIT: 'PERCENTLIT';
BUCKET: 'BUCKET';
OUT: 'OUT';
OF: 'OF';
SORT: 'SORT';
CLUSTER: 'CLUSTER';
DISTRIBUTE: 'DISTRIBUTE';
OVERWRITE: 'OVERWRITE';
TRANSFORM: 'TRANSFORM';
REDUCE: 'REDUCE';
USING: 'USING';
SERDE: 'SERDE';
SERDEPROPERTIES: 'SERDEPROPERTIES';
RECORDREADER: 'RECORDREADER';
RECORDWRITER: 'RECORDWRITER';
DELIMITED: 'DELIMITED';
FIELDS: 'FIELDS';
TERMINATED: 'TERMINATED';
COLLECTION: 'COLLECTION';
ITEMS: 'ITEMS';
KEYS: 'KEYS';
ESCAPED: 'ESCAPED';
LINES: 'LINES';
SEPARATED: 'SEPARATED';
FUNCTION: 'FUNCTION';
EXTENDED: 'EXTENDED';
REFRESH: 'REFRESH';
CLEAR: 'CLEAR';
CACHE: 'CACHE';
UNCACHE: 'UNCACHE';
LAZY: 'LAZY';
FORMATTED: 'FORMATTED';
GLOBAL: 'GLOBAL';
TEMPORARY: 'TEMPORARY';
OPTIONS: 'OPTIONS';
UNSET: 'UNSET';
TBLPROPERTIES: 'TBLPROPERTIES';
DBPROPERTIES: 'DBPROPERTIES';
BUCKETS: 'BUCKETS';
SKEWED: 'SKEWED';
STORED: 'STORED';
DIRECTORIES: 'DIRECTORIES';
LOCATION: 'LOCATION';
EXCHANGE: 'EXCHANGE';
ARCHIVE: 'ARCHIVE';
UNARCHIVE: 'UNARCHIVE';
FILEFORMAT: 'FILEFORMAT';
TOUCH: 'TOUCH';
COMPACT: 'COMPACT';
CONCATENATE: 'CONCATENATE';
CHANGE: 'CHANGE';
CASCADE: 'CASCADE';
CONSTRAINT: 'CONSTRAINT';
RESTRICT: 'RESTRICT';
CLUSTERED: 'CLUSTERED';
SORTED: 'SORTED';
PURGE: 'PURGE';
INPUTFORMAT: 'INPUTFORMAT';
OUTPUTFORMAT: 'OUTPUTFORMAT';
DATABASE: 'DATABASE';
DATABASES: 'DATABASES';
DFS: 'DFS';
TRUNCATE: 'TRUNCATE';
ANALYZE: 'ANALYZE';
COMPUTE: 'COMPUTE';
LIST: 'LIST';
STATISTICS: 'STATISTICS';
PARTITIONED: 'PARTITIONED';
EXTERNAL: 'EXTERNAL';
DEFINED: 'DEFINED';
REVOKE: 'REVOKE';
GRANT: 'GRANT';
LOCK: 'LOCK';
UNLOCK: 'UNLOCK';
MSCK: 'MSCK';
REPAIR: 'REPAIR';
RECOVER: 'RECOVER';
EXPORT: 'EXPORT';
IMPORT: 'IMPORT';
LOAD: 'LOAD';
UNLOAD: 'UNLOAD';
ROLE: 'ROLE';
ROLES: 'ROLES';
COMPACTIONS: 'COMPACTIONS';
PRINCIPALS: 'PRINCIPALS';
TRANSACTIONS: 'TRANSACTIONS';
INDEX: 'INDEX';
INDEXES: 'INDEXES';
LOCKS: 'LOCKS';
OPTION: 'OPTION';
ANTI: 'ANTI';
LOCAL: 'LOCAL';
INPATH: 'INPATH';
WATERMARK: 'WATERMARK';
UNNEST: 'UNNEST';
MATCH: 'MATCH';
NEXT: 'NEXT';
WITHIN: 'WITHIN';
WS: 'WS';
SYSTEM: 'SYSTEM';
INCLUDING: 'INCLUDING';
EXCLUDING: 'EXCLUDING';
OVERWRITING: 'OVERWRITING';
CONSTRAINTS: 'CONSTRAINTS';
GENERATED: 'GENERATED';
WATERMARKS: 'WATERMARKS';
CATALOG: 'CATALOG';
LANGUAGE: 'LANGUAGE';
JAVA: 'JAVA';
SCALA: 'SCALA';
PYTHON: 'PYTHON';
JAR: 'JAR';
FILE: 'FILE';
PYTHON_FILES: 'PYTHON_FILES';
PYTHON_REQUIREMENTS: 'PYTHON_REQUIREMENTS';
PYTHON_DEPENDENCIES: 'PYTHON_DEPENDENCIES';
PYTHON_JAR: 'PYTHON_JAR';
PYTHON_ARCHIVES: 'PYTHON_ARCHIVES';
PYTHON_PARAMETER: 'PYTHON_PARAMETER';
ENGINE: 'ENGINE';
CATALOGS: 'CATALOGS';
VIEWS: 'VIEWS';
JARS: 'JARS';
PRIMARY: 'PRIMARY';
UNIQUE: 'UNIQUE';
KEY: 'KEY';
PERIOD: 'PERIOD';
SYSTEM_TIME: 'SYSTEM_TIME';
ENFORCED: 'ENFORCED';
METADATA: 'METADATA';
VIRTUAL: 'VIRTUAL';
ZONE: 'ZONE';
TUMBLE: 'TUMBLE';
HOP: 'HOP';
CUMULATE: 'CUMULATE';
DESCRIPTOR: 'DESCRIPTOR';
TIMECOL: 'TIMECOL';
SIZE: 'SIZE';
OFFSET: 'OFFSET';
STEP: 'STEP';
SLIDE: 'SLIDE';
SESSION: 'SESSION';
MATCH_RECOGNIZE: 'MATCH_RECOGNIZE';
MEASURES: 'MEASURES';
PATTERN: 'PATTERN';
ONE: 'ONE';
PER: 'PER';
KW_SKIP: 'SKIP';
PAST: 'PAST';
DEFINE: 'DEFINE';
// DATA TYPE Keywords
STRING: 'STRING';
ARRAY: 'ARRAY';
MAP: 'MAP';
CHAR: 'CHAR';
VARCHAR: 'VARCHAR';
BINARY: 'BINARY';
VARBINARY: 'VARBINARY';
BYTES: 'BYTES';
DECIMAL: 'DECIMAL';
DEC: 'DEC';
NUMERIC: 'NUMERIC';
TINYINT: 'TINYINT';
SMALLINT: 'SMALLINT';
INT: 'INT';
INTEGER: 'INTEGER';
BIGINT: 'BIGINT';
FLOAT: 'FLOAT';
DOUBLE: 'DOUBLE';
DATE: 'DATE';
TIME: 'TIME';
TIMESTAMP: 'TIMESTAMP';
TIMESTAMP_LTZ: 'TIMESTAMP_LTZ';
MULTISET: 'MULTISET';
BOOLEAN: 'BOOLEAN';
RAW: 'RAW';
ROW: 'ROW';
NULL: 'NULL';
DATETIME: 'DATETIME'; // custom data type defined by DTStack (数栈)
// Reserved Keywords
// Please keep the keywords in alphabetical order when adding a new keyword.
// Please also add any new keyword to the reservedKeywords rule.
// Reference link https://calcite.apache.org/docs/reference.html
KW_ABS: 'ABS';
KW_ALL: 'ALL';
ALLOW: 'ALLOW';
KW_ALTER : 'ALTER';
KW_AND: 'AND';
KW_ANY: 'ANY';
KW_ARE: 'ARE';
KW_ARRAY: 'ARRAY';
KW_AS: 'AS';
KW_AT: 'AT';
KW_AVG: 'AVG';
KW_BEGIN: 'BEGIN';
KW_BETWEEN: 'BETWEEN';
KW_BIGINT: 'BIGINT';
KW_BINARY: 'BINARY';
KW_BIT: 'BIT';
KW_BLOB: 'BLOB';
KW_BOOLEAN: 'BOOLEAN';
KW_BOTH: 'BOTH';
KW_BY: 'BY';
KW_CALL: 'CALL';
KW_CALLED: 'CALLED';
KW_CASCADED: 'CASCADED';
KW_CASE: 'CASE';
KW_CAST: 'CAST';
KW_CEIL: 'CEIL';
KW_CHAR: 'CHAR';
KW_CHARACTER: 'CHARACTER';
KW_CHECK: 'CHECK';
KW_CLOB: 'CLOB';
KW_CLOSE: 'CLOSE';
KW_COALESCE: 'COALESCE';
KW_COLLATE: 'COLLATE';
KW_COLLECT: 'COLLECT';
KW_COLUMN: 'COLUMN';
KW_COMMIT: 'COMMIT';
KW_CONNECT: 'CONNECT';
KW_CONSTRAINT: 'CONSTRAINT';
KW_CONTAINS: 'CONTAINS';
KW_CONVERT: 'CONVERT';
KW_COUNT: 'COUNT';
KW_CREATE: 'CREATE';
KW_CROSS: 'CROSS';
KW_CUBE: 'CUBE';
KW_CURRENT: 'CURRENT';
KW_CURSOR: 'CURSOR';
KW_CYCLE: 'CYCLE';
KW_DATE: 'DATE';
KW_DATETIME: 'DATETIME';
KW_DAY: 'DAY';
KW_DEC: 'DEC';
KW_DECIMAL: 'DECIMAL';
KW_DECLARE: 'DECLARE';
KW_DEFAULT: 'DEFAULT';
KW_DEFINE: 'DEFINE';
KW_DELETE: 'DELETE';
KW_DESCRIBE: 'DESCRIBE';
KW_DISTINCT: 'DISTINCT';
KW_DOUBLE: 'DOUBLE';
KW_DROP: 'DROP';
KW_EACH: 'EACH';
KW_ELSE: 'ELSE';
KW_END: 'END';
KW_EQUALS: 'EQUALS';
KW_EXCEPT: 'EXCEPT';
KW_EXECUTE: 'EXECUTE';
KW_EXISTS: 'EXISTS';
KW_EXPLAIN: 'EXPLAIN';
KW_EXTERNAL: 'EXTERNAL';
KW_EXTRACT: 'EXTRACT';
KW_FALSE: 'FALSE';
KW_FLOAT: 'FLOAT';
KW_FOR: 'FOR';
KW_FROM: 'FROM';
KW_FULL: 'FULL';
KW_FUNCTION: 'FUNCTION';
KW_GLOBAL: 'GLOBAL';
KW_GRANT: 'GRANT';
KW_GROUP: 'GROUP';
KW_GROUPING: 'GROUPING';
KW_GROUPS: 'GROUPS';
KW_HAVING: 'HAVING';
KW_HOUR: 'HOUR';
KW_IMPORT: 'IMPORT';
KW_IN: 'IN';
KW_INCLUDING: 'INCLUDING';
KW_INNER: 'INNER';
KW_INOUT: 'INOUT';
KW_INSERT: 'INSERT';
KW_INT: 'INT';
KW_INTEGER: 'INTEGER';
KW_INTERSECT: 'INTERSECT';
KW_INTERVAL: 'INTERVAL';
KW_INTO: 'INTO';
KW_IS: 'IS';
KW_JOIN: 'JOIN';
KW_LAG: 'LAG';
KW_LANGUAGE: 'LANGUAGE';
KW_LATERAL: 'LATERAL';
KW_LEADING: 'LEADING';
KW_LEFT: 'LEFT';
KW_LIKE: 'LIKE';
KW_LIMIT: 'LIMIT';
KW_LOCAL: 'LOCAL';
KW_MATCH: 'MATCH';
KW_MATCH_RECOGNIZE: 'MATCH_RECOGNIZE';
KW_MEASURES: 'MEASURES';
KW_MERGE: 'MERGE';
KW_METADATA: 'METADATA';
KW_MINUS: 'MINUS';
KW_MINUTE: 'MINUTE';
KW_MODIFIES: 'MODIFIES';
KW_MODULE: 'MODULE';
KW_MONTH: 'MONTH';
KW_MULTISET: 'MULTISET';
KW_NATURAL: 'NATURAL';
KW_NEXT: 'NEXT';
KW_NO: 'NO';
KW_NONE: 'NONE';
KW_NOT: 'NOT';
KW_NULL: 'NULL';
KW_NUMERIC: 'NUMERIC';
KW_OF: 'OF';
KW_OFFSET: 'OFFSET';
KW_ON: 'ON';
KW_ONE: 'ONE';
KW_OR: 'OR';
KW_ORDER: 'ORDER';
KW_OUT: 'OUT';
KW_OUTER: 'OUTER';
KW_OVER: 'OVER';
KW_OVERLAY: 'OVERLAY';
KW_PARTITION: 'PARTITION';
KW_PATTERN: 'PATTERN';
KW_PER: 'PER';
KW_PERCENT: 'PERCENT';
KW_PERIOD: 'PERIOD';
KW_POSITION: 'POSITION';
KW_PRIMARY: 'PRIMARY';
KW_RANGE: 'RANGE';
KW_RANK: 'RANK';
KW_RESET: 'RESET';
KW_REVOKE: 'REVOKE';
KW_RIGHT: 'RIGHT';
KW_RLIKE: 'RLIKE';
KW_ROLLBACK: 'ROLLBACK';
KW_ROLLUP: 'ROLLUP';
KW_ROW: 'ROW';
KW_ROWS: 'ROWS';
KW_SECOND: 'SECOND';
KW_SELECT: 'SELECT';
KW_SET: 'SET';
KW_SHOW: 'SHOW';
KW_SKIP: 'SKIP';
KW_SMALLINT: 'SMALLINT';
KW_START: 'START';
KW_STATIC: 'STATIC';
KW_SUBSTRING: 'SUBSTRING';
KW_SUM: 'SUM';
KW_SYSTEM: 'SYSTEM';
KW_SYSTEM_TIME: 'SYSTEM_TIME';
KW_SYSTEM_USER: 'SYSTEM_USER';
KW_TABLE: 'TABLE';
KW_TABLESAMPLE: 'TABLESAMPLE';
KW_THEN: 'THEN';
KW_TIME: 'TIME';
KW_TIMESTAMP: 'TIMESTAMP';
KW_TINYINT: 'TINYINT';
KW_TO: 'TO';
KW_TRUE: 'TRUE';
KW_TRUNCATE: 'TRUNCATE';
KW_UNION: 'UNION';
KW_UNIQUE: 'UNIQUE';
KW_UNKNOWN: 'UNKNOWN';
KW_UNNEST: 'UNNEST';
KW_UPPER: 'UPPER';
KW_UPSERT: 'UPSERT';
KW_USER: 'USER';
KW_USING: 'USING';
KW_VALUE: 'VALUE';
KW_VALUES: 'VALUES';
KW_VARBINARY: 'VARBINARY';
KW_VARCHAR: 'VARCHAR';
KW_WHEN: 'WHEN';
KW_WHERE: 'WHERE';
KW_WINDOW: 'WINDOW';
KW_WITH: 'WITH';
KW_WITHIN: 'WITHIN';
KW_WITHOUT: 'WITHOUT';
KW_YEAR: 'YEAR';
// Operators. Comparison
@ -366,7 +425,7 @@ DOUBLE_VERTICAL_SIGN: '||';
DOUBLE_HYPNEN_SIGN: '--';
SLASH_SIGN: '/';
QUESTION_MARK_SIGN: '?';
DOT_ID: '.' ID_LITERAL_FRAG;
DOUBLE_RIGHT_ARROW: '=>';
STRING_LITERAL: DQUOTA_STRING | SQUOTA_STRING | BQUOTA_STRING;
DIG_LITERAL: DEC_DIGIT+;
REAL_LITERAL: (DEC_DIGIT+)? '.' DEC_DIGIT+
@ -376,7 +435,6 @@ REAL_LITERAL: (DEC_DIGIT+)? '.' DEC_DIGIT+
BIT_STRING: BIT_STRING_L;
ID_LITERAL: ID_LITERAL_FRAG;
FILE_PATH: FILE_PATH_STRING;
DOUBLE_ARROW: '=>';
fragment FILE_PATH_STRING: ([/\\] (~([/\\ ]))*)+;
fragment JAR_FILE_PARTTARN: '`' ( '\\'. | '``' | ~('`'|'\\'))* '`';
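
The comment block at the top of the keyword section asks contributors to keep new keywords in alphabetical order and to register them in the matching nonReservedKeywords (or reservedKeywords) parser rule. One way to see the renamed tokens in action is to run the generated lexer directly; a hedged sketch, assuming an antlr4ts-generated FlinkSqlLexer living next to the listener/visitor files shown below (the relative import path and the named export are assumptions):

// Sketch only, not part of this commit.
import { CharStreams, Token } from 'antlr4ts';
import { FlinkSqlLexer } from './FlinkSqlLexer';

// Tokenize a statement and print the symbolic token names, which after this
// commit carry the KW_ prefix (KW_SELECT, KW_FROM, ...).
const lexer = new FlinkSqlLexer(CharStreams.fromString('SELECT id FROM t1;'));
for (let t = lexer.nextToken(); t.type !== Token.EOF; t = lexer.nextToken()) {
    console.log(lexer.vocabulary.getSymbolicName(t.type), JSON.stringify(t.text));
}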

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because it is too large

@ -133,7 +133,6 @@ import { PatternVariablesDefinationContext } from "./FlinkSqlParser";
import { WindowFrameContext } from "./FlinkSqlParser";
import { FrameBoundContext } from "./FlinkSqlParser";
import { WithinClauseContext } from "./FlinkSqlParser";
import { TimeIntervalExpressionContext } from "./FlinkSqlParser";
import { ExpressionContext } from "./FlinkSqlParser";
import { LogicalNotContext } from "./FlinkSqlParser";
import { PredicatedContext } from "./FlinkSqlParser";
@ -163,12 +162,13 @@ import { FunctionNameContext } from "./FlinkSqlParser";
import { DereferenceDefinitionContext } from "./FlinkSqlParser";
import { CorrelationNameContext } from "./FlinkSqlParser";
import { QualifiedNameContext } from "./FlinkSqlParser";
import { IntervalContext } from "./FlinkSqlParser";
import { TimeIntervalExpressionContext } from "./FlinkSqlParser";
import { ErrorCapturingMultiUnitsIntervalContext } from "./FlinkSqlParser";
import { MultiUnitsIntervalContext } from "./FlinkSqlParser";
import { ErrorCapturingUnitToUnitIntervalContext } from "./FlinkSqlParser";
import { UnitToUnitIntervalContext } from "./FlinkSqlParser";
import { IntervalValueContext } from "./FlinkSqlParser";
import { IntervalTimeUnitContext } from "./FlinkSqlParser";
import { ColumnAliasContext } from "./FlinkSqlParser";
import { TableAliasContext } from "./FlinkSqlParser";
import { ErrorCapturingIdentifierContext } from "./FlinkSqlParser";
@ -176,15 +176,12 @@ import { ErrorIdentContext } from "./FlinkSqlParser";
import { RealIdentContext } from "./FlinkSqlParser";
import { IdentifierListContext } from "./FlinkSqlParser";
import { IdentifierSeqContext } from "./FlinkSqlParser";
import { IdentifierContext } from "./FlinkSqlParser";
import { UnquotedIdentifierAlternativeContext } from "./FlinkSqlParser";
import { QuotedIdentifierAlternativeContext } from "./FlinkSqlParser";
import { AnsiNonReservedKeywordsContext } from "./FlinkSqlParser";
import { NonReservedKeywordsContext } from "./FlinkSqlParser";
import { NonReservedKeywordsAlternativeContext } from "./FlinkSqlParser";
import { UnquotedIdentifierContext } from "./FlinkSqlParser";
import { QuotedIdentifierContext } from "./FlinkSqlParser";
import { WhenClauseContext } from "./FlinkSqlParser";
import { UidListContext } from "./FlinkSqlParser";
import { UidContext } from "./FlinkSqlParser";
import { WithOptionContext } from "./FlinkSqlParser";
import { IfNotExistsContext } from "./FlinkSqlParser";
@ -198,15 +195,13 @@ import { ComparisonOperatorContext } from "./FlinkSqlParser";
import { BitOperatorContext } from "./FlinkSqlParser";
import { MathOperatorContext } from "./FlinkSqlParser";
import { UnaryOperatorContext } from "./FlinkSqlParser";
import { FullColumnNameContext } from "./FlinkSqlParser";
import { ConstantContext } from "./FlinkSqlParser";
import { StringLiteralContext } from "./FlinkSqlParser";
import { DecimalLiteralContext } from "./FlinkSqlParser";
import { BooleanLiteralContext } from "./FlinkSqlParser";
import { SetQuantifierContext } from "./FlinkSqlParser";
import { AnsiNonReservedContext } from "./FlinkSqlParser";
import { StrictNonReservedContext } from "./FlinkSqlParser";
import { NonReservedContext } from "./FlinkSqlParser";
import { ReservedKeywordsContext } from "./FlinkSqlParser";
import { NonReservedKeywordsContext } from "./FlinkSqlParser";
/**
@ -1518,16 +1513,6 @@ export default class FlinkSqlParserListener extends ParseTreeListener {
* @param ctx the parse tree
*/
exitWithinClause?: (ctx: WithinClauseContext) => void;
/**
* Enter a parse tree produced by `FlinkSqlParser.timeIntervalExpression`.
* @param ctx the parse tree
*/
enterTimeIntervalExpression?: (ctx: TimeIntervalExpressionContext) => void;
/**
* Exit a parse tree produced by `FlinkSqlParser.timeIntervalExpression`.
* @param ctx the parse tree
*/
exitTimeIntervalExpression?: (ctx: TimeIntervalExpressionContext) => void;
/**
* Enter a parse tree produced by `FlinkSqlParser.expression`.
* @param ctx the parse tree
@ -1863,15 +1848,15 @@ export default class FlinkSqlParserListener extends ParseTreeListener {
*/
exitQualifiedName?: (ctx: QualifiedNameContext) => void;
/**
* Enter a parse tree produced by `FlinkSqlParser.interval`.
* Enter a parse tree produced by `FlinkSqlParser.timeIntervalExpression`.
* @param ctx the parse tree
*/
enterInterval?: (ctx: IntervalContext) => void;
enterTimeIntervalExpression?: (ctx: TimeIntervalExpressionContext) => void;
/**
* Exit a parse tree produced by `FlinkSqlParser.interval`.
* Exit a parse tree produced by `FlinkSqlParser.timeIntervalExpression`.
* @param ctx the parse tree
*/
exitInterval?: (ctx: IntervalContext) => void;
exitTimeIntervalExpression?: (ctx: TimeIntervalExpressionContext) => void;
/**
* Enter a parse tree produced by `FlinkSqlParser.errorCapturingMultiUnitsInterval`.
* @param ctx the parse tree
@ -1922,6 +1907,16 @@ export default class FlinkSqlParserListener extends ParseTreeListener {
* @param ctx the parse tree
*/
exitIntervalValue?: (ctx: IntervalValueContext) => void;
/**
* Enter a parse tree produced by `FlinkSqlParser.intervalTimeUnit`.
* @param ctx the parse tree
*/
enterIntervalTimeUnit?: (ctx: IntervalTimeUnitContext) => void;
/**
* Exit a parse tree produced by `FlinkSqlParser.intervalTimeUnit`.
* @param ctx the parse tree
*/
exitIntervalTimeUnit?: (ctx: IntervalTimeUnitContext) => void;
/**
* Enter a parse tree produced by `FlinkSqlParser.columnAlias`.
* @param ctx the parse tree
@ -1996,64 +1991,42 @@ export default class FlinkSqlParserListener extends ParseTreeListener {
* @param ctx the parse tree
*/
exitIdentifierSeq?: (ctx: IdentifierSeqContext) => void;
/**
* Enter a parse tree produced by `FlinkSqlParser.identifier`.
* @param ctx the parse tree
*/
enterIdentifier?: (ctx: IdentifierContext) => void;
/**
* Exit a parse tree produced by `FlinkSqlParser.identifier`.
* @param ctx the parse tree
*/
exitIdentifier?: (ctx: IdentifierContext) => void;
/**
* Enter a parse tree produced by the `unquotedIdentifierAlternative`
* labeled alternative in `FlinkSqlParser.strictIdentifier`.
* labeled alternative in `FlinkSqlParser.identifier`.
* @param ctx the parse tree
*/
enterUnquotedIdentifierAlternative?: (ctx: UnquotedIdentifierAlternativeContext) => void;
/**
* Exit a parse tree produced by the `unquotedIdentifierAlternative`
* labeled alternative in `FlinkSqlParser.strictIdentifier`.
* labeled alternative in `FlinkSqlParser.identifier`.
* @param ctx the parse tree
*/
exitUnquotedIdentifierAlternative?: (ctx: UnquotedIdentifierAlternativeContext) => void;
/**
* Enter a parse tree produced by the `quotedIdentifierAlternative`
* labeled alternative in `FlinkSqlParser.strictIdentifier`.
* labeled alternative in `FlinkSqlParser.identifier`.
* @param ctx the parse tree
*/
enterQuotedIdentifierAlternative?: (ctx: QuotedIdentifierAlternativeContext) => void;
/**
* Exit a parse tree produced by the `quotedIdentifierAlternative`
* labeled alternative in `FlinkSqlParser.strictIdentifier`.
* labeled alternative in `FlinkSqlParser.identifier`.
* @param ctx the parse tree
*/
exitQuotedIdentifierAlternative?: (ctx: QuotedIdentifierAlternativeContext) => void;
/**
* Enter a parse tree produced by the `ansiNonReservedKeywords`
* labeled alternative in `FlinkSqlParser.strictIdentifier`.
* Enter a parse tree produced by the `nonReservedKeywordsAlternative`
* labeled alternative in `FlinkSqlParser.identifier`.
* @param ctx the parse tree
*/
enterAnsiNonReservedKeywords?: (ctx: AnsiNonReservedKeywordsContext) => void;
enterNonReservedKeywordsAlternative?: (ctx: NonReservedKeywordsAlternativeContext) => void;
/**
* Exit a parse tree produced by the `ansiNonReservedKeywords`
* labeled alternative in `FlinkSqlParser.strictIdentifier`.
* Exit a parse tree produced by the `nonReservedKeywordsAlternative`
* labeled alternative in `FlinkSqlParser.identifier`.
* @param ctx the parse tree
*/
exitAnsiNonReservedKeywords?: (ctx: AnsiNonReservedKeywordsContext) => void;
/**
* Enter a parse tree produced by the `nonReservedKeywords`
* labeled alternative in `FlinkSqlParser.strictIdentifier`.
* @param ctx the parse tree
*/
enterNonReservedKeywords?: (ctx: NonReservedKeywordsContext) => void;
/**
* Exit a parse tree produced by the `nonReservedKeywords`
* labeled alternative in `FlinkSqlParser.strictIdentifier`.
* @param ctx the parse tree
*/
exitNonReservedKeywords?: (ctx: NonReservedKeywordsContext) => void;
exitNonReservedKeywordsAlternative?: (ctx: NonReservedKeywordsAlternativeContext) => void;
/**
* Enter a parse tree produced by `FlinkSqlParser.unquotedIdentifier`.
* @param ctx the parse tree
@ -2084,16 +2057,6 @@ export default class FlinkSqlParserListener extends ParseTreeListener {
* @param ctx the parse tree
*/
exitWhenClause?: (ctx: WhenClauseContext) => void;
/**
* Enter a parse tree produced by `FlinkSqlParser.uidList`.
* @param ctx the parse tree
*/
enterUidList?: (ctx: UidListContext) => void;
/**
* Exit a parse tree produced by `FlinkSqlParser.uidList`.
* @param ctx the parse tree
*/
exitUidList?: (ctx: UidListContext) => void;
/**
* Enter a parse tree produced by `FlinkSqlParser.uid`.
* @param ctx the parse tree
@ -2224,16 +2187,6 @@ export default class FlinkSqlParserListener extends ParseTreeListener {
* @param ctx the parse tree
*/
exitUnaryOperator?: (ctx: UnaryOperatorContext) => void;
/**
* Enter a parse tree produced by `FlinkSqlParser.fullColumnName`.
* @param ctx the parse tree
*/
enterFullColumnName?: (ctx: FullColumnNameContext) => void;
/**
* Exit a parse tree produced by `FlinkSqlParser.fullColumnName`.
* @param ctx the parse tree
*/
exitFullColumnName?: (ctx: FullColumnNameContext) => void;
/**
* Enter a parse tree produced by `FlinkSqlParser.constant`.
* @param ctx the parse tree
@ -2285,34 +2238,24 @@ export default class FlinkSqlParserListener extends ParseTreeListener {
*/
exitSetQuantifier?: (ctx: SetQuantifierContext) => void;
/**
* Enter a parse tree produced by `FlinkSqlParser.ansiNonReserved`.
* Enter a parse tree produced by `FlinkSqlParser.reservedKeywords`.
* @param ctx the parse tree
*/
enterAnsiNonReserved?: (ctx: AnsiNonReservedContext) => void;
enterReservedKeywords?: (ctx: ReservedKeywordsContext) => void;
/**
* Exit a parse tree produced by `FlinkSqlParser.ansiNonReserved`.
* Exit a parse tree produced by `FlinkSqlParser.reservedKeywords`.
* @param ctx the parse tree
*/
exitAnsiNonReserved?: (ctx: AnsiNonReservedContext) => void;
exitReservedKeywords?: (ctx: ReservedKeywordsContext) => void;
/**
* Enter a parse tree produced by `FlinkSqlParser.strictNonReserved`.
* Enter a parse tree produced by `FlinkSqlParser.nonReservedKeywords`.
* @param ctx the parse tree
*/
enterStrictNonReserved?: (ctx: StrictNonReservedContext) => void;
enterNonReservedKeywords?: (ctx: NonReservedKeywordsContext) => void;
/**
* Exit a parse tree produced by `FlinkSqlParser.strictNonReserved`.
* Exit a parse tree produced by `FlinkSqlParser.nonReservedKeywords`.
* @param ctx the parse tree
*/
exitStrictNonReserved?: (ctx: StrictNonReservedContext) => void;
/**
* Enter a parse tree produced by `FlinkSqlParser.nonReserved`.
* @param ctx the parse tree
*/
enterNonReserved?: (ctx: NonReservedContext) => void;
/**
* Exit a parse tree produced by `FlinkSqlParser.nonReserved`.
* @param ctx the parse tree
*/
exitNonReserved?: (ctx: NonReservedContext) => void;
exitNonReservedKeywords?: (ctx: NonReservedKeywordsContext) => void;
}
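
Because the listener now reports the merged rule as timeIntervalExpression (instead of interval) and drops uidList, fullColumnName and the old nonReserved callbacks, custom listeners written against the previous names need updating. A small hedged sketch using the renamed callback, based on the default export and context imports shown in this file (the relative import paths are assumptions):

// Sketch of a downstream listener after the rename; not part of this commit.
import FlinkSqlParserListener from './FlinkSqlParserListener';
import { TimeIntervalExpressionContext, UidContext } from './FlinkSqlParser';

class IntervalCollector extends FlinkSqlParserListener {
    readonly intervals: string[] = [];
    readonly uids: string[] = [];

    // Formerly enterInterval; the interval rule was merged into timeIntervalExpression.
    enterTimeIntervalExpression = (ctx: TimeIntervalExpressionContext): void => {
        this.intervals.push(ctx.text);
    };

    // uid survives the refactor (only uidList was removed), so this callback is unchanged.
    enterUid = (ctx: UidContext): void => {
        this.uids.push(ctx.text);
    };
}

Walking a tree with such a listener (for example via antlr4ts' ParseTreeWalker.DEFAULT.walk) is unchanged by this commit.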

@ -133,7 +133,6 @@ import { PatternVariablesDefinationContext } from "./FlinkSqlParser";
import { WindowFrameContext } from "./FlinkSqlParser";
import { FrameBoundContext } from "./FlinkSqlParser";
import { WithinClauseContext } from "./FlinkSqlParser";
import { TimeIntervalExpressionContext } from "./FlinkSqlParser";
import { ExpressionContext } from "./FlinkSqlParser";
import { LogicalNotContext } from "./FlinkSqlParser";
import { PredicatedContext } from "./FlinkSqlParser";
@ -163,12 +162,13 @@ import { FunctionNameContext } from "./FlinkSqlParser";
import { DereferenceDefinitionContext } from "./FlinkSqlParser";
import { CorrelationNameContext } from "./FlinkSqlParser";
import { QualifiedNameContext } from "./FlinkSqlParser";
import { IntervalContext } from "./FlinkSqlParser";
import { TimeIntervalExpressionContext } from "./FlinkSqlParser";
import { ErrorCapturingMultiUnitsIntervalContext } from "./FlinkSqlParser";
import { MultiUnitsIntervalContext } from "./FlinkSqlParser";
import { ErrorCapturingUnitToUnitIntervalContext } from "./FlinkSqlParser";
import { UnitToUnitIntervalContext } from "./FlinkSqlParser";
import { IntervalValueContext } from "./FlinkSqlParser";
import { IntervalTimeUnitContext } from "./FlinkSqlParser";
import { ColumnAliasContext } from "./FlinkSqlParser";
import { TableAliasContext } from "./FlinkSqlParser";
import { ErrorCapturingIdentifierContext } from "./FlinkSqlParser";
@ -176,15 +176,12 @@ import { ErrorIdentContext } from "./FlinkSqlParser";
import { RealIdentContext } from "./FlinkSqlParser";
import { IdentifierListContext } from "./FlinkSqlParser";
import { IdentifierSeqContext } from "./FlinkSqlParser";
import { IdentifierContext } from "./FlinkSqlParser";
import { UnquotedIdentifierAlternativeContext } from "./FlinkSqlParser";
import { QuotedIdentifierAlternativeContext } from "./FlinkSqlParser";
import { AnsiNonReservedKeywordsContext } from "./FlinkSqlParser";
import { NonReservedKeywordsContext } from "./FlinkSqlParser";
import { NonReservedKeywordsAlternativeContext } from "./FlinkSqlParser";
import { UnquotedIdentifierContext } from "./FlinkSqlParser";
import { QuotedIdentifierContext } from "./FlinkSqlParser";
import { WhenClauseContext } from "./FlinkSqlParser";
import { UidListContext } from "./FlinkSqlParser";
import { UidContext } from "./FlinkSqlParser";
import { WithOptionContext } from "./FlinkSqlParser";
import { IfNotExistsContext } from "./FlinkSqlParser";
@ -198,15 +195,13 @@ import { ComparisonOperatorContext } from "./FlinkSqlParser";
import { BitOperatorContext } from "./FlinkSqlParser";
import { MathOperatorContext } from "./FlinkSqlParser";
import { UnaryOperatorContext } from "./FlinkSqlParser";
import { FullColumnNameContext } from "./FlinkSqlParser";
import { ConstantContext } from "./FlinkSqlParser";
import { StringLiteralContext } from "./FlinkSqlParser";
import { DecimalLiteralContext } from "./FlinkSqlParser";
import { BooleanLiteralContext } from "./FlinkSqlParser";
import { SetQuantifierContext } from "./FlinkSqlParser";
import { AnsiNonReservedContext } from "./FlinkSqlParser";
import { StrictNonReservedContext } from "./FlinkSqlParser";
import { NonReservedContext } from "./FlinkSqlParser";
import { ReservedKeywordsContext } from "./FlinkSqlParser";
import { NonReservedKeywordsContext } from "./FlinkSqlParser";
/**
@ -999,12 +994,6 @@ export default class FlinkSqlParserVisitor<Result> extends ParseTreeVisitor<Resu
* @return the visitor result
*/
visitWithinClause?: (ctx: WithinClauseContext) => Result;
/**
* Visit a parse tree produced by `FlinkSqlParser.timeIntervalExpression`.
* @param ctx the parse tree
* @return the visitor result
*/
visitTimeIntervalExpression?: (ctx: TimeIntervalExpressionContext) => Result;
/**
* Visit a parse tree produced by `FlinkSqlParser.expression`.
* @param ctx the parse tree
@ -1202,11 +1191,11 @@ export default class FlinkSqlParserVisitor<Result> extends ParseTreeVisitor<Resu
*/
visitQualifiedName?: (ctx: QualifiedNameContext) => Result;
/**
* Visit a parse tree produced by `FlinkSqlParser.interval`.
* Visit a parse tree produced by `FlinkSqlParser.timeIntervalExpression`.
* @param ctx the parse tree
* @return the visitor result
*/
visitInterval?: (ctx: IntervalContext) => Result;
visitTimeIntervalExpression?: (ctx: TimeIntervalExpressionContext) => Result;
/**
* Visit a parse tree produced by `FlinkSqlParser.errorCapturingMultiUnitsInterval`.
* @param ctx the parse tree
@ -1237,6 +1226,12 @@ export default class FlinkSqlParserVisitor<Result> extends ParseTreeVisitor<Resu
* @return the visitor result
*/
visitIntervalValue?: (ctx: IntervalValueContext) => Result;
/**
* Visit a parse tree produced by `FlinkSqlParser.intervalTimeUnit`.
* @param ctx the parse tree
* @return the visitor result
*/
visitIntervalTimeUnit?: (ctx: IntervalTimeUnitContext) => Result;
/**
* Visit a parse tree produced by `FlinkSqlParser.columnAlias`.
* @param ctx the parse tree
@ -1281,40 +1276,27 @@ export default class FlinkSqlParserVisitor<Result> extends ParseTreeVisitor<Resu
* @return the visitor result
*/
visitIdentifierSeq?: (ctx: IdentifierSeqContext) => Result;
/**
* Visit a parse tree produced by `FlinkSqlParser.identifier`.
* @param ctx the parse tree
* @return the visitor result
*/
visitIdentifier?: (ctx: IdentifierContext) => Result;
/**
* Visit a parse tree produced by the `unquotedIdentifierAlternative`
* labeled alternative in `FlinkSqlParser.strictIdentifier`.
* labeled alternative in `FlinkSqlParser.identifier`.
* @param ctx the parse tree
* @return the visitor result
*/
visitUnquotedIdentifierAlternative?: (ctx: UnquotedIdentifierAlternativeContext) => Result;
/**
* Visit a parse tree produced by the `quotedIdentifierAlternative`
* labeled alternative in `FlinkSqlParser.strictIdentifier`.
* labeled alternative in `FlinkSqlParser.identifier`.
* @param ctx the parse tree
* @return the visitor result
*/
visitQuotedIdentifierAlternative?: (ctx: QuotedIdentifierAlternativeContext) => Result;
/**
* Visit a parse tree produced by the `ansiNonReservedKeywords`
* labeled alternative in `FlinkSqlParser.strictIdentifier`.
* Visit a parse tree produced by the `nonReservedKeywordsAlternative`
* labeled alternative in `FlinkSqlParser.identifier`.
* @param ctx the parse tree
* @return the visitor result
*/
visitAnsiNonReservedKeywords?: (ctx: AnsiNonReservedKeywordsContext) => Result;
/**
* Visit a parse tree produced by the `nonReservedKeywords`
* labeled alternative in `FlinkSqlParser.strictIdentifier`.
* @param ctx the parse tree
* @return the visitor result
*/
visitNonReservedKeywords?: (ctx: NonReservedKeywordsContext) => Result;
visitNonReservedKeywordsAlternative?: (ctx: NonReservedKeywordsAlternativeContext) => Result;
/**
* Visit a parse tree produced by `FlinkSqlParser.unquotedIdentifier`.
* @param ctx the parse tree
@ -1333,12 +1315,6 @@ export default class FlinkSqlParserVisitor<Result> extends ParseTreeVisitor<Resu
* @return the visitor result
*/
visitWhenClause?: (ctx: WhenClauseContext) => Result;
/**
* Visit a parse tree produced by `FlinkSqlParser.uidList`.
* @param ctx the parse tree
* @return the visitor result
*/
visitUidList?: (ctx: UidListContext) => Result;
/**
* Visit a parse tree produced by `FlinkSqlParser.uid`.
* @param ctx the parse tree
@ -1417,12 +1393,6 @@ export default class FlinkSqlParserVisitor<Result> extends ParseTreeVisitor<Resu
* @return the visitor result
*/
visitUnaryOperator?: (ctx: UnaryOperatorContext) => Result;
/**
* Visit a parse tree produced by `FlinkSqlParser.fullColumnName`.
* @param ctx the parse tree
* @return the visitor result
*/
visitFullColumnName?: (ctx: FullColumnNameContext) => Result;
/**
* Visit a parse tree produced by `FlinkSqlParser.constant`.
* @param ctx the parse tree
@ -1454,22 +1424,16 @@ export default class FlinkSqlParserVisitor<Result> extends ParseTreeVisitor<Resu
*/
visitSetQuantifier?: (ctx: SetQuantifierContext) => Result;
/**
* Visit a parse tree produced by `FlinkSqlParser.ansiNonReserved`.
* Visit a parse tree produced by `FlinkSqlParser.reservedKeywords`.
* @param ctx the parse tree
* @return the visitor result
*/
visitAnsiNonReserved?: (ctx: AnsiNonReservedContext) => Result;
visitReservedKeywords?: (ctx: ReservedKeywordsContext) => Result;
/**
* Visit a parse tree produced by `FlinkSqlParser.strictNonReserved`.
* Visit a parse tree produced by `FlinkSqlParser.nonReservedKeywords`.
* @param ctx the parse tree
* @return the visitor result
*/
visitStrictNonReserved?: (ctx: StrictNonReservedContext) => Result;
/**
* Visit a parse tree produced by `FlinkSqlParser.nonReserved`.
* @param ctx the parse tree
* @return the visitor result
*/
visitNonReserved?: (ctx: NonReservedContext) => Result;
visitNonReservedKeywords?: (ctx: NonReservedKeywordsContext) => Result;
}

@ -39,7 +39,7 @@ describe('FlinkSQL Syntax Tests', () => {
});
test('Test Select Statement with having clause', () => {
const sql = `
SELECT city, sum(quantity) AS sum
SELECT city, sum(quantity) AS _sum
FROM dealer GROUP BY city HAVING city = 'Fremont';
`;
const result = parser.validate(sql);
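
Renaming the alias is one way to avoid the reserved SUM keyword; the SQL fixtures below take the other route and backquote reserved words such as user. A hedged variant of the same test showing the backquote option (the surrounding describe block, the parser instance and the assertion style are assumed from the spec above):

// Sketch only: the same test with the reserved keyword backquoted instead of renamed.
test('Test Select Statement with a backquoted reserved alias', () => {
    const sql = `
        SELECT city, sum(quantity) AS \`sum\`
        FROM dealer GROUP BY city HAVING city = 'Fremont';
    `;
    const result = parser.validate(sql);
    expect(result.length).toBe(0); // assumed: validate() returns a list of syntax errors
});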

@ -1,3 +1,3 @@
ALTER DATABASE tempDB SET ("key1"="value1");
alter database db1 set ('key1' = 'value1','key2.a' = 'value2.a');
ALTER DATABASE db1 SET ('key1' = 'value1','key2.a' = 'value2.a');

@ -1,12 +1,12 @@
ALTER FUNCTION tempFunction AS 'SimpleUdf';
alter temporary function function1 as 'org.apache.flink.function.function1';
ALTER temporary FUNCTION function1 AS 'org.apache.flink.function.function1';
alter temporary function function1 as 'org.apache.flink.function.function1' language scala;
ALTER temporary FUNCTION function1 AS 'org.apache.flink.function.function1' LANGUAGE scala;
alter temporary system function function1 as 'org.apache.flink.function.function1';
ALTER temporary SYSTEM FUNCTION function1 AS 'org.apache.flink.function.function1';
alter temporary system function function1 as 'org.apache.flink.function.function1' language java;
ALTER temporary SYSTEM FUNCTION function1 AS 'org.apache.flink.function.function1' LANGUAGE java;
ALTER TEMPORARY SYSTEM FUNCTION IF EXISTS tempFunction AS 'SimpleUdf';

@ -1,65 +1,65 @@
-- Refer: https://github.com/apache/flink/blob/master/flink-table/flink-sql-parser/src/test/java/org/apache/flink/sql/parser/FlinkSqlParserImplTest.java#L2016
-- Just for simple alter table statements; it does not include alter table column statements
alter table
t1 rename to t2;
ALTER TABLE
t1 RENAME TO t2;
alter table
if exists t1 rename to t2;
ALTER TABLE
IF EXISTS t1 RENAME TO t2;
alter table
c1.d1.t1 rename to t2;
ALTER TABLE
c1.d1.t1 RENAME TO t2;
alter table
if exists c1.d1.t1 rename to t2;
ALTER TABLE
IF EXISTS c1.d1.t1 RENAME TO t2;
alter table
t1 rename a to b;
ALTER TABLE
t1 RENAME a TO b;
alter table
if exists t1 rename a to b;
ALTER TABLE
IF EXISTS t1 RENAME a TO b;
alter table
if exists t1 rename a.x to a.y;
ALTER TABLE
IF EXISTS t1 RENAME a.x TO a.y;
alter table
ALTER TABLE
t1
set
('key1' = 'value1');
alter table
if exists t1
ALTER TABLE
IF EXISTS t1
set
('key1' = 'value1');
alter table
ALTER TABLE
t1
add
constraint ct1 primary key(a, b);
ADD
CONSTRAINT ct1 PRIMARY KEY(a, b);
alter table
ALTER TABLE
t1
add
constraint ct1 primary key(a, b) not enforced;
ADD
CONSTRAINT ct1 PRIMARY KEY(a, b) NOT ENFORCED;
alter table
if exists t1
add
constraint ct1 primary key(a, b) not enforced;
ALTER TABLE
IF EXISTS t1
ADD
CONSTRAINT ct1 PRIMARY KEY(a, b) NOT ENFORCED;
alter table
ALTER TABLE
t1
add
unique(a, b);
ADD
UNIQUE(a, b);
alter table
if exists t1
add
unique(a, b);
ALTER TABLE
IF EXISTS t1
ADD
UNIQUE(a, b);
alter table
t1 drop constraint ct1;
ALTER TABLE
t1 DROP CONSTRAINT ct1;
alter table
if exists t1 drop constraint ct1;
ALTER TABLE
IF EXISTS t1 DROP CONSTRAINT ct1;

@ -25,4 +25,4 @@ CREATE TEMPORARY FUNCTION function1 AS 'org.apache.flink.function.function1' LAN
CREATE TEMPORARY SYSTEM FUNCTION function1 AS 'org.apache.flink.function.function1' LANGUAGE SCALA USING JAR '/path/to/test.jar';
CREATE FUNCTION function1 AS 'org.apache.flink.function.function1' LANGUAGE JAVA USING JAR 'file:///path/to/test.jar',
jar 'hdfs:///path/to/test2.jar';
JAR 'hdfs:///path/to/test2.jar';

@ -27,14 +27,14 @@ CREATE TABLE MyTable (
WATERMARK FOR order_time AS order_time - INTERVAL '5' SECOND
) WITH ('connector' = 'kafka');
CREATE TABLE MyTable (id int, PRIMARY KEY (id) NOT ENFORCED) WITH ('connector' = 'kafka');
CREATE TABLE MyTable (id INT, PRIMARY KEY (id) NOT ENFORCED) WITH ('connector' = 'kafka');
CREATE TABLE tbl1 (
a bigint,
h varchar,
a BIGINT,
h VARCHAR,
g AS 2 * (a + 1),
ts AS toTimestamp(b, 'yyyy-MM-dd HH:mm:ss'),
b varchar,
b VARCHAR,
proc AS PROCTIME(),
meta STRING METADATA,
my_meta STRING METADATA FROM 'meta',
@ -57,7 +57,7 @@ CREATE TABLE Orders_in_file (
);
CREATE TABLE Orders_with_watermark (
id int,
id INT,
-- Add watermark definition
WATERMARK FOR order_time AS order_time - INTERVAL '5' SECOND
) WITH (
@ -80,35 +80,35 @@ FROM
WHERE
mod(id, 10) = 0;
CREATE TABLE catalog1.db1.table1 (id int) WITH ('connector' = 'kafka');
CREATE TABLE catalog1.db1.table1 (id INT) WITH ('connector' = 'kafka');
CREATE TABLE catalog1.db1.table1 (
attr0 STRING,
attr1 boolean,
attr3 decimal(38, 18),
attr1 BOOLEAN,
attr3 DECIMAL(38, 18),
attr4 TINYINT,
attr5 smallint,
attr6 int,
attr7 bigint,
attr8 float,
attr9 double,
attr10 date,
attr11 time,
attr12 timestamp(3),
attr13 array<STRING>,
attr14 ROW<attr15 float, attr16 timestamp(3)>,
attr17 MAP<int, bigint>,
attr5 SMALLINT,
attr6 INT,
attr7 BIGINT,
attr8 FLOAT,
attr9 DOUBLE,
attr10 DATE,
attr11 TIME,
attr12 TIMESTAMP(3),
attr13 ARRAY<STRING>,
attr14 ROW<attr15 FLOAT, attr16 TIMESTAMP(3)>,
attr17 MAP<INT, BIGINT>,
name1 VARCHAR(64),
message ROW<data ROW<UPO_TIMESTAMP VARCHAR(20)>>,
raw RAW('class', 'snapshot')
) WITH ('connector' = 'kafka');
CREATE TABLE IF NOT EXISTS tbl1 (
a bigint,
h varchar,
a BIGINT,
h VARCHAR,
g AS 2 * (a + 1),
ts AS toTimestamp(b, 'yyyy-MM-dd HH:mm:ss'),
b varchar,
b VARCHAR,
proc AS PROCTIME(),
PRIMARY KEY (a, b) NOT ENFORCED
) PARTITIONED BY (a, h) WITH (
@ -117,11 +117,11 @@ CREATE TABLE IF NOT EXISTS tbl1 (
);
CREATE TABLE tbl1 (
a bigint COMMENT 'test column comment AAA.',
h varchar,
a BIGINT COMMENT 'test column comment AAA.',
h VARCHAR,
g AS 2 * (a + 1),
ts AS toTimestamp(b, 'yyyy-MM-dd HH:mm:ss'),
b varchar,
b VARCHAR,
proc AS PROCTIME(),
meta STRING METADATA,
my_meta STRING METADATA FROM 'meta',
@ -133,11 +133,11 @@ CREATE TABLE tbl1 (
);
CREATE TABLE tbl1 (
a bigint COMMENT 'test column comment AAA.',
h varchar,
a BIGINT COMMENT 'test column comment AAA.',
h VARCHAR,
g AS 2 * (a + 1) COMMENT 'test computed column.',
ts AS toTimestamp(b, 'yyyy-MM-dd HH:mm:ss'),
b varchar,
b VARCHAR,
proc AS PROCTIME(),
PRIMARY KEY (a, b) NOT ENFORCED
) COMMENT 'test table comment ABC.' PARTITIONED BY (a, h) WITH (
@ -146,11 +146,11 @@ CREATE TABLE tbl1 (
);
CREATE TABLE tbl1 (
a bigint,
h varchar,
a BIGINT,
h VARCHAR,
g AS 2 * (a + 1),
ts AS toTimestamp(b, 'yyyy-MM-dd HH:mm:ss'),
b varchar,
b VARCHAR,
proc AS PROCTIME(),
PRIMARY KEY (a, b) NOT ENFORCED
) WITH (
@ -159,8 +159,8 @@ CREATE TABLE tbl1 (
);
CREATE TABLE tbl1 (
a bigint PRIMARY KEY NOT ENFORCED COMMENT 'test column comment AAA.',
h varchar CONSTRAINT ct1 PRIMARY KEY NOT ENFORCED,
a BIGINT PRIMARY KEY NOT ENFORCED COMMENT 'test column comment AAA.',
h VARCHAR CONSTRAINT ct1 PRIMARY KEY NOT ENFORCED,
g AS 2 * (a + 1),
ts AS toTimestamp(b, 'yyyy-MM-dd HH:mm:ss'),
proc AS PROCTIME()
@ -170,16 +170,16 @@ CREATE TABLE tbl1 (
);
CREATE TABLE tbl1 (
ts timestamp(3),
id varchar,
watermark FOR ts AS ts - INTERVAL '3' SECOND
ts TIMESTAMP(3),
id VARCHAR,
WATERMARK FOR ts AS ts - INTERVAL '3' SECOND
) WITH (
'connector' = 'kafka',
'kafka.topic' = 'log.test'
);
CREATE TABLE tbl1 (
log_ts varchar,
log_ts VARCHAR,
ts AS to_timestamp(log_ts),
WATERMARK FOR ts AS ts + INTERVAL '1' SECOND
) WITH (
@ -188,7 +188,7 @@ CREATE TABLE tbl1 (
);
CREATE TABLE tbl1 (
f1 ROW<q1 bigint, q2 ROW<t1 timestamp, t2 varchar>, q3 boolean>,
f1 ROW<q1 BIGINT, q2 ROW<t1 TIMESTAMP, t2 VARCHAR>, q3 BOOLEAN>,
WATERMARK FOR f1.q2.t1 AS NOW()
) WITH (
'connector' = 'kafka',
@ -196,36 +196,36 @@ CREATE TABLE tbl1 (
);
CREATE TABLE tbl1 (
a ARRAY<bigint>,
b MAP<int, varchar>,
c ROW<cc0 int, cc1 float, cc2 varchar>,
d MULTISET<varchar>,
a ARRAY<BIGINT>,
b MAP<INT, VARCHAR>,
c ROW<cc0 INT, cc1 FLOAT, cc2 VARCHAR>,
d MULTISET<VARCHAR>,
PRIMARY KEY (a, b) NOT ENFORCED
) with (
) WITH (
'x' = 'y',
'asd' = 'data'
);
CREATE TABLE tbl1 (
a ARRAY<ARRAY<bigint>>,
b MAP<MAP<int, varchar>, ARRAY<varchar>>,
c ROW<cc0 ARRAY<int>, cc1 float, cc2 varchar>,
d MULTISET<ARRAY<int>>,
a ARRAY<ARRAY<BIGINT>>,
b MAP<MAP<INT, VARCHAR>, ARRAY<VARCHAR>>,
c ROW<cc0 ARRAY<INT>, cc1 FLOAT, cc2 VARCHAR>,
d MULTISET<ARRAY<INT>>,
f TIMESTAMP(9),
PRIMARY KEY (a, b) NOT ENFORCED
) with (
) WITH (
'x' = 'y',
'asd' = 'data'
);
CREATE TABLE tbl1 (
a ARRAY<ARRAY<bigint>>,
b MAP<MAP<int, varchar>, ARRAY<varchar>>,
c ROW<cc0 ARRAY<int>, cc1 float, cc2 varchar>,
d MULTISET<ARRAY<int>>,
a ARRAY<ARRAY<BIGINT>>,
b MAP<MAP<INT, VARCHAR>, ARRAY<VARCHAR>>,
c ROW<cc0 ARRAY<INT>, cc1 FLOAT, cc2 VARCHAR>,
d MULTISET<ARRAY<INT>>,
f TIMESTAMP(9),
PRIMARY KEY (a, b) NOT ENFORCED
) with (
) WITH (
'x' = 'y',
'asd' = 'data'
) LIKE Orders (

@ -1,43 +1,43 @@
INSERT INTO country_page_view
SELECT user,
SELECT `user`,
cnt
FROM page_view_source;
INSERT INTO catalog1.db1.country_page_view
SELECT user,
SELECT `user`,
cnt
FROM page_view_source;
--- Execute InsertStatement
EXECUTE
INSERT INTO country_page_view PARTITION (date = '2019-8-30', country = 'China')
SELECT user,
INSERT INTO country_page_view PARTITION (`date` = '2019-8-30', country = 'China')
SELECT `user`,
cnt
FROM page_view_source;
--- Partition Clause: Static Partition
INSERT INTO country_page_view PARTITION (date = '2019-8-30', country = 'China')
SELECT user,
INSERT INTO country_page_view PARTITION (`date` = '2019-8-30', country = 'China')
SELECT `user`,
cnt
FROM page_view_source;
--- Partition Clause: Dynamic Partition
INSERT INTO country_page_view PARTITION (date = '2019-8-30')
SELECT user,
INSERT INTO country_page_view PARTITION (`date` = '2019-8-30')
SELECT `user`,
cnt,
country
FROM page_view_source;
--- Column List Statement
INSERT INTO country_page_view PARTITION (date = '2019-8-30', country = 'China') (date, country)
SELECT user,
INSERT INTO country_page_view PARTITION (`date` = '2019-8-30', country = 'China') (`date`, country)
SELECT `user`,
cnt
FROM page_view_source;
--- Insert Method: OverWrite
INSERT OVERWRITE country_page_view PARTITION (date = '2019-8-30')
SELECT user,
INSERT OVERWRITE country_page_view PARTITION (`date` = '2019-8-30')
SELECT `user`,
cnt,
country
FROM page_view_source;

@ -8,7 +8,7 @@ VALUES ('Chinese', 'mumiao', 18),
('Amercian', 'georage', 22);
EXECUTE
INSERT OverWrite country_page_view
INSERT OVERWRITE country_page_view
VALUES ('Chinese', 'mumiao', 18),
('Amercian', 'georage', 22);

@ -66,33 +66,33 @@ GROUP BY
-- Group Window Aggregation
SELECT
user,
`user`,
TUMBLE_START(order_time, INTERVAL '1' DAY) AS wStart,
SUM(amount) FROM Orders
GROUP BY
TUMBLE(order_time, INTERVAL '1' DAY),
user;
`user`;
SELECT
user,
`user`,
TUMBLE_START(order_time, INTERVAL '1' DAY) AS wStart,
SUM(amount) FROM Orders
GROUP BY
HOP(order_time, INTERVAL '1' DAY),
user;
`user`;
SELECT
user,
`user`,
TUMBLE_START(order_time, INTERVAL '1' DAY) AS wStart,
SUM(amount) FROM Orders
GROUP BY
SESSION(order_time, INTERVAL '1' DAY),
user;
`user`;
-- Having
SELECT SUM(amount)
FROM Orders
GROUP BY users
GROUP BY `users`
HAVING SUM(amount) > 50;
-- Over Aggregation