feat: collect entity (#265)
* feat: add text and word utils
* feat: add entity collector class
* refactor: rename SyntaxContextType to EntityContextType
* refactor: improve EntityCollector
* feat: improve mysql parser grammar
* feat: add mysql entity collector
* test: mysql entity collector tests
* feat: remove useless method
* feat: improve spark grammar file
* feat: add spark entity collector
* test: spark entity collector unit tests
* feat: remove useless code
* feat: add queryStatement label
* feat: add crateDatabaseStmt
* feat: add trino entity collector
* feat: rename trinosql to trino
* test: trino collect entity unit tests
* test: fix spark test
* feat(impala): support impala entity collector (#256)
* Feat/collect entity hive (#263)
* feat(hive): support hive collect entity
* feat(hive): update tableAllColumns
* feat: replace antlr4ts with antlr4ng
* feat(pgsql): pgsql collect entity (#268)
* feat(pgsql): pgsql collect entity
* feat(pgsql): optimize some name

---------

Co-authored-by: zhaoge <>

* feat: get word text by token.text
* feat: support collect db/function and add splitListener (#270)
* feat: support collect db/function and add splitListener
* feat: remove SplitListener interface in baseParser to use SplitListener in root
* fix(mysql): fix show create xxx not collected as createXXXEntity type
* test: fix pgsql unit tests
* Feat/error recover predicate (#274)
* feat: optimize pgsql grammar
* feat: add sql parser base
* feat: apply SQLParserBase
* feat: add geAllEntities method
* test: test collect table when missing column
* feat: compose collect and suggestion (#276)
* feat: mark stmt which contain caret
* test: correct name of getAllEntities
* test: remove misscolumn unit tests
* test: add suggestionWithEntity tests
* feat: flink collect entity (#277)
* feat: improve flink sql parser
* feat: support flink entity collector
* test: flink entity collect unit test
* feat: move combine entities to parent class

---------

Co-authored-by: 霜序 <976060700@qq.com>
Co-authored-by: XCynthia <942884029@qq.com>
parent 3f62ad0d32
commit a99721162b
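For context on what this commit enables, here is a rough usage sketch of the new entity-collector API in TypeScript (not part of the diff below). The facade class name (MySQL), the getAllEntities signature, and the entity fields (entityContextType, text) are assumptions inferred from the commit message and may not match the released API exactly.

    import { MySQL, EntityContextType } from 'dt-sql-parser';

    const parser = new MySQL();
    const sql = 'CREATE TABLE db.new_tb AS SELECT id, name FROM db.old_tb;';

    // Hypothetical call: walk the parse tree with the entity collector and
    // return every database/table/column/function entity that was found.
    const entities = parser.getAllEntities(sql) ?? [];

    for (const entity of entities) {
        // entityContextType distinguishes, e.g., a table being created
        // from a table being read.
        console.log(entity.entityContextType, entity.text);
    }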
@@ -7,6 +7,11 @@ parser grammar FlinkSqlParser;
 options {
     tokenVocab=FlinkSqlLexer;
     caseInsensitive= true;
+    superClass=SQLParserBase;
+}
+
+@header {
+    import SQLParserBase from '../SQLParserBase';
 }

 program
@@ -180,7 +185,7 @@ columnNameCreate

 columnName
     : uid
-    | expression
+    | {this.shouldMatchEmpty()}?
     ;

 columnNameList
@@ -289,7 +294,6 @@ transformList

 transform
     : columnName # identityTransform
-    | qualifiedName # columnTransform
     | LR_BRACKET transformArgument (COMMA transformArgument)* RR_BRACKET # applyTransform
     ;

@@ -484,6 +488,7 @@ selectClause
 projectItemDefinition
     : overWindowItem
     | columnName (KW_AS? expression)?
+    | expression (KW_AS? columnName)?
     ;

 overWindowItem
@@ -583,6 +588,7 @@ groupItemDefinition
     | LR_BRACKET expression (COMMA expression)* RR_BRACKET
     | groupingSetsNotaionName LR_BRACKET expression (COMMA expression)* RR_BRACKET
     | groupingSets LR_BRACKET groupItemDefinition (COMMA groupItemDefinition)* RR_BRACKET
+    | expression
     ;

 groupingSets
@@ -1,23 +1,21 @@
 /**
-Licensed to the Apache Software Foundation (ASF) under one or more
-contributor license agreements. See the NOTICE file distributed with
-this work for additional information regarding copyright ownership.
-The ASF licenses this file to You under the Apache License, Version 2.0
-(the "License"); you may not use this file except in compliance with
-the License. You may obtain a copy of the License at
-
-http://www.apache.org/licenses/LICENSE-2.0
-
-Unless required by applicable law or agreed to in writing, software
-distributed under the License is distributed on an "AS IS" BASIS,
-WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-See the License for the specific language governing permissions and
-limitations under the License.
-*/
+Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements.
+See the NOTICE file distributed with this work for additional information regarding copyright
+ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License. You may obtain a copy of the
+License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software distributed under the License
+is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+implied. See the License for the specific language governing permissions and limitations under the
+License.
+*/

 /**
-* This file is an adaptation of antlr/grammars-v4's sql/hive/v4/HiveParser.g4 grammar.
-* Reference: https://github.com/antlr/grammars-v4/blob/master/sql/hive/v4/HiveParser.g4
+* This file is an adaptation of antlr/grammars-v4's sql/hive/v4/HiveParser.g4 grammar. Reference:
+* https://github.com/antlr/grammars-v4/blob/master/sql/hive/v4/HiveParser.g4
 */

 // $antlr-format alignTrailingComments true, columnLimit 150, minEmptyLines 1, maxEmptyLinesToKeep 1, reflowComments false, useTab false
@@ -30,6 +28,11 @@ options
 {
     tokenVocab=HiveSqlLexer;
     caseInsensitive= true;
+    superClass=SQLParserBase;
+}
+
+@header {
+    import SQLParserBase from '../SQLParserBase';
 }

 program
@@ -804,6 +807,7 @@ columnNameList

 columnName
     : id_ (DOT id_)*
+    | {this.shouldMatchEmpty()}?
     ;

 columnNameCreate
@@ -1096,7 +1100,10 @@ fromStatement
     ;

 singleFromStatement
-    : fromClause b+=body+
+    : fromClause insertClause selectClause lateralView? whereClause? groupByClause? havingClause? window_clause? qualifyClause? orderByClause?
+        clusterByClause? distributeByClause? sortByClause? limitClause? # fromInsertStmt
+    | fromClause selectClause lateralView? whereClause? groupByClause? havingClause? window_clause? qualifyClause? orderByClause? clusterByClause?
+        distributeByClause? sortByClause? limitClause? # fromSelectStmt
     ;

 /*
@@ -1106,8 +1113,8 @@ The valuesClause rule below ensures that the parse tree for
 very similar to the tree for "insert into table FOO select a,b from BAR".
 */
 regularBody
-    : i=insertClause s=selectStatement
-    | selectStatement
+    : i=insertClause s=selectStatement # insertStmt
+    | selectStatement # selectStmt
     ;

 atomSelectStatement
@@ -1128,13 +1135,6 @@ selectStatementWithCTE
     : w=withClause? selectStatement
     ;

-body
-    : insertClause selectClause lateralView? whereClause? groupByClause? havingClause? window_clause? qualifyClause? orderByClause? clusterByClause?
-        distributeByClause? sortByClause? limitClause?
-    | selectClause lateralView? whereClause? groupByClause? havingClause? window_clause? qualifyClause? orderByClause? clusterByClause?
-        distributeByClause? sortByClause? limitClause?
-    ;
-
 insertClause
     : KW_INSERT (
         KW_OVERWRITE destination ifNotExists?
@@ -1667,8 +1667,7 @@ dropDataConnectorStatement
     ;

 tableAllColumns
-    : STAR
-    | tableOrView DOT STAR
+    : (id_ DOT)* STAR
     ;

 defaultValue
@@ -1866,6 +1865,7 @@ VALUES(1),(2) means 2 rows, 1 column each.
 VALUES(1,2),(3,4) means 2 rows, 2 columns each.
 VALUES(1,2,3) means 1 row, 3 columns
 */
+
 valuesClause
     : KW_VALUES valuesTableConstructor
     ;
@@ -22,6 +22,11 @@ options
 {
     tokenVocab=ImpalaSqlLexer;
     caseInsensitive= true;
+    superClass=SQLParserBase;
+}
+
+@header {
+    import SQLParserBase from '../SQLParserBase';
 }

 program
@@ -75,7 +80,7 @@ createStatement
 createTableSelect
     : KW_CREATE KW_EXTERNAL? KW_TABLE ifNotExists? tableNameCreate (
         LPAREN columnDefinition (COMMA columnDefinition)* (COMMA constraintSpecification)? RPAREN
-    )? (KW_PARTITIONED KW_BY (partitionedBy | createColumnAliases))? createCommonItem (
+    )? (KW_PARTITIONED KW_BY (columnAliases | partitionedBy))? createCommonItem (
         KW_AS queryStatement
     )?
     ;
@@ -555,6 +560,7 @@ functionNamePath

 columnNamePath
     : qualifiedName
+    | {this.shouldMatchEmpty()}?
     ;

 tableOrViewPath
@@ -582,8 +588,8 @@ assignmentItem
     ;

 viewColumns
-    : LPAREN columnNamePath (KW_COMMENT stringLiteral)? (
-        COMMA identifier (KW_COMMENT stringLiteral)?
+    : LPAREN columnNamePathCreate (KW_COMMENT stringLiteral)? (
+        COMMA columnNamePathCreate (KW_COMMENT stringLiteral)?
     )* RPAREN
     ;

@@ -610,6 +616,10 @@ foreignKeySpecification
     )? (KW_RELY)?
     ;

+columnSpec
+    : columnNamePath type (KW_COMMENT stringLiteral)?
+    ;
+
 columnDefinition
     : columnNamePathCreate type (KW_COMMENT stringLiteral)?
     ;
@@ -625,7 +635,7 @@ kuduColumnDefinition
     ;

 columnSpecWithKudu
-    : columnNamePath type (KW_COMMENT stringLiteral)? (kuduAttributes kuduAttributes*?)?
+    : columnSpec? (kuduAttributes kuduAttributes*?)?
     ;

 createColumnSpecWithKudu
@@ -712,7 +722,7 @@ properties
     ;

 partitionedBy
-    : LPAREN columnDefinition (COMMA columnDefinition)*? RPAREN
+    : LPAREN columnSpec (COMMA columnSpec)*? RPAREN
     ;

 sortedBy
@@ -835,10 +845,6 @@ columnAliases
     : LPAREN columnNamePath (COMMA columnNamePath)* RPAREN
     ;

-createColumnAliases
-    : LPAREN columnNamePathCreate (COMMA columnNamePathCreate)* RPAREN
-    ;
-
 relationPrimary
     : tableOrViewPath
     | KW_LATERAL? subQueryRelation
@@ -36,6 +36,11 @@ parser grammar MySqlParser;
 options {
     tokenVocab= MySqlLexer;
     caseInsensitive= true;
+    superClass=SQLParserBase;
+}
+
+@header {
+    import SQLParserBase from '../SQLParserBase';
 }

 // Top Level Description
@@ -212,8 +217,8 @@ administrationStatement
     ;

 utilityStatement
-    : simpleDescribeStatement
-    | fullDescribeStatement
+    : fullDescribeStatement
+    | simpleDescribeStatement
     | analyzeDescribeStatement
     | helpStatement
     | useStatement
@@ -273,16 +278,16 @@ createServer
     ;

 createTable
-    : KW_CREATE KW_TEMPORARY? KW_TABLE ifNotExists? tableNameCreate createDefinitions (
+    : KW_CREATE KW_TEMPORARY? KW_TABLE ifNotExists? tb= tableNameCreate col=createDefinitions? (
         tableOption (','? tableOption)*
-    )? partitionDefinitions? # copyCreateTable
-    | KW_CREATE KW_TEMPORARY? KW_TABLE ifNotExists? tableNameCreate createDefinitions? (
-        tableOption (','? tableOption)*
-    )? partitionDefinitions? (KW_IGNORE | KW_REPLACE)? KW_AS? selectStatement # columnCreateTable
+    )? partitionDefinitions? (KW_IGNORE | KW_REPLACE)? KW_AS? selectStatement # queryCreateTable
     | KW_CREATE KW_TEMPORARY? KW_TABLE ifNotExists? tableNameCreate (
         KW_LIKE tableName
         | '(' KW_LIKE tableName ')'
-    ) # queryCreateTable
+    ) # copyCreateTable
+    | KW_CREATE KW_TEMPORARY? KW_TABLE ifNotExists? tableNameCreate createDefinitions (
+        tableOption (','? tableOption)*
+    )? partitionDefinitions? # columnCreateTable
     ;

 createTablespaceInnodb
@@ -326,7 +331,7 @@ commonTableExpressions
 createView
     : KW_CREATE orReplace? (KW_ALGORITHM '=' algType=(KW_UNDEFINED | KW_MERGE | KW_TEMPTABLE))? ownerStatement? (
         KW_SQL KW_SECURITY secContext=(KW_DEFINER | KW_INVOKER)
-    )? KW_VIEW viewNameCreate ('(' columnNames ')')? KW_AS (
+    )? KW_VIEW viewNameCreate ('(' columnNameCreate (',' columnNameCreate)* ')')? KW_AS (
         '(' withClause? selectStatement ')'
         | withClause? selectStatement (
             KW_WITH checkOption=(KW_CASCADED | KW_LOCAL)? KW_CHECK KW_OPTION
@@ -438,7 +443,7 @@ createDefinitions
     ;

 createDefinition
-    : columnName columnDefinition
+    : columnNameCreate columnDefinition
     | (KW_INDEX | KW_KEY) indexName? indexType? indexColumnNames indexOption*
     | (KW_FULLTEXT | KW_SPATIAL) (KW_INDEX | KW_KEY)? indexName? indexColumnNames indexOption*
     | constraintSymbol? KW_PRIMARY KW_KEY indexType? indexColumnNames indexOption*
@@ -2052,15 +2057,15 @@ showStatement
     | KW_SHOW KW_EXTENDED? KW_FULL? columnsFormat=(KW_COLUMNS | KW_FIELDS) tableFormat=(
         KW_FROM
         | KW_IN
     ) tableName (schemaFormat=(KW_FROM | KW_IN) databaseName)? showFilter? # showColumns
-    | KW_SHOW KW_CREATE (KW_DATABASE | KW_SCHEMA) ifNotExists? databaseNameCreate # showCreateDb
+    | KW_SHOW KW_CREATE (KW_DATABASE | KW_SCHEMA) ifNotExists? databaseName # showCreateDb
     | KW_SHOW KW_CREATE (KW_EVENT | KW_PROCEDURE | KW_TRIGGER) fullId # showCreateFullIdObject
-    | KW_SHOW KW_CREATE KW_FUNCTION functionNameCreate # showCreateFunction
-    | KW_SHOW KW_CREATE KW_VIEW viewNameCreate # showCreateView
-    | KW_SHOW KW_CREATE KW_TABLE tableNameCreate # showCreateTable
+    | KW_SHOW KW_CREATE KW_FUNCTION functionName # showCreateFunction
+    | KW_SHOW KW_CREATE KW_VIEW viewName # showCreateView
+    | KW_SHOW KW_CREATE KW_TABLE tableName # showCreateTable
     | KW_SHOW KW_CREATE KW_USER userName # showCreateUser
     | KW_SHOW KW_ENGINE engineName engineOption=(KW_STATUS | KW_MUTEX) # showEngine
     | KW_SHOW showGlobalInfoClause # showGlobalInfo
     | KW_SHOW errorFormat=(KW_ERRORS | KW_WARNINGS) (
         KW_LIMIT (offset=decimalLiteral ',')? rowCount=decimalLiteral
     )? # showErrors
@@ -2396,6 +2401,7 @@ columnNames
 columnName
     : uid (dottedId dottedId?)?
     | .? dottedId dottedId?
+    | {this.shouldMatchEmpty()}?
     ;

 tablespaceNameCreate
@@ -2751,12 +2757,12 @@ orReplace
 // Functions

 functionCall
     : specificFunction # specificFunctionCall
     | aggregateWindowedFunction # aggregateFunctionCall
     | nonAggregateWindowedFunction # nonAggregateFunctionCall
-    | scalarFunctionName '(' functionArgs? ')' # scalarFunctionCall
-    | functionName '(' functionArgs? ')' # udfFunctionCall
+    | scalarFunctionName ('(' ')' | '(' functionArgs ')') # scalarFunctionCall
+    | functionName ('(' ')' | '(' functionArgs ')') # udfFunctionCall
     | passwordFunctionClause # passwordFunctionCall
     ;

 specificFunction
@@ -2925,7 +2931,6 @@ functionArgs

 functionArg
     : constant
-    | columnName
     | functionCall
     | expression
     ;
@@ -2941,22 +2946,23 @@ expression
     ;

 predicate
     : predicate KW_NOT? KW_IN '(' (selectStatement | expressions) ')' # inPredicate
     | predicate KW_IS nullNotnull # isNullPredicate
-    | left=predicate comparisonOperator right=predicate # binaryComparisonPredicate
-    | predicate comparisonOperator quantifier=(KW_ALL | KW_ANY | KW_SOME) '(' selectStatement ')' # subqueryComparisonPredicate
-    | predicate KW_NOT? KW_BETWEEN predicate KW_AND predicate # betweenPredicate
-    | predicate KW_SOUNDS KW_LIKE predicate # soundsLikePredicate
-    | predicate KW_NOT? KW_LIKE predicate (KW_ESCAPE STRING_LITERAL)? # likePredicate
-    | predicate KW_NOT? regex=(KW_REGEXP | KW_RLIKE) predicate # regexpPredicate
-    | predicate KW_MEMBER KW_OF '(' predicate ')' # jsonMemberOfPredicate
-    | expressionAtom # expressionAtomPredicate
+    | predicate comparisonOperator (
+        quantifier=(KW_ALL | KW_ANY | KW_SOME) '(' subQuery=selectStatement ')'
+        | right=predicate
+    ) # binaryComparisonPredicate
+    | predicate KW_NOT? KW_BETWEEN predicate KW_AND predicate # betweenPredicate
+    | predicate KW_SOUNDS KW_LIKE predicate # soundsLikePredicate
+    | predicate KW_NOT? KW_LIKE predicate (KW_ESCAPE STRING_LITERAL)? # likePredicate
+    | predicate KW_NOT? regex=(KW_REGEXP | KW_RLIKE) predicate # regexpPredicate
+    | predicate KW_MEMBER KW_OF '(' predicate ')' # jsonMemberOfPredicate
+    | expressionAtom # expressionAtomPredicate
     ;

 // Add in ASTVisitor nullNotnull in constant
 expressionAtom
     : constant # constantExpressionAtom
-    | columnName # columnNameExpressionAtom
     | functionCall # functionCallExpressionAtom
     | expressionAtom KW_COLLATE collationName # collateExpressionAtom
     | mysqlVariable # mysqlVariableExpressionAtom
@@ -2968,9 +2974,10 @@ expressionAtom
     | KW_EXISTS '(' selectStatement ')' # existsExpressionAtom
     | '(' selectStatement ')' # subqueryExpressionAtom
     | KW_INTERVAL expression intervalType # intervalExpressionAtom
+    | left=expressionAtom jsonOperator right=expressionAtom # jsonExpressionAtom
     | left=expressionAtom bitOperator right=expressionAtom # bitExpressionAtom
     | left=expressionAtom mathOperator right=expressionAtom # mathExpressionAtom
-    | left=expressionAtom jsonOperator right=expressionAtom # jsonExpressionAtom
+    | columnName # columnNameExpressionAtom
     ;

 unaryOperator
@@ -2982,18 +2989,18 @@ unaryOperator
     ;

 comparisonOperator
-    : comparisonBase
-    | '<' '>'
+    : '<' '>'
     | '!' '='
     | '<' '=' '>'
+    | comparisonBase
     ;

 comparisonBase
-    : '='
+    : '<' '='
+    | '>' '='
+    | '='
     | '>'
     | '<'
-    | '<' '='
-    | '>' '='
     ;

 logicalOperator
|
@ -42,6 +42,11 @@ parser grammar PostgreSQLParser;
|
|||||||
options {
|
options {
|
||||||
tokenVocab= PostgreSQLLexer;
|
tokenVocab= PostgreSQLLexer;
|
||||||
caseInsensitive= true;
|
caseInsensitive= true;
|
||||||
|
superClass=SQLParserBase;
|
||||||
|
}
|
||||||
|
|
||||||
|
@header {
|
||||||
|
import SQLParserBase from '../SQLParserBase';
|
||||||
}
|
}
|
||||||
|
|
||||||
program
|
program
|
||||||
@ -298,7 +303,7 @@ createschemastmt
|
|||||||
;
|
;
|
||||||
|
|
||||||
schema_name_create
|
schema_name_create
|
||||||
: colid attrs?
|
: colid attrs? # schemaNameCreate
|
||||||
;
|
;
|
||||||
|
|
||||||
optschemaeltlist
|
optschemaeltlist
|
||||||
@ -469,7 +474,7 @@ altertablestmt
|
|||||||
| KW_FINALIZE
|
| KW_FINALIZE
|
||||||
)?
|
)?
|
||||||
| KW_ALTER KW_INDEX opt_if_exists? qualified_name (alter_table_cmds | index_partition_cmd)
|
| KW_ALTER KW_INDEX opt_if_exists? qualified_name (alter_table_cmds | index_partition_cmd)
|
||||||
| KW_ALTER KW_INDEX KW_ALL KW_IN KW_TABLESPACE tablespace_name (KW_OWNED KW_BY role_list)? KW_SET KW_TABLESPACE tablespace_name_create opt_nowait?
|
| KW_ALTER KW_INDEX KW_ALL KW_IN KW_TABLESPACE tablespace_name (KW_OWNED KW_BY role_list)? KW_SET KW_TABLESPACE tablespace_name opt_nowait?
|
||||||
| KW_ALTER KW_SEQUENCE opt_if_exists? qualified_name alter_table_cmds
|
| KW_ALTER KW_SEQUENCE opt_if_exists? qualified_name alter_table_cmds
|
||||||
| KW_ALTER KW_VIEW opt_if_exists? view_name alter_table_cmds
|
| KW_ALTER KW_VIEW opt_if_exists? view_name alter_table_cmds
|
||||||
| KW_ALTER KW_MATERIALIZED KW_VIEW opt_if_exists? view_name alter_table_cmds
|
| KW_ALTER KW_MATERIALIZED KW_VIEW opt_if_exists? view_name alter_table_cmds
|
||||||
@ -538,7 +543,7 @@ alter_table_cmd
|
|||||||
| KW_FORCE KW_ROW KW_LEVEL KW_SECURITY
|
| KW_FORCE KW_ROW KW_LEVEL KW_SECURITY
|
||||||
| KW_NO KW_FORCE KW_ROW KW_LEVEL KW_SECURITY
|
| KW_NO KW_FORCE KW_ROW KW_LEVEL KW_SECURITY
|
||||||
| KW_DROP KW_COLUMN? opt_if_exists? column_name opt_drop_behavior?
|
| KW_DROP KW_COLUMN? opt_if_exists? column_name opt_drop_behavior?
|
||||||
| KW_ADD KW_COLUMN? opt_if_not_exists? columnDefCluase
|
| KW_ADD KW_COLUMN? opt_if_not_exists? column_def
|
||||||
| KW_ALTER KW_COLUMN? column_name alter_column_default
|
| KW_ALTER KW_COLUMN? column_name alter_column_default
|
||||||
| KW_ALTER KW_COLUMN? column_name (KW_DROP | KW_SET) KW_NOT KW_NULL
|
| KW_ALTER KW_COLUMN? column_name (KW_DROP | KW_SET) KW_NOT KW_NULL
|
||||||
| KW_ALTER KW_COLUMN? column_name KW_DROP KW_EXPRESSION opt_if_exists?
|
| KW_ALTER KW_COLUMN? column_name KW_DROP KW_EXPRESSION opt_if_exists?
|
||||||
@ -674,10 +679,10 @@ copy_opt_item
|
|||||||
| KW_HEADER
|
| KW_HEADER
|
||||||
| KW_QUOTE opt_as? sconst
|
| KW_QUOTE opt_as? sconst
|
||||||
| KW_ESCAPE opt_as? sconst
|
| KW_ESCAPE opt_as? sconst
|
||||||
| KW_FORCE KW_QUOTE columnlist
|
| KW_FORCE KW_QUOTE column_list
|
||||||
| KW_FORCE KW_QUOTE STAR
|
| KW_FORCE KW_QUOTE STAR
|
||||||
| KW_FORCE KW_NOT KW_NULL columnlist
|
| KW_FORCE KW_NOT KW_NULL column_list
|
||||||
| KW_FORCE KW_NULL columnlist
|
| KW_FORCE KW_NULL column_list
|
||||||
| KW_ENCODING sconst
|
| KW_ENCODING sconst
|
||||||
;
|
;
|
||||||
|
|
||||||
@ -722,7 +727,7 @@ createstmt
|
|||||||
| KW_OF any_name opttypedtableelementlist? optpartitionspec? table_access_method_clause? optwith? oncommitoption? opttablespace?
|
| KW_OF any_name opttypedtableelementlist? optpartitionspec? table_access_method_clause? optwith? oncommitoption? opttablespace?
|
||||||
| KW_PARTITION KW_OF qualified_name opttypedtableelementlist? partitionboundspec optpartitionspec? table_access_method_clause? optwith?
|
| KW_PARTITION KW_OF qualified_name opttypedtableelementlist? partitionboundspec optpartitionspec? table_access_method_clause? optwith?
|
||||||
oncommitoption? opttablespace?
|
oncommitoption? opttablespace?
|
||||||
)
|
) # columnCreateTable
|
||||||
;
|
;
|
||||||
|
|
||||||
opttemp
|
opttemp
|
||||||
@ -754,7 +759,7 @@ typedtableelementlist
|
|||||||
;
|
;
|
||||||
|
|
||||||
tableelement
|
tableelement
|
||||||
: columnDef
|
: column_def
|
||||||
| tablelikeclause
|
| tablelikeclause
|
||||||
| tableconstraint
|
| tableconstraint
|
||||||
;
|
;
|
||||||
@ -764,14 +769,8 @@ typedtableelement
|
|||||||
| tableconstraint
|
| tableconstraint
|
||||||
;
|
;
|
||||||
|
|
||||||
columnDefCluase
|
column_def
|
||||||
: column_name typename create_generic_options? storageCluase? compressionCluase? (
|
: column_name_create typename create_generic_options? storageCluase? compressionCluase? (
|
||||||
KW_COLLATE any_name
|
|
||||||
)? (KW_WITH KW_OPTIONS)? colquallist
|
|
||||||
;
|
|
||||||
|
|
||||||
columnDef
|
|
||||||
: column_name typename create_generic_options? storageCluase? compressionCluase? (
|
|
||||||
KW_COLLATE any_name
|
KW_COLLATE any_name
|
||||||
)? (KW_WITH KW_OPTIONS)? colquallist
|
)? (KW_WITH KW_OPTIONS)? colquallist
|
||||||
;
|
;
|
||||||
@ -785,7 +784,7 @@ storageCluase
|
|||||||
;
|
;
|
||||||
|
|
||||||
columnOptions
|
columnOptions
|
||||||
: column_name (KW_WITH KW_OPTIONS)? colquallist
|
: column_name_create (KW_WITH KW_OPTIONS)? colquallist
|
||||||
;
|
;
|
||||||
|
|
||||||
colquallist
|
colquallist
|
||||||
@ -859,16 +858,16 @@ tableconstraint
|
|||||||
constraintelem
|
constraintelem
|
||||||
: KW_CHECK OPEN_PAREN a_expr CLOSE_PAREN constraintattributespec
|
: KW_CHECK OPEN_PAREN a_expr CLOSE_PAREN constraintattributespec
|
||||||
| KW_UNIQUE (
|
| KW_UNIQUE (
|
||||||
OPEN_PAREN columnlist CLOSE_PAREN opt_c_include? opt_definition? optconstablespace? constraintattributespec
|
OPEN_PAREN column_list CLOSE_PAREN opt_c_include? opt_definition? optconstablespace? constraintattributespec
|
||||||
| existingindex constraintattributespec
|
| existingindex constraintattributespec
|
||||||
)
|
)
|
||||||
| KW_PRIMARY KW_KEY (
|
| KW_PRIMARY KW_KEY (
|
||||||
OPEN_PAREN columnlist CLOSE_PAREN opt_c_include? opt_definition? optconstablespace? constraintattributespec
|
OPEN_PAREN column_list CLOSE_PAREN opt_c_include? opt_definition? optconstablespace? constraintattributespec
|
||||||
| existingindex constraintattributespec
|
| existingindex constraintattributespec
|
||||||
)
|
)
|
||||||
| KW_EXCLUDE access_method_clause? OPEN_PAREN exclusionconstraintlist CLOSE_PAREN opt_c_include? opt_definition? optconstablespace?
|
| KW_EXCLUDE access_method_clause? OPEN_PAREN exclusionconstraintlist CLOSE_PAREN opt_c_include? opt_definition? optconstablespace?
|
||||||
exclusionwhereclause? constraintattributespec
|
exclusionwhereclause? constraintattributespec
|
||||||
| KW_FOREIGN KW_KEY OPEN_PAREN columnlist CLOSE_PAREN KW_REFERENCES qualified_name opt_column_list? key_match? key_actions?
|
| KW_FOREIGN KW_KEY OPEN_PAREN column_list CLOSE_PAREN KW_REFERENCES qualified_name opt_column_list? key_match? key_actions?
|
||||||
constraintattributespec
|
constraintattributespec
|
||||||
;
|
;
|
||||||
|
|
||||||
@ -877,15 +876,23 @@ opt_no_inherit
|
|||||||
;
|
;
|
||||||
|
|
||||||
opt_column_list
|
opt_column_list
|
||||||
: OPEN_PAREN columnlist CLOSE_PAREN
|
: OPEN_PAREN column_list CLOSE_PAREN
|
||||||
;
|
;
|
||||||
|
|
||||||
columnlist
|
opt_column_list_create
|
||||||
|
: OPEN_PAREN column_list_create CLOSE_PAREN
|
||||||
|
;
|
||||||
|
|
||||||
|
column_list
|
||||||
: column_name (COMMA column_name)*
|
: column_name (COMMA column_name)*
|
||||||
;
|
;
|
||||||
|
|
||||||
|
column_list_create
|
||||||
|
: column_name_create (COMMA column_name_create)*
|
||||||
|
;
|
||||||
|
|
||||||
opt_c_include
|
opt_c_include
|
||||||
: KW_INCLUDE OPEN_PAREN columnlist CLOSE_PAREN
|
: KW_INCLUDE OPEN_PAREN column_list CLOSE_PAREN
|
||||||
;
|
;
|
||||||
|
|
||||||
key_match
|
key_match
|
||||||
@ -923,7 +930,7 @@ key_action
|
|||||||
: KW_NO KW_ACTION
|
: KW_NO KW_ACTION
|
||||||
| KW_RESTRICT
|
| KW_RESTRICT
|
||||||
| KW_CASCADE
|
| KW_CASCADE
|
||||||
| KW_SET (KW_NULL | KW_DEFAULT) columnlist?
|
| KW_SET (KW_NULL | KW_DEFAULT) column_list?
|
||||||
;
|
;
|
||||||
|
|
||||||
optinherit
|
optinherit
|
||||||
@ -990,11 +997,11 @@ alterstatsstmt
|
|||||||
;
|
;
|
||||||
|
|
||||||
createasstmt
|
createasstmt
|
||||||
: KW_CREATE opttemp? KW_TABLE opt_if_not_exists? create_as_target KW_AS selectstmt opt_with_data?
|
: KW_CREATE opttemp? KW_TABLE opt_if_not_exists? create_as_target KW_AS selectstmt opt_with_data? # queryCreateTable
|
||||||
;
|
;
|
||||||
|
|
||||||
create_as_target
|
create_as_target
|
||||||
: table_name_create opt_column_list? table_access_method_clause? optwith? oncommitoption? opttablespace?
|
: table_name_create opt_column_list_create? table_access_method_clause? optwith? oncommitoption? opttablespace?
|
||||||
;
|
;
|
||||||
|
|
||||||
opt_with_data
|
opt_with_data
|
||||||
@ -1002,11 +1009,11 @@ opt_with_data
|
|||||||
;
|
;
|
||||||
|
|
||||||
creatematviewstmt
|
creatematviewstmt
|
||||||
: KW_CREATE optnolog? KW_MATERIALIZED KW_VIEW opt_if_not_exists? create_mv_target KW_AS selectstmt opt_with_data?
|
: KW_CREATE optnolog? KW_MATERIALIZED KW_VIEW opt_if_not_exists? create_mv_target KW_AS selectstmt opt_with_data? # createMaterializedView
|
||||||
;
|
;
|
||||||
|
|
||||||
create_mv_target
|
create_mv_target
|
||||||
: view_name_create opt_column_list? table_access_method_clause? opt_reloptions? opttablespace?
|
: view_name_create opt_column_list_create? table_access_method_clause? opt_reloptions? opttablespace?
|
||||||
;
|
;
|
||||||
|
|
||||||
optnolog
|
optnolog
|
||||||
@ -1232,9 +1239,9 @@ alterforeignserverstmt
|
|||||||
|
|
||||||
createforeigntablestmt
|
createforeigntablestmt
|
||||||
: KW_CREATE KW_FOREIGN KW_TABLE opt_if_not_exists? table_name_create OPEN_PAREN opttableelementlist? CLOSE_PAREN optinherit? KW_SERVER name
|
: KW_CREATE KW_FOREIGN KW_TABLE opt_if_not_exists? table_name_create OPEN_PAREN opttableelementlist? CLOSE_PAREN optinherit? KW_SERVER name
|
||||||
create_generic_options?
|
create_generic_options? # createForeignTable
|
||||||
| KW_CREATE KW_FOREIGN KW_TABLE opt_if_not_exists? table_name_create KW_PARTITION KW_OF table_name opttypedtableelementlist? partitionboundspec
|
| KW_CREATE KW_FOREIGN KW_TABLE opt_if_not_exists? table_name_create KW_PARTITION KW_OF table_name opttypedtableelementlist? partitionboundspec
|
||||||
KW_SERVER name create_generic_options?
|
KW_SERVER name create_generic_options? # createPartitionForeignTable
|
||||||
;
|
;
|
||||||
|
|
||||||
importforeignschemastmt
|
importforeignschemastmt
|
||||||
@ -1363,7 +1370,7 @@ triggeroneevent
|
|||||||
: KW_INSERT
|
: KW_INSERT
|
||||||
| KW_DELETE
|
| KW_DELETE
|
||||||
| KW_UPDATE
|
| KW_UPDATE
|
||||||
| KW_UPDATE KW_OF columnlist
|
| KW_UPDATE KW_OF column_list
|
||||||
| KW_TRUNCATE
|
| KW_TRUNCATE
|
||||||
;
|
;
|
||||||
|
|
||||||
@ -1806,8 +1813,8 @@ privileges
|
|||||||
: privilege_list
|
: privilege_list
|
||||||
| KW_ALL
|
| KW_ALL
|
||||||
| KW_ALL KW_PRIVILEGES
|
| KW_ALL KW_PRIVILEGES
|
||||||
| KW_ALL OPEN_PAREN columnlist CLOSE_PAREN
|
| KW_ALL OPEN_PAREN column_list CLOSE_PAREN
|
||||||
| KW_ALL KW_PRIVILEGES OPEN_PAREN columnlist CLOSE_PAREN
|
| KW_ALL KW_PRIVILEGES OPEN_PAREN column_list CLOSE_PAREN
|
||||||
| beforeprivilegeselectlist
|
| beforeprivilegeselectlist
|
||||||
;
|
;
|
||||||
|
|
||||||
@ -2350,28 +2357,28 @@ opt_no
|
|||||||
;
|
;
|
||||||
|
|
||||||
alterobjectschemastmt
|
alterobjectschemastmt
|
||||||
: KW_ALTER KW_AGGREGATE aggregate_with_argtypes KW_SET KW_SCHEMA schema_name_create
|
: KW_ALTER KW_AGGREGATE aggregate_with_argtypes KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_COLLATION any_name KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_COLLATION any_name KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_CONVERSION any_name KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_CONVERSION any_name KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_DOMAIN any_name KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_DOMAIN any_name KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_EXTENSION name KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_EXTENSION name KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_FUNCTION function_with_argtypes KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_FUNCTION function_with_argtypes KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_OPERATOR operator_with_argtypes KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_OPERATOR operator_with_argtypes KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_OPERATOR KW_CLASS any_name KW_USING name KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_OPERATOR KW_CLASS any_name KW_USING name KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_OPERATOR KW_FAMILY any_name KW_USING name KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_OPERATOR KW_FAMILY any_name KW_USING name KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_PROCEDURE procedure_with_argtypes KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_PROCEDURE procedure_with_argtypes KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_ROUTINE routine_with_argtypes KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_ROUTINE routine_with_argtypes KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_TABLE opt_if_exists? relation_expr KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_TABLE opt_if_exists? relation_expr KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_STATISTICS any_name KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_STATISTICS any_name KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_TEXT KW_SEARCH KW_PARSER any_name KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_TEXT KW_SEARCH KW_PARSER any_name KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_TEXT KW_SEARCH KW_DICTIONARY any_name KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_TEXT KW_SEARCH KW_DICTIONARY any_name KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_TEXT KW_SEARCH KW_TEMPLATE any_name KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_TEXT KW_SEARCH KW_TEMPLATE any_name KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_TEXT KW_SEARCH KW_CONFIGURATION any_name KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_TEXT KW_SEARCH KW_CONFIGURATION any_name KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_SEQUENCE opt_if_exists? qualified_name KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_SEQUENCE opt_if_exists? qualified_name KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_VIEW opt_if_exists? view_name KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_VIEW opt_if_exists? view_name KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_MATERIALIZED KW_VIEW opt_if_exists? view_name KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_MATERIALIZED KW_VIEW opt_if_exists? view_name KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_FOREIGN KW_TABLE opt_if_exists? relation_expr KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_FOREIGN KW_TABLE opt_if_exists? relation_expr KW_SET KW_SCHEMA schema_name
|
||||||
| KW_ALTER KW_TYPE any_name KW_SET KW_SCHEMA schema_name_create
|
| KW_ALTER KW_TYPE any_name KW_SET KW_SCHEMA schema_name
|
||||||
;
|
;
|
||||||
|
|
||||||
alteroperatorstmt
|
alteroperatorstmt
|
||||||
@ -2571,9 +2578,9 @@ opt_transaction_chain
|
|||||||
|
|
||||||
viewstmt
|
viewstmt
|
||||||
: KW_CREATE (KW_OR KW_REPLACE)? opttemp? (
|
: KW_CREATE (KW_OR KW_REPLACE)? opttemp? (
|
||||||
KW_VIEW view_name_create opt_column_list? opt_reloptions?
|
KW_VIEW view_name_create opt_column_list_create? opt_reloptions?
|
||||||
| KW_RECURSIVE KW_VIEW view_name_create OPEN_PAREN columnlist CLOSE_PAREN opt_reloptions?
|
| KW_RECURSIVE KW_VIEW view_name_create OPEN_PAREN column_list CLOSE_PAREN opt_reloptions?
|
||||||
) KW_AS selectstmt opt_check_option?
|
) KW_AS selectstmt opt_check_option? # createView
|
||||||
;
|
;
|
||||||
|
|
||||||
opt_check_option
|
opt_check_option
|
||||||
@ -2585,7 +2592,7 @@ loadstmt
|
|||||||
;
|
;
|
||||||
|
|
||||||
createdbstmt
|
createdbstmt
|
||||||
: KW_CREATE KW_DATABASE database_name_create opt_with? createdb_opt_list?
|
: KW_CREATE KW_DATABASE database_name_create opt_with? createdb_opt_list? # createDatabase
|
||||||
;
|
;
|
||||||
|
|
||||||
createdb_opt_list
|
createdb_opt_list
|
||||||
@ -2762,7 +2769,7 @@ opt_freeze
|
|||||||
;
|
;
|
||||||
|
|
||||||
opt_name_list
|
opt_name_list
|
||||||
: OPEN_PAREN columnlist CLOSE_PAREN
|
: OPEN_PAREN column_list CLOSE_PAREN
|
||||||
;
|
;
|
||||||
|
|
||||||
vacuum_relation
|
vacuum_relation
|
||||||
@ -2843,7 +2850,7 @@ deallocatestmt
|
|||||||
;
|
;
|
||||||
|
|
||||||
insertstmt
|
insertstmt
|
||||||
: opt_with_clause? KW_INSERT KW_INTO insert_target insert_rest opt_on_conflict? returning_clause?
|
: opt_with_clause? KW_INSERT KW_INTO insert_target insert_rest opt_on_conflict? returning_clause? # insertStatement
|
||||||
;
|
;
|
||||||
|
|
||||||
insert_target
|
insert_target
|
||||||
@ -2971,8 +2978,8 @@ opt_hold
|
|||||||
*/
|
*/
|
||||||
|
|
||||||
selectstmt
|
selectstmt
|
||||||
: select_no_parens
|
: select_no_parens # selectStatement
|
||||||
| select_with_parens
|
| select_with_parens # selectStatement
|
||||||
;
|
;
|
||||||
|
|
||||||
select_with_parens
|
select_with_parens
|
||||||
@ -3029,11 +3036,11 @@ common_table_expr
|
|||||||
;
|
;
|
||||||
|
|
||||||
search_cluase
|
search_cluase
|
||||||
: KW_SEARCH (KW_BREADTH | KW_DEPTH) KW_FIRST KW_BY columnlist KW_SET column_name
|
: KW_SEARCH (KW_BREADTH | KW_DEPTH) KW_FIRST KW_BY column_list KW_SET column_name
|
||||||
;
|
;
|
||||||
|
|
||||||
cycle_cluase
|
cycle_cluase
|
||||||
: KW_CYCLE columnlist KW_SET column_name (KW_TO name KW_DEFAULT name)? KW_USING column_name
|
: KW_CYCLE column_list KW_SET column_name (KW_TO name KW_DEFAULT name)? KW_USING column_name
|
||||||
;
|
;
|
||||||
|
|
||||||
opt_materialized
|
opt_materialized
|
||||||
@ -3265,7 +3272,7 @@ join_type
|
|||||||
;
|
;
|
||||||
|
|
||||||
join_qual
|
join_qual
|
||||||
: KW_USING OPEN_PAREN columnlist CLOSE_PAREN
|
: KW_USING OPEN_PAREN column_list CLOSE_PAREN
|
||||||
| KW_ON a_expr
|
| KW_ON a_expr
|
||||||
;
|
;
|
||||||
|
|
||||||
@ -3276,11 +3283,11 @@ relation_expr
|
|||||||
;
|
;
|
||||||
|
|
||||||
view_relation_expr
|
view_relation_expr
|
||||||
: KW_ONLY? view_name STAR?
|
: KW_ONLY? view_name STAR? column_list? where_clause?
|
||||||
;
|
;
|
||||||
|
|
||||||
publication_relation_expr
|
publication_relation_expr
|
||||||
: KW_TABLE KW_ONLY? table_name STAR? (OPEN_PAREN columnlist CLOSE_PAREN)? where_clause?
|
: KW_TABLE KW_ONLY? table_name STAR? (OPEN_PAREN column_list CLOSE_PAREN)? where_clause?
|
||||||
| KW_TABLE KW_ONLY ( table_name | OPEN_PAREN table_name CLOSE_PAREN)
|
| KW_TABLE KW_ONLY ( table_name | OPEN_PAREN table_name CLOSE_PAREN)
|
||||||
| KW_TABLES KW_IN KW_SCHEMA (schema_name | KW_CURRENT_SCHEMA)
|
| KW_TABLES KW_IN KW_SCHEMA (schema_name | KW_CURRENT_SCHEMA)
|
||||||
;
|
;
|
||||||
@ -3949,13 +3956,13 @@ column_expr_list
|
|||||||
;
|
;
|
||||||
|
|
||||||
column_expr
|
column_expr
|
||||||
: column_name
|
: (OPEN_PAREN a_expr CLOSE_PAREN)
|
||||||
| (OPEN_PAREN a_expr CLOSE_PAREN)
|
| column_name
|
||||||
;
|
;
|
||||||
|
|
||||||
column_expr_noparen
|
column_expr_noparen
|
||||||
: column_name
|
: a_expr
|
||||||
| a_expr
|
| column_name
|
||||||
;
|
;
|
||||||
|
|
||||||
func_arg_list
|
func_arg_list
|
||||||
@ -4104,27 +4111,27 @@ procedure_name_list
|
|||||||
;
|
;
|
||||||
|
|
||||||
tablespace_name_create
|
tablespace_name_create
|
||||||
: colid indirection?
|
: colid indirection? # tablespaceNameCreate
|
||||||
;
|
;
|
||||||
|
|
||||||
tablespace_name
|
tablespace_name
|
||||||
: colid indirection?
|
: colid indirection? # tablespaceName
|
||||||
;
|
;
|
||||||
|
|
||||||
table_name_create
|
table_name_create
|
||||||
: colid indirection?
|
: colid indirection? # tableNameCreate
|
||||||
;
|
;
|
||||||
|
|
||||||
table_name
|
table_name
|
||||||
: colid indirection?
|
: colid indirection? # tableName
|
||||||
;
|
;
|
||||||
|
|
||||||
view_name_create
|
view_name_create
|
||||||
: colid indirection?
|
: colid indirection? # viewNameCreate
|
||||||
;
|
;
|
||||||
|
|
||||||
view_name
|
view_name
|
||||||
: colid attrs?
|
: colid attrs? # viewName
|
||||||
;
|
;
|
||||||
|
|
||||||
qualified_name
|
qualified_name
|
||||||
@ -4140,41 +4147,42 @@ name_list
|
|||||||
;
|
;
|
||||||
|
|
||||||
database_name_create
|
database_name_create
|
||||||
: colid attrs?
|
: colid attrs? # databaseNameCreate
|
||||||
;
|
;
|
||||||
|
|
||||||
database_name
|
database_name
|
||||||
: colid attrs?
|
: colid attrs? # databaseName
|
||||||
;
|
;
|
||||||
|
|
||||||
schema_name
|
schema_name
|
||||||
: colid attrs?
|
: colid attrs? # schemaName
|
||||||
;
|
;
|
||||||
|
|
||||||
routine_name_create
|
routine_name_create
|
||||||
: colid
|
: colid # routineNameCreate
|
||||||
;
|
;
|
||||||
|
|
||||||
routine_name
|
routine_name
|
||||||
: colid
|
: colid # routineName
|
||||||
;
|
;
|
||||||
|
|
||||||
procedure_name
|
procedure_name
|
||||||
: type_function_name
|
: type_function_name # procedureName
|
||||||
| colid indirection
|
| colid indirection # procedureName
|
||||||
;
|
;
|
||||||
|
|
||||||
procedure_name_create
|
procedure_name_create
|
||||||
: type_function_name
|
: type_function_name # procedureNameCreate
|
||||||
| colid indirection
|
| colid indirection # procedureNameCreate
|
||||||
;
|
;
|
||||||
|
|
||||||
column_name
|
column_name
|
||||||
: colid indirection_el*
|
: colid indirection_el* # columnName
|
||||||
|
| {this.shouldMatchEmpty()}? # columnNameMatch
|
||||||
;
|
;
|
||||||
|
|
||||||
column_name_create
|
column_name_create
|
||||||
: colid
|
: colid # columnNameCreate
|
||||||
;
|
;
|
||||||
|
|
||||||
name
|
name
|
||||||
@ -4190,13 +4198,13 @@ file_name
|
|||||||
;
|
;
|
||||||
|
|
||||||
function_name_create
|
function_name_create
|
||||||
: type_function_name
|
: type_function_name # functionNameCreate
|
||||||
| colid indirection
|
| colid indirection # functionNameCreate
|
||||||
;
|
;
|
||||||
|
|
||||||
function_name
|
function_name
|
||||||
: type_function_name
|
: type_function_name # functionName
|
||||||
| colid indirection
|
| colid indirection # functionName
|
||||||
;
|
;
|
||||||
|
|
||||||
usual_name
|
usual_name
|
||||||
@ -5353,13 +5361,15 @@ merge_when_clause
|
|||||||
;
|
;
|
||||||
|
|
||||||
merge_insert
|
merge_insert
|
||||||
: KW_INSERT (OPEN_PAREN columnlist CLOSE_PAREN)? (KW_OVERRIDING (KW_SYSTEM | KW_USER) KW_VALUE)? default_values_or_values
|
: KW_INSERT (OPEN_PAREN column_list CLOSE_PAREN)? (
|
||||||
|
KW_OVERRIDING (KW_SYSTEM | KW_USER) KW_VALUE
|
||||||
|
)? default_values_or_values
|
||||||
;
|
;
|
||||||
|
|
||||||
merge_update
|
merge_update
|
||||||
: KW_UPDATE KW_SET (
|
: KW_UPDATE KW_SET (
|
||||||
column_name EQUAL exprofdefault
|
column_name EQUAL exprofdefault
|
||||||
| OPEN_PAREN columnlist CLOSE_PAREN EQUAL OPEN_PAREN exprofdefaultlist CLOSE_PAREN
|
| OPEN_PAREN column_list CLOSE_PAREN EQUAL OPEN_PAREN exprofdefaultlist CLOSE_PAREN
|
||||||
)+
|
)+
|
||||||
;
|
;
|
||||||
|
|
||||||
|
@@ -15,7 +15,7 @@

 /**
 * This file is an adaptation of spark's spark/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4 grammar.
-* Reference: https://github.com/apache/spark/blob/master/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4
+* Reference: https://github.com/apache/spark/blob/v3.5.0/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseLexer.g4
 */

 // $antlr-format alignTrailingComments true, columnLimit 150, maxEmptyLinesToKeep 1, reflowComments false, useTab false
@@ -15,7 +15,7 @@

 /**
 * This file is an adaptation of spark's spark/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4 grammar.
-* Reference: https://github.com/apache/spark/blob/master/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
+* Reference: https://github.com/apache/spark/blob/v3.5.0/sql/api/src/main/antlr4/org/apache/spark/sql/catalyst/parser/SqlBaseParser.g4
 */

 // $antlr-format alignTrailingComments true, columnLimit 150, minEmptyLines 1, maxEmptyLinesToKeep 1, reflowComments false, useTab false
@@ -27,6 +27,11 @@ parser grammar SparkSqlParser;
 options {
     tokenVocab=SparkSqlLexer;
     caseInsensitive= true;
+    superClass=SQLParserBase;
+}
+
+@header {
+    import SQLParserBase from '../SQLParserBase';
 }

 program
@@ -38,78 +43,78 @@ singleStatement
     ;

 statement
-    : query
-    | ctes? dmlStatementNoWith
-    | KW_USE dbSchemaName
-    | KW_USE dbSchema dbSchemaName
-    | KW_SET KW_CATALOG (identifier | stringLit)
-    | KW_CREATE dbSchema (ifNotExists)? dbSchemaNameCreate (
+    : query # statementDefault
+    | ctes? dmlStatementNoWith # dmlStatement
+    | KW_USE namespace? namespaceName # useNamespace
+    | KW_SET KW_CATALOG (identifier | stringLit) # setCatalog
+    | KW_CREATE namespace (ifNotExists)? namespaceNameCreate (
         commentSpec
         | locationSpec
         | (KW_WITH (KW_DBPROPERTIES | KW_PROPERTIES) propertyList)
-    )*
-    | KW_ALTER dbSchema dbSchemaName KW_SET (KW_DBPROPERTIES | KW_PROPERTIES) propertyList
-    | KW_ALTER dbSchema dbSchemaName KW_SET locationSpec
-    | KW_DROP dbSchema (ifExists)? dbSchemaName (KW_RESTRICT | KW_CASCADE)?
-    | KW_SHOW dbSchemas ((KW_FROM | KW_IN) multipartIdentifier)? (KW_LIKE? pattern=stringLit)?
+    )* # createNamespace
+    | KW_ALTER namespace namespaceName KW_SET (KW_DBPROPERTIES | KW_PROPERTIES) propertyList # setNamespaceProperties
+    | KW_ALTER namespace namespaceName KW_SET locationSpec # setNamespaceLocation
+    | KW_DROP namespace (ifExists)? namespaceName (KW_RESTRICT | KW_CASCADE)? # dropNamespace
+    | KW_SHOW namespaces ((KW_FROM | KW_IN) multipartIdentifier)? (KW_LIKE? pattern=stringLit)? # showNamespaces
     | createTableHeader (LEFT_PAREN createOrReplaceTableColTypeList RIGHT_PAREN)? tableProvider? createTableClauses (
         KW_AS? query
-    )?
+    )? # createTable
     | KW_CREATE KW_TABLE (ifNotExists)? target=tableNameCreate KW_LIKE source=tableName (
         tableProvider
         | rowFormat
         | createFileFormat
         | locationSpec
         | (KW_TBLPROPERTIES tableProps=propertyList)
-    )*
+    )* # createTableLike
     | replaceTableHeader (LEFT_PAREN createOrReplaceTableColTypeList RIGHT_PAREN)? tableProvider? createTableClauses (
         KW_AS? query
-    )?
+    )? # replaceTable
     | KW_ANALYZE KW_TABLE tableName partitionSpec? KW_COMPUTE KW_STATISTICS (
         KW_NOSCAN
         | KW_FOR KW_COLUMNS columnNameSeq
         | KW_FOR KW_ALL KW_COLUMNS
-    )?
-    | KW_ANALYZE KW_TABLES ((KW_FROM | KW_IN) dbSchemaName)? KW_COMPUTE KW_STATISTICS (KW_NOSCAN)?
-    | KW_ALTER KW_TABLE tableName KW_ADD KW_COLUMN qualifiedColTypeWithPositionForAdd
-    | KW_ALTER KW_TABLE tableName KW_ADD KW_COLUMNS LEFT_PAREN qualifiedColTypeWithPositionSeqForAdd RIGHT_PAREN
-    | KW_ALTER KW_TABLE table=tableName KW_RENAME KW_COLUMN columnName KW_TO columnNameCreate
-    | KW_ALTER KW_TABLE tableName KW_DROP KW_COLUMN (ifExists)? columnName
-    | KW_ALTER KW_TABLE tableName KW_DROP KW_COLUMNS (ifExists)? LEFT_PAREN columnNameSeq RIGHT_PAREN
-    | KW_ALTER (KW_TABLE tableName | KW_VIEW viewName) KW_RENAME KW_TO multipartIdentifier
-    | KW_ALTER (KW_TABLE tableName | KW_VIEW viewName) KW_SET KW_TBLPROPERTIES propertyList
-    | KW_ALTER (KW_TABLE tableName | KW_VIEW viewName) KW_UNSET KW_TBLPROPERTIES (ifExists)? propertyList
-    | KW_ALTER KW_TABLE table=tableName (KW_ALTER | KW_CHANGE) KW_COLUMN? column=columnName alterColumnAction?
-    | KW_ALTER KW_TABLE table=tableName partitionSpec? KW_CHANGE KW_COLUMN? colName=columnName colType colPosition?
-    | KW_ALTER KW_TABLE table=tableName partitionSpec? KW_REPLACE KW_COLUMNS LEFT_PAREN qualifiedColTypeWithPositionSeqForReplace RIGHT_PAREN
+    )? # analyze
+    | KW_ANALYZE KW_TABLES ((KW_FROM | KW_IN) namespaceName)? KW_COMPUTE KW_STATISTICS (KW_NOSCAN)? # analyzeTables
+    | KW_ALTER KW_TABLE tableName KW_ADD KW_COLUMN qualifiedColTypeWithPositionForAdd # alterTableAddColumn
+    | KW_ALTER KW_TABLE tableName KW_ADD KW_COLUMNS LEFT_PAREN qualifiedColTypeWithPositionSeqForAdd RIGHT_PAREN # alterTableAddColumns
+    | KW_ALTER KW_TABLE table=tableName KW_RENAME KW_COLUMN columnName KW_TO columnNameCreate # renameTableColumn
+    | KW_ALTER KW_TABLE tableName KW_DROP KW_COLUMN (ifExists)? columnName # alterTableDropColumn
+    | KW_ALTER KW_TABLE tableName KW_DROP KW_COLUMNS (ifExists)? LEFT_PAREN columnNameSeq RIGHT_PAREN # dropTableColumns
+    | KW_ALTER (KW_TABLE tableName | KW_VIEW viewName) KW_RENAME KW_TO multipartIdentifier # renameTable
+    | KW_ALTER (KW_TABLE tableName | KW_VIEW viewName) KW_SET KW_TBLPROPERTIES propertyList # setTableProperties
+    | KW_ALTER (KW_TABLE tableName | KW_VIEW viewName) KW_UNSET KW_TBLPROPERTIES (ifExists)? propertyList # unsetTableProperties
+    | KW_ALTER KW_TABLE table=tableName (KW_ALTER | KW_CHANGE) KW_COLUMN? column=columnName alterColumnAction? # alterTableAlterColumn
+    | KW_ALTER KW_TABLE table=tableName partitionSpec? KW_CHANGE KW_COLUMN? colName=columnName colType colPosition? # hiveChangeColumn
+    | KW_ALTER KW_TABLE table=tableName partitionSpec? KW_REPLACE KW_COLUMNS LEFT_PAREN qualifiedColTypeWithPositionSeqForReplace RIGHT_PAREN #
+        hiveReplaceColumns
     | KW_ALTER KW_TABLE tableName (partitionSpec)? KW_SET KW_SERDE stringLit (
         KW_WITH KW_SERDEPROPERTIES propertyList
-    )?
-    | KW_ALTER KW_TABLE tableName (partitionSpec)? KW_SET KW_SERDEPROPERTIES propertyList
-    | KW_ALTER (KW_TABLE tableName | KW_VIEW viewName) KW_ADD (ifNotExists)? partitionSpecLocation+
-    | KW_ALTER KW_TABLE tableName partitionSpec KW_RENAME KW_TO partitionSpec
+    )? # setTableSerDe
+    | KW_ALTER KW_TABLE tableName (partitionSpec)? KW_SET KW_SERDEPROPERTIES propertyList # setTableSerDeProperties
+    | KW_ALTER (KW_TABLE tableName | KW_VIEW viewName) KW_ADD (ifNotExists)? partitionSpecLocation+ # addTablePartition
+    | KW_ALTER KW_TABLE tableName partitionSpec KW_RENAME KW_TO partitionSpec # renameTablePartition
     | KW_ALTER (KW_TABLE tableName | KW_VIEW viewName) KW_DROP (ifExists)? partitionSpec (
         COMMA partitionSpec
-    )* KW_PURGE?
-    | KW_ALTER KW_TABLE tableName (partitionSpec)? KW_SET locationSpec
-    | KW_ALTER KW_TABLE tableName KW_RECOVER KW_PARTITIONS
-    | KW_ALTER KW_MATERIALIZED KW_VIEW viewName (KW_ENABLE | KW_DISABLE) KW_REWRITE
-    | KW_ALTER KW_MATERIALIZED KW_VIEW viewName KW_SET KW_TBLPROPERTIES propertyList
-    | KW_DROP KW_TABLE (ifExists)? tableName KW_PURGE?
-    | KW_DROP KW_VIEW (ifExists)? viewName
-    | KW_DROP KW_MATERIALIZED KW_VIEW (ifExists)? viewName
+    )* KW_PURGE? # dropTablePartitions
+    | KW_ALTER KW_TABLE tableName (partitionSpec)? KW_SET locationSpec # setTableLocation
+    | KW_ALTER KW_TABLE tableName KW_RECOVER KW_PARTITIONS # recoverPartitions
+    | KW_ALTER KW_MATERIALIZED KW_VIEW viewName (KW_ENABLE | KW_DISABLE) KW_REWRITE # alterMaterializedViewRewrite
+    | KW_ALTER KW_MATERIALIZED KW_VIEW viewName KW_SET KW_TBLPROPERTIES propertyList # alterMaterializedViewProperties
+    | KW_DROP KW_TABLE (ifExists)? tableName KW_PURGE? # dropTable
+    | KW_DROP KW_VIEW (ifExists)? viewName # dropView
+    | KW_DROP KW_MATERIALIZED KW_VIEW (ifExists)? viewName # dropMaterializedView
     | KW_CREATE (KW_OR KW_REPLACE)? (KW_GLOBAL? KW_TEMPORARY)? KW_VIEW (ifNotExists)? viewNameCreate identifierCommentList? (
         commentSpec
         | (KW_PARTITIONED KW_ON identifierList)
         | (KW_TBLPROPERTIES propertyList)
-    )* KW_AS query
+    )* KW_AS query # createView
     | KW_CREATE (KW_OR KW_REPLACE)? KW_GLOBAL? KW_TEMPORARY KW_VIEW viewNameCreate (
         LEFT_PAREN colTypeList RIGHT_PAREN
-    )? tableProvider (KW_OPTIONS propertyList)?
-    | KW_ALTER KW_VIEW viewName KW_AS? query
+    )? tableProvider (KW_OPTIONS propertyList)? # createTempViewUsing
+    | KW_ALTER KW_VIEW viewName KW_AS? query # alterViewQuery
     | KW_CREATE (KW_OR KW_REPLACE)? KW_TEMPORARY? KW_FUNCTION (ifNotExists)? functionNameCreate KW_AS className=stringLit (
         KW_USING resource (COMMA resource)*
-    )?
+    )? # createFunction
     |
     // Self developed materialized view syntax by dtstack, spark not support now.
     KW_CREATE KW_MATERIALIZED KW_VIEW (ifNotExists)? viewNameCreate tableProvider? (
@@ -122,62 +127,62 @@ statement
         | locationSpec
         | commentSpec
         | (KW_TBLPROPERTIES tableProps=propertyList)
-    )* KW_AS query
-    | KW_DROP KW_TEMPORARY? KW_FUNCTION (ifExists)? functionName
-    | KW_DECLARE (KW_OR KW_REPLACE)? KW_VARIABLE? functionName dataType? variableDefaultExpression?
-    | KW_DROP KW_TEMPORARY KW_VARIABLE (ifExists)? (tableName | viewName | functionName)
-    | KW_EXPLAIN (KW_LOGICAL | KW_FORMATTED | KW_EXTENDED | KW_CODEGEN | KW_COST)? statement
-    | KW_SHOW KW_TABLES ((KW_FROM | KW_IN) dbSchemaName)? (KW_LIKE? pattern=stringLit)?
-    | KW_SHOW KW_TABLE KW_EXTENDED ((KW_FROM | KW_IN) ns=dbSchemaName)? KW_LIKE pattern=stringLit partitionSpec?
+    )* KW_AS query # createMaterializedView
+    | KW_DROP KW_TEMPORARY? KW_FUNCTION (ifExists)? functionName # dropFunction
+    | KW_DECLARE (KW_OR KW_REPLACE)? KW_VARIABLE? functionName dataType? variableDefaultExpression? # declareVariable
+    | KW_DROP KW_TEMPORARY KW_VARIABLE (ifExists)? (tableName | viewName | functionName) # dropVariable
+    | KW_EXPLAIN (KW_LOGICAL | KW_FORMATTED | KW_EXTENDED | KW_CODEGEN | KW_COST)? statement # explainStatement
+    | KW_SHOW KW_TABLES ((KW_FROM | KW_IN) namespaceName)? (KW_LIKE? pattern=stringLit)? # showTables
+    | KW_SHOW KW_TABLE KW_EXTENDED ((KW_FROM | KW_IN) ns=namespaceName)? KW_LIKE pattern=stringLit partitionSpec? # showTableExtended
|
||||||
| KW_SHOW KW_TBLPROPERTIES table=tableName (LEFT_PAREN key=propertyKey RIGHT_PAREN)?
|
| KW_SHOW KW_TBLPROPERTIES table=tableName (LEFT_PAREN key=propertyKey RIGHT_PAREN)? # showTblProperties
|
||||||
| KW_SHOW KW_COLUMNS (KW_FROM | KW_IN) table=tableName ((KW_FROM | KW_IN) dbSchemaName)?
|
| KW_SHOW KW_COLUMNS (KW_FROM | KW_IN) table=tableName ((KW_FROM | KW_IN) namespaceName)? # showColumns
|
||||||
| KW_SHOW KW_VIEWS ((KW_FROM | KW_IN) dbSchemaName)? (KW_LIKE? pattern=stringLit)?
|
| KW_SHOW KW_VIEWS ((KW_FROM | KW_IN) namespaceName)? (KW_LIKE? pattern=stringLit)? # showViews
|
||||||
| KW_SHOW KW_PARTITIONS tableName partitionSpec?
|
| KW_SHOW KW_PARTITIONS tableName partitionSpec? # showPartitions
|
||||||
| KW_SHOW functionKind? KW_FUNCTIONS ((KW_FROM | KW_IN) ns=dbSchemaName)? (
|
| KW_SHOW functionKind? KW_FUNCTIONS ((KW_FROM | KW_IN) ns=namespaceName)? (
|
||||||
KW_LIKE? (legacy=multipartIdentifier | pattern=stringLit)
|
KW_LIKE? (legacy=multipartIdentifier | pattern=stringLit)
|
||||||
)?
|
)? # showFunctions
|
||||||
| KW_SHOW KW_CREATE KW_TABLE tableName (KW_AS KW_SERDE)?
|
| KW_SHOW KW_CREATE KW_TABLE tableName (KW_AS KW_SERDE)? # showCreateTable
|
||||||
| KW_SHOW KW_CURRENT dbSchema
|
| KW_SHOW KW_CURRENT namespace # showCurrentNamespace
|
||||||
| KW_SHOW KW_CATALOGS (KW_LIKE? pattern=stringLit)?
|
| KW_SHOW KW_CATALOGS (KW_LIKE? pattern=stringLit)? # showCatalogs
|
||||||
| KW_SHOW KW_MATERIALIZED KW_VIEWS ((KW_FROM | KW_IN) db_name=dbSchemaName)? (
|
| KW_SHOW KW_MATERIALIZED KW_VIEWS ((KW_FROM | KW_IN) db_name=namespaceName)? (
|
||||||
KW_LIKE? pattern=stringLit
|
KW_LIKE? pattern=stringLit
|
||||||
)?
|
)? # showMaterializedViews
|
||||||
| KW_SHOW KW_CREATE KW_MATERIALIZED KW_VIEW viewName (KW_AS KW_SERDE)?
|
| KW_SHOW KW_CREATE KW_MATERIALIZED KW_VIEW viewName (KW_AS KW_SERDE)? # showCreateMaterializedView
|
||||||
| (KW_DESC | KW_DESCRIBE) KW_FUNCTION KW_EXTENDED? describeFuncName
|
| (KW_DESC | KW_DESCRIBE) KW_FUNCTION KW_EXTENDED? describeFuncName # describeFunction
|
||||||
| (KW_DESC | KW_DESCRIBE) KW_DATABASE KW_EXTENDED? dbSchemaName
|
| (KW_DESC | KW_DESCRIBE) KW_DATABASE KW_EXTENDED? namespaceName # describeNamespace
|
||||||
| (KW_DESC | KW_DESCRIBE) KW_TABLE? option=(KW_EXTENDED | KW_FORMATTED)? tableName partitionSpec? describeColName?
|
| (KW_DESC | KW_DESCRIBE) KW_TABLE? option=(KW_EXTENDED | KW_FORMATTED)? tableName partitionSpec? describeColName? # describeRelation
|
||||||
| (KW_DESC | KW_DESCRIBE) KW_QUERY? query
|
| (KW_DESC | KW_DESCRIBE) KW_QUERY? query # describeQuery
|
||||||
| KW_COMMENT KW_ON dbSchema dbSchemaName KW_IS comment
|
| KW_COMMENT KW_ON namespace namespaceName KW_IS comment # commentNamespace
|
||||||
| KW_COMMENT KW_ON KW_TABLE tableName KW_IS comment
|
| KW_COMMENT KW_ON KW_TABLE tableName KW_IS comment # commentTable
|
||||||
| KW_REFRESH KW_TABLE tableName
|
| KW_REFRESH KW_TABLE tableName # refreshTable
|
||||||
| KW_REFRESH KW_FUNCTION functionName
|
| KW_REFRESH KW_FUNCTION functionName # refreshFunction
|
||||||
| KW_REFRESH (stringLit | .*?)
|
| KW_REFRESH (stringLit | .*?) # refreshResource
|
||||||
| KW_REFRESH KW_MATERIALIZED KW_VIEW viewName
|
| KW_REFRESH KW_MATERIALIZED KW_VIEW viewName # refreshMaterializedView
|
||||||
| KW_CACHE KW_LAZY? KW_TABLE tableName (KW_OPTIONS options=propertyList)? (KW_AS? query)?
|
| KW_CACHE KW_LAZY? KW_TABLE tableName (KW_OPTIONS options=propertyList)? (KW_AS? query)? # cacheTable
|
||||||
| KW_UNCACHE KW_TABLE (ifExists)? tableName
|
| KW_UNCACHE KW_TABLE (ifExists)? tableName # unCacheTable
|
||||||
| KW_CLEAR KW_CACHE
|
| KW_CLEAR KW_CACHE # clearCache
|
||||||
| KW_LOAD KW_DATA KW_LOCAL? KW_INPATH path=stringLit KW_OVERWRITE? KW_INTO KW_TABLE tableName partitionSpec?
|
| KW_LOAD KW_DATA KW_LOCAL? KW_INPATH path=stringLit KW_OVERWRITE? KW_INTO KW_TABLE tableName partitionSpec? # loadData
|
||||||
| KW_TRUNCATE KW_TABLE tableName partitionSpec?
|
| KW_TRUNCATE KW_TABLE tableName partitionSpec? # truncateTable
|
||||||
| (KW_MSCK)? KW_REPAIR KW_TABLE tableName (option=(KW_ADD | KW_DROP | KW_SYNC) KW_PARTITIONS)?
|
| (KW_MSCK)? KW_REPAIR KW_TABLE tableName (option=(KW_ADD | KW_DROP | KW_SYNC) KW_PARTITIONS)? # repairTable
|
||||||
| op=(KW_ADD | KW_LIST) identifier .*?
|
| op=(KW_ADD | KW_LIST) identifier .*? # manageResource
|
||||||
| KW_SET KW_ROLE .*?
|
| KW_SET KW_ROLE .*? # failNativeCommand
|
||||||
| KW_SET KW_TIME KW_ZONE interval
|
| KW_SET KW_TIME KW_ZONE interval # setTimeZoneInterval
|
||||||
| KW_SET KW_TIME KW_ZONE timezone
|
| KW_SET KW_TIME KW_ZONE timezone # setTimeZone
|
||||||
| KW_SET KW_TIME KW_ZONE .*?
|
| KW_SET KW_TIME KW_ZONE .*? # setTimeZoneAny
|
||||||
| KW_SET (KW_VARIABLE | KW_VAR) assignmentList
|
| KW_SET (KW_VARIABLE | KW_VAR) assignmentList # setVariableAssignment
|
||||||
| KW_SET (KW_VARIABLE | KW_VAR) LEFT_PAREN multipartIdentifierList RIGHT_PAREN EQ LEFT_PAREN query RIGHT_PAREN
|
| KW_SET (KW_VARIABLE | KW_VAR) LEFT_PAREN multipartIdentifierList RIGHT_PAREN EQ LEFT_PAREN query RIGHT_PAREN # setVariableMultiAssignment
|
||||||
| KW_SET configKey EQ configValue
|
| KW_SET configKey EQ configValue # setConfig
|
||||||
| KW_SET configKey (EQ .*?)?
|
| KW_SET configKey (EQ .*?)? # setConfigAndValue
|
||||||
| KW_SET .*? EQ configValue
|
| KW_SET .*? EQ configValue # setConfigAnyKey
|
||||||
| KW_SET .*?
|
| KW_SET .*? # setAny
|
||||||
| KW_RESET configKey
|
| KW_RESET configKey # resetConfig
|
||||||
| KW_RESET .*?
|
| KW_RESET .*? # resetAny
|
||||||
| KW_CREATE KW_INDEX (ifNotExists)? identifier KW_ON KW_TABLE? tableName (
|
| KW_CREATE KW_INDEX (ifNotExists)? identifier KW_ON KW_TABLE? tableName (
|
||||||
KW_USING indexType=identifier
|
KW_USING indexType=identifier
|
||||||
)? LEFT_PAREN multipartIdentifierPropertyList RIGHT_PAREN (KW_OPTIONS options=propertyList)?
|
)? LEFT_PAREN multipartIdentifierPropertyList RIGHT_PAREN (KW_OPTIONS options=propertyList)? # createIndex
|
||||||
| KW_DROP KW_INDEX (ifExists)? identifier KW_ON KW_TABLE? tableName
|
| KW_DROP KW_INDEX (ifExists)? identifier KW_ON KW_TABLE? tableName # dropIndex
|
||||||
| KW_OPTIMIZE tableName whereClause? zorderClause
|
| KW_OPTIMIZE tableName whereClause? zorderClause # optimizeTable
|
||||||
| unsupportedHiveNativeCommands .*?
|
| unsupportedHiveNativeCommands .*? # unsupportHiveCommands
|
||||||
;
|
;
|
||||||
|
|
||||||
timezone
|
timezone
|
||||||
@ -267,7 +272,7 @@ commentSpec
|
|||||||
;
|
;
|
||||||
|
|
||||||
query
|
query
|
||||||
: ctes? queryTerm queryOrganization
|
: ctes? queryTerm queryOrganization # queryStatement
|
||||||
;
|
;
|
||||||
|
|
||||||
insertInto
|
insertInto
|
||||||
@ -299,13 +304,13 @@ partitionVal
|
|||||||
| identifier EQ KW_DEFAULT
|
| identifier EQ KW_DEFAULT
|
||||||
;
|
;
|
||||||
|
|
||||||
dbSchema
|
namespace
|
||||||
: KW_NAMESPACE
|
: KW_NAMESPACE
|
||||||
| KW_DATABASE
|
| KW_DATABASE
|
||||||
| KW_SCHEMA
|
| KW_SCHEMA
|
||||||
;
|
;
|
||||||
|
|
||||||
dbSchemas
|
namespaces
|
||||||
: KW_NAMESPACES
|
: KW_NAMESPACES
|
||||||
| KW_DATABASES
|
| KW_DATABASES
|
||||||
| KW_SCHEMAS
|
| KW_SCHEMAS
|
||||||
@ -404,21 +409,21 @@ resource
|
|||||||
;
|
;
|
||||||
|
|
||||||
dmlStatementNoWith
|
dmlStatementNoWith
|
||||||
: insertInto query
|
: insertInto query # insertFromQuery
|
||||||
| fromClause multiInsertQueryBody+
|
| fromClause multiInsertQueryBody+ # multipleInsert
|
||||||
| KW_DELETE KW_FROM tableName tableAlias whereClause?
|
| KW_DELETE KW_FROM tableName tableAlias whereClause? # deleteFromTable
|
||||||
| KW_UPDATE tableName tableAlias setClause whereClause?
|
| KW_UPDATE tableName tableAlias setClause whereClause? # updateTable
|
||||||
| KW_MERGE KW_INTO target=tableName targetAlias=tableAlias KW_USING (
|
| KW_MERGE KW_INTO target=tableName targetAlias=tableAlias KW_USING (
|
||||||
source=identifierReference
|
source=identifierReference
|
||||||
| LEFT_PAREN sourceQuery=query RIGHT_PAREN
|
| LEFT_PAREN sourceQuery=query RIGHT_PAREN
|
||||||
) sourceAlias=tableAlias KW_ON mergeCondition=booleanExpression matchedClause* notMatchedClause* notMatchedBySourceClause*
|
) sourceAlias=tableAlias KW_ON mergeCondition=booleanExpression matchedClause* notMatchedClause* notMatchedBySourceClause* # mergeIntoTable
|
||||||
;
|
;
|
||||||
|
|
||||||
dbSchemaName
|
namespaceName
|
||||||
: identifierReference
|
: identifierReference
|
||||||
;
|
;
|
||||||
|
|
||||||
dbSchemaNameCreate
|
namespaceNameCreate
|
||||||
: identifierReference
|
: identifierReference
|
||||||
;
|
;
|
||||||
|
|
||||||
@ -440,6 +445,7 @@ viewName
|
|||||||
|
|
||||||
columnName
|
columnName
|
||||||
: multipartIdentifier
|
: multipartIdentifier
|
||||||
|
| {this.shouldMatchEmpty()}?
|
||||||
;
|
;
|
||||||
|
|
||||||
columnNameSeq
|
columnNameSeq
|
||||||
@ -782,11 +788,11 @@ identifierCommentList
|
|||||||
;
|
;
|
||||||
|
|
||||||
identifierComment
|
identifierComment
|
||||||
: identifier commentSpec?
|
: columnNameCreate commentSpec?
|
||||||
;
|
;
|
||||||
|
|
||||||
relationPrimary
|
relationPrimary
|
||||||
: identifierReference temporalClause? sample? tableAlias
|
: (tableName | viewName | identifierReference) temporalClause? sample? tableAlias
|
||||||
| LEFT_PAREN query RIGHT_PAREN sample? tableAlias
|
| LEFT_PAREN query RIGHT_PAREN sample? tableAlias
|
||||||
| LEFT_PAREN relation RIGHT_PAREN sample? tableAlias
|
| LEFT_PAREN relation RIGHT_PAREN sample? tableAlias
|
||||||
| inlineTable
|
| inlineTable
|
||||||
@ -1811,7 +1817,7 @@ nonReserved
|
|||||||
| KW_FOREIGN
|
| KW_FOREIGN
|
||||||
| KW_FORMAT
|
| KW_FORMAT
|
||||||
| KW_FORMATTED
|
| KW_FORMATTED
|
||||||
| KW_FROM
|
// | KW_FROM
|
||||||
| KW_FUNCTION
|
| KW_FUNCTION
|
||||||
| KW_FUNCTIONS
|
| KW_FUNCTIONS
|
||||||
| KW_GENERATED
|
| KW_GENERATED
|
||||||
@ -1967,7 +1973,7 @@ nonReserved
|
|||||||
| KW_SYSTEM
|
| KW_SYSTEM
|
||||||
| KW_SYSTEM_TIME
|
| KW_SYSTEM_TIME
|
||||||
| KW_SYSTEM_VERSION
|
| KW_SYSTEM_VERSION
|
||||||
| KW_TABLE
|
// | KW_TABLE
|
||||||
| KW_TABLES
|
| KW_TABLES
|
||||||
| KW_TABLESAMPLE
|
| KW_TABLESAMPLE
|
||||||
| KW_TARGET
|
| KW_TARGET
|
||||||
|
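Each `# label` appended above gives its alternative a dedicated context class plus enter/exit callbacks in the regenerated listeners and visitors (the same pattern is visible in the generated files further down in this diff), which is what the entity collectors attach to. A hedged sketch of consuming such a hook; the Spark listener class and its import path are assumed for illustration, since the regenerated Spark files are among the suppressed diffs below:

import { ParseTreeWalker, ParserRuleContext } from "antlr4ng";
// Assumed import path; not shown in this change set.
import { SparkSqlParserListener } from "dt-sql-parser/dist/lib/spark/SparkSqlParserListener";

class ViewNameCollector extends SparkSqlParserListener {
    readonly views: string[] = [];
    // enterCreateView is generated because of the `# createView` label added above.
    enterCreateView = (ctx: ParserRuleContext) => {
        this.views.push(ctx.getText());
    };
}

// Usage sketch: ParseTreeWalker.DEFAULT.walk(new ViewNameCollector(), parser.program());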
@@ -25,6 +25,11 @@ grammar TrinoSql;

 options {
     caseInsensitive= true;
+    superClass=SQLParserBase;
+}
+
+@header {
+    import SQLParserBase from '../SQLParserBase';
 }

 tokens {
@@ -173,12 +178,12 @@ statement
         KW_WHERE where= booleanExpression
     )? # update
     | KW_MERGE KW_INTO tableName (KW_AS? identifier)? KW_USING relation KW_ON expression mergeCase+ # merge
-    | KW_SHOW KW_COMMENT KW_ON KW_TABLE tableName # showTableComment
+    | KW_SHOW KW_COMMENT KW_ON KW_TABLE tableName # showTableComment // dtstack
-    | KW_SHOW KW_COMMENT KW_ON KW_COLUMN columnName # showColumnComment
+    | KW_SHOW KW_COMMENT KW_ON KW_COLUMN columnName # showColumnComment // dtstack
     ;

 query
-    : with? queryNoWith
+    : with? queryNoWith # queryStatement
     ;

 with
@@ -746,6 +751,7 @@ functionName

 columnName
     : qualifiedName
+    | {this.shouldMatchEmpty()}?
     ;

 columnNameCreate
src/index.ts (18 lines changed)
@@ -30,19 +30,29 @@ export type {
     ImpalaSqlParserVisitor,
 } from './lib';

-export { SyntaxContextType } from './parser/common/basic-parser-types';
+export { EntityContextType } from './parser/common/basic-parser-types';

+export {
+    /**
+     * @deprecated SyntaxContextType has been renamed to {@link EntityContextType},
+     * It will be removed when the stable version is released.
+     */
+    EntityContextType as SyntaxContextType,
+} from './parser/common/basic-parser-types';

 export type {
     CaretPosition,
-    WordRange,
     Suggestions,
     SyntaxSuggestion,
-    TextSlice,
 } from './parser/common/basic-parser-types';

+export type { WordRange, TextSlice } from './parser/common/textAndWord';
+
 export type { SyntaxError, ParseError, ErrorListener } from './parser/common/parseErrorListener';

+export type { StmtContextType, StmtContext, EntityContext } from './parser/common/entityCollector';
+
 /**
- * @deprecated legacy, will be removed.
+ * @deprecated Legacy utils will be removed when the stable version is released.
  */
 export * from './utils';
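For downstream code, the practical effect of the re-export block above is that existing imports keep compiling while new code moves to the new name. A minimal before/after sketch, assuming the published package name dt-sql-parser; the enum members themselves are not shown in this diff:

// Before: still compiles, but editors now flag the import as deprecated.
import { SyntaxContextType } from "dt-sql-parser"; // assumed package name

// After: the same enum under its new name.
import { EntityContextType } from "dt-sql-parser";

// Both identifiers are bound to the same object, so SyntaxContextType === EntityContextType.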
src/lib/SQLParserBase.ts (new file, 18 lines)
@@ -0,0 +1,18 @@
+import * as antlr from "antlr4ng";
+export default abstract class SQLParserBase<T = antlr.ParserRuleContext> extends antlr.Parser{
+    public constructor(input: antlr.TokenStream) {
+        super(input);
+    }
+
+    public abstract program(): T;
+
+    public caretTokenIndex = -1;
+
+    public entityCollecting = false;
+
+    public shouldMatchEmpty () {
+        return this.entityCollecting
+            && this.tokenStream.LT(-1).tokenIndex <= this.caretTokenIndex
+            && this.tokenStream.LT(1).tokenIndex >= this.caretTokenIndex
+    }
+}
(File diffs suppressed by the viewer: one or more lines are too long / file is too large.)
@@ -3,6 +3,9 @@
 import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";


+import SQLParserBase from '../SQLParserBase';
+
+
 import { ProgramContext } from "./FlinkSqlParser.js";
 import { SingleStatementContext } from "./FlinkSqlParser.js";
 import { SqlStatementContext } from "./FlinkSqlParser.js";
@@ -51,7 +54,6 @@ import { SelfDefinitionClauseContext } from "./FlinkSqlParser.js";
 import { PartitionDefinitionContext } from "./FlinkSqlParser.js";
 import { TransformListContext } from "./FlinkSqlParser.js";
 import { IdentityTransformContext } from "./FlinkSqlParser.js";
-import { ColumnTransformContext } from "./FlinkSqlParser.js";
 import { ApplyTransformContext } from "./FlinkSqlParser.js";
 import { TransformArgumentContext } from "./FlinkSqlParser.js";
 import { LikeDefinitionContext } from "./FlinkSqlParser.js";
@@ -706,18 +708,6 @@ export class FlinkSqlParserListener implements ParseTreeListener {
      * @param ctx the parse tree
      */
     exitIdentityTransform?: (ctx: IdentityTransformContext) => void;
-    /**
-     * Enter a parse tree produced by the `columnTransform`
-     * labeled alternative in `FlinkSqlParser.transform`.
-     * @param ctx the parse tree
-     */
-    enterColumnTransform?: (ctx: ColumnTransformContext) => void;
-    /**
-     * Exit a parse tree produced by the `columnTransform`
-     * labeled alternative in `FlinkSqlParser.transform`.
-     * @param ctx the parse tree
-     */
-    exitColumnTransform?: (ctx: ColumnTransformContext) => void;
     /**
      * Enter a parse tree produced by the `applyTransform`
      * labeled alternative in `FlinkSqlParser.transform`.
@@ -3,6 +3,9 @@
 import { AbstractParseTreeVisitor } from "antlr4ng";


+import SQLParserBase from '../SQLParserBase';
+
+
 import { ProgramContext } from "./FlinkSqlParser.js";
 import { SingleStatementContext } from "./FlinkSqlParser.js";
 import { SqlStatementContext } from "./FlinkSqlParser.js";
@@ -51,7 +54,6 @@ import { SelfDefinitionClauseContext } from "./FlinkSqlParser.js";
 import { PartitionDefinitionContext } from "./FlinkSqlParser.js";
 import { TransformListContext } from "./FlinkSqlParser.js";
 import { IdentityTransformContext } from "./FlinkSqlParser.js";
-import { ColumnTransformContext } from "./FlinkSqlParser.js";
 import { ApplyTransformContext } from "./FlinkSqlParser.js";
 import { TransformArgumentContext } from "./FlinkSqlParser.js";
 import { LikeDefinitionContext } from "./FlinkSqlParser.js";
@@ -516,13 +518,6 @@ export class FlinkSqlParserVisitor<Result> extends AbstractParseTreeVisitor<Resu
      * @return the visitor result
      */
     visitIdentityTransform?: (ctx: IdentityTransformContext) => Result;
-    /**
-     * Visit a parse tree produced by the `columnTransform`
-     * labeled alternative in `FlinkSqlParser.transform`.
-     * @param ctx the parse tree
-     * @return the visitor result
-     */
-    visitColumnTransform?: (ctx: ColumnTransformContext) => Result;
     /**
      * Visit a parse tree produced by the `applyTransform`
      * labeled alternative in `FlinkSqlParser.transform`.
(File diffs suppressed by the viewer: one or more lines are too long / file is too large.)
@@ -3,6 +3,9 @@
 import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";


+import SQLParserBase from '../SQLParserBase';
+
+
 import { ProgramContext } from "./HiveSqlParser.js";
 import { StatementContext } from "./HiveSqlParser.js";
 import { ExplainStatementContext } from "./HiveSqlParser.js";
@@ -204,13 +207,14 @@ import { QueryStatementExpressionBodyContext } from "./HiveSqlParser.js";
 import { WithClauseContext } from "./HiveSqlParser.js";
 import { CteStatementContext } from "./HiveSqlParser.js";
 import { FromStatementContext } from "./HiveSqlParser.js";
-import { SingleFromStatementContext } from "./HiveSqlParser.js";
-import { RegularBodyContext } from "./HiveSqlParser.js";
+import { FromInsertStmtContext } from "./HiveSqlParser.js";
+import { FromSelectStmtContext } from "./HiveSqlParser.js";
+import { InsertStmtContext } from "./HiveSqlParser.js";
+import { SelectStmtContext } from "./HiveSqlParser.js";
 import { AtomSelectStatementContext } from "./HiveSqlParser.js";
 import { SelectStatementContext } from "./HiveSqlParser.js";
 import { SetOpSelectStatementContext } from "./HiveSqlParser.js";
 import { SelectStatementWithCTEContext } from "./HiveSqlParser.js";
-import { BodyContext } from "./HiveSqlParser.js";
 import { InsertClauseContext } from "./HiveSqlParser.js";
 import { DestinationContext } from "./HiveSqlParser.js";
 import { LimitClauseContext } from "./HiveSqlParser.js";
@@ -2542,25 +2546,53 @@ export class HiveSqlParserListener implements ParseTreeListener {
      */
     exitFromStatement?: (ctx: FromStatementContext) => void;
     /**
-     * Enter a parse tree produced by `HiveSqlParser.singleFromStatement`.
+     * Enter a parse tree produced by the `fromInsertStmt`
+     * labeled alternative in `HiveSqlParser.singleFromStatement`.
      * @param ctx the parse tree
      */
-    enterSingleFromStatement?: (ctx: SingleFromStatementContext) => void;
+    enterFromInsertStmt?: (ctx: FromInsertStmtContext) => void;
     /**
-     * Exit a parse tree produced by `HiveSqlParser.singleFromStatement`.
+     * Exit a parse tree produced by the `fromInsertStmt`
+     * labeled alternative in `HiveSqlParser.singleFromStatement`.
      * @param ctx the parse tree
      */
-    exitSingleFromStatement?: (ctx: SingleFromStatementContext) => void;
+    exitFromInsertStmt?: (ctx: FromInsertStmtContext) => void;
     /**
-     * Enter a parse tree produced by `HiveSqlParser.regularBody`.
+     * Enter a parse tree produced by the `fromSelectStmt`
+     * labeled alternative in `HiveSqlParser.singleFromStatement`.
      * @param ctx the parse tree
      */
-    enterRegularBody?: (ctx: RegularBodyContext) => void;
+    enterFromSelectStmt?: (ctx: FromSelectStmtContext) => void;
     /**
-     * Exit a parse tree produced by `HiveSqlParser.regularBody`.
+     * Exit a parse tree produced by the `fromSelectStmt`
+     * labeled alternative in `HiveSqlParser.singleFromStatement`.
      * @param ctx the parse tree
      */
-    exitRegularBody?: (ctx: RegularBodyContext) => void;
+    exitFromSelectStmt?: (ctx: FromSelectStmtContext) => void;
+    /**
+     * Enter a parse tree produced by the `insertStmt`
+     * labeled alternative in `HiveSqlParser.regularBody`.
+     * @param ctx the parse tree
+     */
+    enterInsertStmt?: (ctx: InsertStmtContext) => void;
+    /**
+     * Exit a parse tree produced by the `insertStmt`
+     * labeled alternative in `HiveSqlParser.regularBody`.
+     * @param ctx the parse tree
+     */
+    exitInsertStmt?: (ctx: InsertStmtContext) => void;
+    /**
+     * Enter a parse tree produced by the `selectStmt`
+     * labeled alternative in `HiveSqlParser.regularBody`.
+     * @param ctx the parse tree
+     */
+    enterSelectStmt?: (ctx: SelectStmtContext) => void;
+    /**
+     * Exit a parse tree produced by the `selectStmt`
+     * labeled alternative in `HiveSqlParser.regularBody`.
+     * @param ctx the parse tree
+     */
+    exitSelectStmt?: (ctx: SelectStmtContext) => void;
     /**
      * Enter a parse tree produced by `HiveSqlParser.atomSelectStatement`.
      * @param ctx the parse tree
@@ -2601,16 +2633,6 @@ export class HiveSqlParserListener implements ParseTreeListener {
      * @param ctx the parse tree
      */
     exitSelectStatementWithCTE?: (ctx: SelectStatementWithCTEContext) => void;
-    /**
-     * Enter a parse tree produced by `HiveSqlParser.body`.
-     * @param ctx the parse tree
-     */
-    enterBody?: (ctx: BodyContext) => void;
-    /**
-     * Exit a parse tree produced by `HiveSqlParser.body`.
-     * @param ctx the parse tree
-     */
-    exitBody?: (ctx: BodyContext) => void;
     /**
      * Enter a parse tree produced by `HiveSqlParser.insertClause`.
      * @param ctx the parse tree
@@ -3,6 +3,9 @@
 import { AbstractParseTreeVisitor } from "antlr4ng";


+import SQLParserBase from '../SQLParserBase';
+
+
 import { ProgramContext } from "./HiveSqlParser.js";
 import { StatementContext } from "./HiveSqlParser.js";
 import { ExplainStatementContext } from "./HiveSqlParser.js";
@@ -204,13 +207,14 @@ import { QueryStatementExpressionBodyContext } from "./HiveSqlParser.js";
 import { WithClauseContext } from "./HiveSqlParser.js";
 import { CteStatementContext } from "./HiveSqlParser.js";
 import { FromStatementContext } from "./HiveSqlParser.js";
-import { SingleFromStatementContext } from "./HiveSqlParser.js";
-import { RegularBodyContext } from "./HiveSqlParser.js";
+import { FromInsertStmtContext } from "./HiveSqlParser.js";
+import { FromSelectStmtContext } from "./HiveSqlParser.js";
+import { InsertStmtContext } from "./HiveSqlParser.js";
+import { SelectStmtContext } from "./HiveSqlParser.js";
 import { AtomSelectStatementContext } from "./HiveSqlParser.js";
 import { SelectStatementContext } from "./HiveSqlParser.js";
 import { SetOpSelectStatementContext } from "./HiveSqlParser.js";
 import { SelectStatementWithCTEContext } from "./HiveSqlParser.js";
-import { BodyContext } from "./HiveSqlParser.js";
 import { InsertClauseContext } from "./HiveSqlParser.js";
 import { DestinationContext } from "./HiveSqlParser.js";
 import { LimitClauseContext } from "./HiveSqlParser.js";
@@ -1741,17 +1745,33 @@ export class HiveSqlParserVisitor<Result> extends AbstractParseTreeVisitor<Resul
      */
     visitFromStatement?: (ctx: FromStatementContext) => Result;
     /**
-     * Visit a parse tree produced by `HiveSqlParser.singleFromStatement`.
+     * Visit a parse tree produced by the `fromInsertStmt`
+     * labeled alternative in `HiveSqlParser.singleFromStatement`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitSingleFromStatement?: (ctx: SingleFromStatementContext) => Result;
+    visitFromInsertStmt?: (ctx: FromInsertStmtContext) => Result;
     /**
-     * Visit a parse tree produced by `HiveSqlParser.regularBody`.
+     * Visit a parse tree produced by the `fromSelectStmt`
+     * labeled alternative in `HiveSqlParser.singleFromStatement`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitRegularBody?: (ctx: RegularBodyContext) => Result;
+    visitFromSelectStmt?: (ctx: FromSelectStmtContext) => Result;
+    /**
+     * Visit a parse tree produced by the `insertStmt`
+     * labeled alternative in `HiveSqlParser.regularBody`.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    visitInsertStmt?: (ctx: InsertStmtContext) => Result;
+    /**
+     * Visit a parse tree produced by the `selectStmt`
+     * labeled alternative in `HiveSqlParser.regularBody`.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    visitSelectStmt?: (ctx: SelectStmtContext) => Result;
     /**
      * Visit a parse tree produced by `HiveSqlParser.atomSelectStatement`.
      * @param ctx the parse tree
@@ -1776,12 +1796,6 @@ export class HiveSqlParserVisitor<Result> extends AbstractParseTreeVisitor<Resul
      * @return the visitor result
      */
     visitSelectStatementWithCTE?: (ctx: SelectStatementWithCTEContext) => Result;
-    /**
-     * Visit a parse tree produced by `HiveSqlParser.body`.
-     * @param ctx the parse tree
-     * @return the visitor result
-     */
-    visitBody?: (ctx: BodyContext) => Result;
     /**
      * Visit a parse tree produced by `HiveSqlParser.insertClause`.
      * @param ctx the parse tree
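The listener and visitor shape above is what a Hive entity collector builds on: the insertStmt and selectStmt labels split the old regularBody callback into per-alternative methods. A hedged sketch of a visitor that uses them; the import paths and the surrounding parse call are assumptions, not part of this diff:

// Paths are illustrative; the generated files live in the library's hive output directory.
import { HiveSqlParserVisitor } from "dt-sql-parser/dist/lib/hive/HiveSqlParserVisitor";
import type { InsertStmtContext, SelectStmtContext } from "dt-sql-parser/dist/lib/hive/HiveSqlParser";

class StatementKindCounter extends HiveSqlParserVisitor<void> {
    inserts = 0;
    selects = 0;

    // These optional members exist because of the `insertStmt` / `selectStmt`
    // labeled alternatives added to HiveSqlParser.regularBody.
    visitInsertStmt = (ctx: InsertStmtContext) => {
        this.inserts += 1;
        this.visitChildren(ctx); // keep walking nested statements
    };

    visitSelectStmt = (ctx: SelectStmtContext) => {
        this.selects += 1;
        this.visitChildren(ctx);
    };
}

// Usage sketch: new StatementKindCounter().visit(parser.program());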
(File diffs suppressed by the viewer: one or more lines are too long / file is too large.)
@@ -3,6 +3,9 @@
 import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";


+import SQLParserBase from '../SQLParserBase';
+
+
 import { ProgramContext } from "./ImpalaSqlParser.js";
 import { SingleStatementContext } from "./ImpalaSqlParser.js";
 import { SqlStatementContext } from "./ImpalaSqlParser.js";
@@ -109,6 +112,7 @@ import { QueryStatementContext } from "./ImpalaSqlParser.js";
 import { WithContext } from "./ImpalaSqlParser.js";
 import { ConstraintSpecificationContext } from "./ImpalaSqlParser.js";
 import { ForeignKeySpecificationContext } from "./ImpalaSqlParser.js";
+import { ColumnSpecContext } from "./ImpalaSqlParser.js";
 import { ColumnDefinitionContext } from "./ImpalaSqlParser.js";
 import { KuduTableElementContext } from "./ImpalaSqlParser.js";
 import { KuduColumnDefinitionContext } from "./ImpalaSqlParser.js";
@@ -155,7 +159,6 @@ import { SampledRelationContext } from "./ImpalaSqlParser.js";
 import { SampleTypeContext } from "./ImpalaSqlParser.js";
 import { AliasedRelationContext } from "./ImpalaSqlParser.js";
 import { ColumnAliasesContext } from "./ImpalaSqlParser.js";
-import { CreateColumnAliasesContext } from "./ImpalaSqlParser.js";
 import { RelationPrimaryContext } from "./ImpalaSqlParser.js";
 import { SubQueryRelationContext } from "./ImpalaSqlParser.js";
 import { UnnestContext } from "./ImpalaSqlParser.js";
@@ -1310,6 +1313,16 @@ export class ImpalaSqlParserListener implements ParseTreeListener {
      * @param ctx the parse tree
      */
     exitForeignKeySpecification?: (ctx: ForeignKeySpecificationContext) => void;
+    /**
+     * Enter a parse tree produced by `ImpalaSqlParser.columnSpec`.
+     * @param ctx the parse tree
+     */
+    enterColumnSpec?: (ctx: ColumnSpecContext) => void;
+    /**
+     * Exit a parse tree produced by `ImpalaSqlParser.columnSpec`.
+     * @param ctx the parse tree
+     */
+    exitColumnSpec?: (ctx: ColumnSpecContext) => void;
     /**
      * Enter a parse tree produced by `ImpalaSqlParser.columnDefinition`.
      * @param ctx the parse tree
@@ -1792,16 +1805,6 @@ export class ImpalaSqlParserListener implements ParseTreeListener {
      * @param ctx the parse tree
      */
     exitColumnAliases?: (ctx: ColumnAliasesContext) => void;
-    /**
-     * Enter a parse tree produced by `ImpalaSqlParser.createColumnAliases`.
-     * @param ctx the parse tree
-     */
-    enterCreateColumnAliases?: (ctx: CreateColumnAliasesContext) => void;
-    /**
-     * Exit a parse tree produced by `ImpalaSqlParser.createColumnAliases`.
-     * @param ctx the parse tree
-     */
-    exitCreateColumnAliases?: (ctx: CreateColumnAliasesContext) => void;
     /**
      * Enter a parse tree produced by `ImpalaSqlParser.relationPrimary`.
      * @param ctx the parse tree
@@ -3,6 +3,9 @@
 import { AbstractParseTreeVisitor } from "antlr4ng";


+import SQLParserBase from '../SQLParserBase';
+
+
 import { ProgramContext } from "./ImpalaSqlParser.js";
 import { SingleStatementContext } from "./ImpalaSqlParser.js";
 import { SqlStatementContext } from "./ImpalaSqlParser.js";
@@ -109,6 +112,7 @@ import { QueryStatementContext } from "./ImpalaSqlParser.js";
 import { WithContext } from "./ImpalaSqlParser.js";
 import { ConstraintSpecificationContext } from "./ImpalaSqlParser.js";
 import { ForeignKeySpecificationContext } from "./ImpalaSqlParser.js";
+import { ColumnSpecContext } from "./ImpalaSqlParser.js";
 import { ColumnDefinitionContext } from "./ImpalaSqlParser.js";
 import { KuduTableElementContext } from "./ImpalaSqlParser.js";
 import { KuduColumnDefinitionContext } from "./ImpalaSqlParser.js";
@@ -155,7 +159,6 @@ import { SampledRelationContext } from "./ImpalaSqlParser.js";
 import { SampleTypeContext } from "./ImpalaSqlParser.js";
 import { AliasedRelationContext } from "./ImpalaSqlParser.js";
 import { ColumnAliasesContext } from "./ImpalaSqlParser.js";
-import { CreateColumnAliasesContext } from "./ImpalaSqlParser.js";
 import { RelationPrimaryContext } from "./ImpalaSqlParser.js";
 import { SubQueryRelationContext } from "./ImpalaSqlParser.js";
 import { UnnestContext } from "./ImpalaSqlParser.js";
@@ -889,6 +892,12 @@ export class ImpalaSqlParserVisitor<Result> extends AbstractParseTreeVisitor<Res
      * @return the visitor result
      */
     visitForeignKeySpecification?: (ctx: ForeignKeySpecificationContext) => Result;
+    /**
+     * Visit a parse tree produced by `ImpalaSqlParser.columnSpec`.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    visitColumnSpec?: (ctx: ColumnSpecContext) => Result;
     /**
      * Visit a parse tree produced by `ImpalaSqlParser.columnDefinition`.
      * @param ctx the parse tree
@@ -1176,12 +1185,6 @@ export class ImpalaSqlParserVisitor<Result> extends AbstractParseTreeVisitor<Res
      * @return the visitor result
      */
     visitColumnAliases?: (ctx: ColumnAliasesContext) => Result;
-    /**
-     * Visit a parse tree produced by `ImpalaSqlParser.createColumnAliases`.
-     * @param ctx the parse tree
-     * @return the visitor result
-     */
-    visitCreateColumnAliases?: (ctx: CreateColumnAliasesContext) => Result;
     /**
      * Visit a parse tree produced by `ImpalaSqlParser.relationPrimary`.
      * @param ctx the parse tree
(File diffs suppressed by the viewer: one or more lines are too long / file is too large.)
@@ -3,6 +3,9 @@
 import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";


+import SQLParserBase from '../SQLParserBase';
+
+
 import { ProgramContext } from "./MySqlParser.js";
 import { SingleStatementContext } from "./MySqlParser.js";
 import { SqlStatementContext } from "./MySqlParser.js";
@@ -22,9 +25,9 @@ import { CreateLogfileGroupContext } from "./MySqlParser.js";
 import { CreateProcedureContext } from "./MySqlParser.js";
 import { CreateRoleContext } from "./MySqlParser.js";
 import { CreateServerContext } from "./MySqlParser.js";
+import { QueryCreateTableContext } from "./MySqlParser.js";
 import { CopyCreateTableContext } from "./MySqlParser.js";
 import { ColumnCreateTableContext } from "./MySqlParser.js";
-import { QueryCreateTableContext } from "./MySqlParser.js";
 import { CreateTablespaceInnodbContext } from "./MySqlParser.js";
 import { CreateTablespaceNdbContext } from "./MySqlParser.js";
 import { CreateTriggerContext } from "./MySqlParser.js";
@@ -635,7 +638,6 @@ import { LogicalExpressionContext } from "./MySqlParser.js";
 import { PredicateExpressionContext } from "./MySqlParser.js";
 import { SoundsLikePredicateContext } from "./MySqlParser.js";
 import { ExpressionAtomPredicateContext } from "./MySqlParser.js";
-import { SubqueryComparisonPredicateContext } from "./MySqlParser.js";
 import { JsonMemberOfPredicateContext } from "./MySqlParser.js";
 import { BinaryComparisonPredicateContext } from "./MySqlParser.js";
 import { InPredicateContext } from "./MySqlParser.js";
@@ -870,6 +872,18 @@ export class MySqlParserListener implements ParseTreeListener {
      * @param ctx the parse tree
      */
     exitCreateServer?: (ctx: CreateServerContext) => void;
+    /**
+     * Enter a parse tree produced by the `queryCreateTable`
+     * labeled alternative in `MySqlParser.createTable`.
+     * @param ctx the parse tree
+     */
+    enterQueryCreateTable?: (ctx: QueryCreateTableContext) => void;
+    /**
+     * Exit a parse tree produced by the `queryCreateTable`
+     * labeled alternative in `MySqlParser.createTable`.
+     * @param ctx the parse tree
+     */
+    exitQueryCreateTable?: (ctx: QueryCreateTableContext) => void;
     /**
      * Enter a parse tree produced by the `copyCreateTable`
      * labeled alternative in `MySqlParser.createTable`.
@@ -894,18 +908,6 @@ export class MySqlParserListener implements ParseTreeListener {
      * @param ctx the parse tree
      */
     exitColumnCreateTable?: (ctx: ColumnCreateTableContext) => void;
-    /**
-     * Enter a parse tree produced by the `queryCreateTable`
-     * labeled alternative in `MySqlParser.createTable`.
-     * @param ctx the parse tree
-     */
-    enterQueryCreateTable?: (ctx: QueryCreateTableContext) => void;
-    /**
-     * Exit a parse tree produced by the `queryCreateTable`
-     * labeled alternative in `MySqlParser.createTable`.
-     * @param ctx the parse tree
-     */
-    exitQueryCreateTable?: (ctx: QueryCreateTableContext) => void;
     /**
      * Enter a parse tree produced by `MySqlParser.createTablespaceInnodb`.
      * @param ctx the parse tree
@@ -7540,18 +7542,6 @@ export class MySqlParserListener implements ParseTreeListener {
      * @param ctx the parse tree
      */
     exitExpressionAtomPredicate?: (ctx: ExpressionAtomPredicateContext) => void;
-    /**
-     * Enter a parse tree produced by the `subqueryComparisonPredicate`
-     * labeled alternative in `MySqlParser.predicate`.
-     * @param ctx the parse tree
-     */
-    enterSubqueryComparisonPredicate?: (ctx: SubqueryComparisonPredicateContext) => void;
-    /**
-     * Exit a parse tree produced by the `subqueryComparisonPredicate`
-     * labeled alternative in `MySqlParser.predicate`.
-     * @param ctx the parse tree
-     */
-    exitSubqueryComparisonPredicate?: (ctx: SubqueryComparisonPredicateContext) => void;
     /**
      * Enter a parse tree produced by the `jsonMemberOfPredicate`
      * labeled alternative in `MySqlParser.predicate`.
@@ -3,6 +3,9 @@
 import { AbstractParseTreeVisitor } from "antlr4ng";


+import SQLParserBase from '../SQLParserBase';
+
+
 import { ProgramContext } from "./MySqlParser.js";
 import { SingleStatementContext } from "./MySqlParser.js";
 import { SqlStatementContext } from "./MySqlParser.js";
@@ -22,9 +25,9 @@ import { CreateLogfileGroupContext } from "./MySqlParser.js";
 import { CreateProcedureContext } from "./MySqlParser.js";
 import { CreateRoleContext } from "./MySqlParser.js";
 import { CreateServerContext } from "./MySqlParser.js";
+import { QueryCreateTableContext } from "./MySqlParser.js";
 import { CopyCreateTableContext } from "./MySqlParser.js";
 import { ColumnCreateTableContext } from "./MySqlParser.js";
-import { QueryCreateTableContext } from "./MySqlParser.js";
 import { CreateTablespaceInnodbContext } from "./MySqlParser.js";
 import { CreateTablespaceNdbContext } from "./MySqlParser.js";
 import { CreateTriggerContext } from "./MySqlParser.js";
@@ -635,7 +638,6 @@ import { LogicalExpressionContext } from "./MySqlParser.js";
 import { PredicateExpressionContext } from "./MySqlParser.js";
 import { SoundsLikePredicateContext } from "./MySqlParser.js";
 import { ExpressionAtomPredicateContext } from "./MySqlParser.js";
-import { SubqueryComparisonPredicateContext } from "./MySqlParser.js";
 import { JsonMemberOfPredicateContext } from "./MySqlParser.js";
 import { BinaryComparisonPredicateContext } from "./MySqlParser.js";
 import { InPredicateContext } from "./MySqlParser.js";
@@ -797,6 +799,13 @@ export class MySqlParserVisitor<Result> extends AbstractParseTreeVisitor<Result>
      * @return the visitor result
      */
     visitCreateServer?: (ctx: CreateServerContext) => Result;
+    /**
+     * Visit a parse tree produced by the `queryCreateTable`
+     * labeled alternative in `MySqlParser.createTable`.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    visitQueryCreateTable?: (ctx: QueryCreateTableContext) => Result;
     /**
      * Visit a parse tree produced by the `copyCreateTable`
      * labeled alternative in `MySqlParser.createTable`.
@@ -811,13 +820,6 @@ export class MySqlParserVisitor<Result> extends AbstractParseTreeVisitor<Result>
      * @return the visitor result
      */
     visitColumnCreateTable?: (ctx: ColumnCreateTableContext) => Result;
-    /**
-     * Visit a parse tree produced by the `queryCreateTable`
-     * labeled alternative in `MySqlParser.createTable`.
-     * @param ctx the parse tree
-     * @return the visitor result
-     */
-    visitQueryCreateTable?: (ctx: QueryCreateTableContext) => Result;
     /**
      * Visit a parse tree produced by `MySqlParser.createTablespaceInnodb`.
      * @param ctx the parse tree
@@ -4745,13 +4747,6 @@ export class MySqlParserVisitor<Result> extends AbstractParseTreeVisitor<Result>
      * @return the visitor result
      */
     visitExpressionAtomPredicate?: (ctx: ExpressionAtomPredicateContext) => Result;
-    /**
-     * Visit a parse tree produced by the `subqueryComparisonPredicate`
-     * labeled alternative in `MySqlParser.predicate`.
-     * @param ctx the parse tree
-     * @return the visitor result
-     */
-    visitSubqueryComparisonPredicate?: (ctx: SubqueryComparisonPredicateContext) => Result;
     /**
      * Visit a parse tree produced by the `jsonMemberOfPredicate`
      * labeled alternative in `MySqlParser.predicate`.
(File diffs suppressed by the viewer: one or more lines are too long / file is too large.)
@@ -3,6 +3,9 @@
 import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";
 
+
+import SQLParserBase from '../SQLParserBase';
+
 
 import { ProgramContext } from "./PostgreSQLParser.js";
 import { PlsqlrootContext } from "./PostgreSQLParser.js";
 import { SingleStmtContext } from "./PostgreSQLParser.js";
@@ -27,7 +30,7 @@ import { CreategroupstmtContext } from "./PostgreSQLParser.js";
 import { AltergroupstmtContext } from "./PostgreSQLParser.js";
 import { Add_dropContext } from "./PostgreSQLParser.js";
 import { CreateschemastmtContext } from "./PostgreSQLParser.js";
-import { Schema_name_createContext } from "./PostgreSQLParser.js";
+import { SchemaNameCreateContext } from "./PostgreSQLParser.js";
 import { OptschemaeltlistContext } from "./PostgreSQLParser.js";
 import { Schema_stmtContext } from "./PostgreSQLParser.js";
 import { VariablesetstmtContext } from "./PostgreSQLParser.js";
@@ -97,7 +100,7 @@ import { Copy_generic_opt_elemContext } from "./PostgreSQLParser.js";
 import { Copy_generic_opt_argContext } from "./PostgreSQLParser.js";
 import { Copy_generic_opt_arg_listContext } from "./PostgreSQLParser.js";
 import { Copy_generic_opt_arg_list_itemContext } from "./PostgreSQLParser.js";
-import { CreatestmtContext } from "./PostgreSQLParser.js";
+import { ColumnCreateTableContext } from "./PostgreSQLParser.js";
 import { OpttempContext } from "./PostgreSQLParser.js";
 import { Table_column_listContext } from "./PostgreSQLParser.js";
 import { OpttableelementlistContext } from "./PostgreSQLParser.js";
@@ -106,8 +109,7 @@ import { TableelementlistContext } from "./PostgreSQLParser.js";
 import { TypedtableelementlistContext } from "./PostgreSQLParser.js";
 import { TableelementContext } from "./PostgreSQLParser.js";
 import { TypedtableelementContext } from "./PostgreSQLParser.js";
-import { ColumnDefCluaseContext } from "./PostgreSQLParser.js";
-import { ColumnDefContext } from "./PostgreSQLParser.js";
+import { Column_defContext } from "./PostgreSQLParser.js";
 import { CompressionCluaseContext } from "./PostgreSQLParser.js";
 import { StorageCluaseContext } from "./PostgreSQLParser.js";
 import { ColumnOptionsContext } from "./PostgreSQLParser.js";
@@ -125,7 +127,9 @@ import { TableconstraintContext } from "./PostgreSQLParser.js";
 import { ConstraintelemContext } from "./PostgreSQLParser.js";
 import { Opt_no_inheritContext } from "./PostgreSQLParser.js";
 import { Opt_column_listContext } from "./PostgreSQLParser.js";
-import { ColumnlistContext } from "./PostgreSQLParser.js";
+import { Opt_column_list_createContext } from "./PostgreSQLParser.js";
+import { Column_listContext } from "./PostgreSQLParser.js";
+import { Column_list_createContext } from "./PostgreSQLParser.js";
 import { Opt_c_includeContext } from "./PostgreSQLParser.js";
 import { Key_matchContext } from "./PostgreSQLParser.js";
 import { ExclusionconstraintlistContext } from "./PostgreSQLParser.js";
@@ -149,10 +153,10 @@ import { OptconstablespaceContext } from "./PostgreSQLParser.js";
 import { ExistingindexContext } from "./PostgreSQLParser.js";
 import { CreatestatsstmtContext } from "./PostgreSQLParser.js";
 import { AlterstatsstmtContext } from "./PostgreSQLParser.js";
-import { CreateasstmtContext } from "./PostgreSQLParser.js";
+import { QueryCreateTableContext } from "./PostgreSQLParser.js";
 import { Create_as_targetContext } from "./PostgreSQLParser.js";
 import { Opt_with_dataContext } from "./PostgreSQLParser.js";
-import { CreatematviewstmtContext } from "./PostgreSQLParser.js";
+import { CreateMaterializedViewContext } from "./PostgreSQLParser.js";
 import { Create_mv_targetContext } from "./PostgreSQLParser.js";
 import { OptnologContext } from "./PostgreSQLParser.js";
 import { RefreshmatviewstmtContext } from "./PostgreSQLParser.js";
@@ -199,7 +203,8 @@ import { Opt_typeContext } from "./PostgreSQLParser.js";
 import { Foreign_server_versionContext } from "./PostgreSQLParser.js";
 import { Opt_foreign_server_versionContext } from "./PostgreSQLParser.js";
 import { AlterforeignserverstmtContext } from "./PostgreSQLParser.js";
-import { CreateforeigntablestmtContext } from "./PostgreSQLParser.js";
+import { CreateForeignTableContext } from "./PostgreSQLParser.js";
+import { CreatePartitionForeignTableContext } from "./PostgreSQLParser.js";
 import { ImportforeignschemastmtContext } from "./PostgreSQLParser.js";
 import { Import_qualification_typeContext } from "./PostgreSQLParser.js";
 import { Import_qualificationContext } from "./PostgreSQLParser.js";
@@ -421,10 +426,10 @@ import { Transaction_mode_itemContext } from "./PostgreSQLParser.js";
 import { Transaction_mode_listContext } from "./PostgreSQLParser.js";
 import { Transaction_mode_list_or_emptyContext } from "./PostgreSQLParser.js";
 import { Opt_transaction_chainContext } from "./PostgreSQLParser.js";
-import { ViewstmtContext } from "./PostgreSQLParser.js";
+import { CreateViewContext } from "./PostgreSQLParser.js";
 import { Opt_check_optionContext } from "./PostgreSQLParser.js";
 import { LoadstmtContext } from "./PostgreSQLParser.js";
-import { CreatedbstmtContext } from "./PostgreSQLParser.js";
+import { CreateDatabaseContext } from "./PostgreSQLParser.js";
 import { Createdb_opt_listContext } from "./PostgreSQLParser.js";
 import { Createdb_opt_itemsContext } from "./PostgreSQLParser.js";
 import { Createdb_opt_itemContext } from "./PostgreSQLParser.js";
@@ -477,7 +482,7 @@ import { PreparablestmtContext } from "./PostgreSQLParser.js";
 import { ExecutestmtContext } from "./PostgreSQLParser.js";
 import { Execute_param_clauseContext } from "./PostgreSQLParser.js";
 import { DeallocatestmtContext } from "./PostgreSQLParser.js";
-import { InsertstmtContext } from "./PostgreSQLParser.js";
+import { InsertStatementContext } from "./PostgreSQLParser.js";
 import { Insert_targetContext } from "./PostgreSQLParser.js";
 import { Insert_restContext } from "./PostgreSQLParser.js";
 import { Override_kindContext } from "./PostgreSQLParser.js";
@@ -502,7 +507,7 @@ import { DeclarecursorstmtContext } from "./PostgreSQLParser.js";
 import { Cursor_nameContext } from "./PostgreSQLParser.js";
 import { Cursor_optionsContext } from "./PostgreSQLParser.js";
 import { Opt_holdContext } from "./PostgreSQLParser.js";
-import { SelectstmtContext } from "./PostgreSQLParser.js";
+import { SelectStatementContext } from "./PostgreSQLParser.js";
 import { Select_with_parensContext } from "./PostgreSQLParser.js";
 import { Select_no_parensContext } from "./PostgreSQLParser.js";
 import { Select_clauseContext } from "./PostgreSQLParser.js";
@@ -708,29 +713,30 @@ import { Table_name_listContext } from "./PostgreSQLParser.js";
 import { Schema_name_listContext } from "./PostgreSQLParser.js";
 import { Database_nameListContext } from "./PostgreSQLParser.js";
 import { Procedure_name_listContext } from "./PostgreSQLParser.js";
-import { Tablespace_name_createContext } from "./PostgreSQLParser.js";
-import { Tablespace_nameContext } from "./PostgreSQLParser.js";
-import { Table_name_createContext } from "./PostgreSQLParser.js";
-import { Table_nameContext } from "./PostgreSQLParser.js";
-import { View_name_createContext } from "./PostgreSQLParser.js";
-import { View_nameContext } from "./PostgreSQLParser.js";
+import { TablespaceNameCreateContext } from "./PostgreSQLParser.js";
+import { TablespaceNameContext } from "./PostgreSQLParser.js";
+import { TableNameCreateContext } from "./PostgreSQLParser.js";
+import { TableNameContext } from "./PostgreSQLParser.js";
+import { ViewNameCreateContext } from "./PostgreSQLParser.js";
+import { ViewNameContext } from "./PostgreSQLParser.js";
 import { Qualified_nameContext } from "./PostgreSQLParser.js";
 import { Tablespace_name_listContext } from "./PostgreSQLParser.js";
 import { Name_listContext } from "./PostgreSQLParser.js";
-import { Database_name_createContext } from "./PostgreSQLParser.js";
-import { Database_nameContext } from "./PostgreSQLParser.js";
-import { Schema_nameContext } from "./PostgreSQLParser.js";
-import { Routine_name_createContext } from "./PostgreSQLParser.js";
-import { Routine_nameContext } from "./PostgreSQLParser.js";
-import { Procedure_nameContext } from "./PostgreSQLParser.js";
-import { Procedure_name_createContext } from "./PostgreSQLParser.js";
-import { Column_nameContext } from "./PostgreSQLParser.js";
-import { Column_name_createContext } from "./PostgreSQLParser.js";
+import { DatabaseNameCreateContext } from "./PostgreSQLParser.js";
+import { DatabaseNameContext } from "./PostgreSQLParser.js";
+import { SchemaNameContext } from "./PostgreSQLParser.js";
+import { RoutineNameCreateContext } from "./PostgreSQLParser.js";
+import { RoutineNameContext } from "./PostgreSQLParser.js";
+import { ProcedureNameContext } from "./PostgreSQLParser.js";
+import { ProcedureNameCreateContext } from "./PostgreSQLParser.js";
+import { ColumnNameContext } from "./PostgreSQLParser.js";
+import { ColumnNameMatchContext } from "./PostgreSQLParser.js";
+import { ColumnNameCreateContext } from "./PostgreSQLParser.js";
 import { NameContext } from "./PostgreSQLParser.js";
 import { Attr_nameContext } from "./PostgreSQLParser.js";
 import { File_nameContext } from "./PostgreSQLParser.js";
-import { Function_name_createContext } from "./PostgreSQLParser.js";
-import { Function_nameContext } from "./PostgreSQLParser.js";
+import { FunctionNameCreateContext } from "./PostgreSQLParser.js";
+import { FunctionNameContext } from "./PostgreSQLParser.js";
 import { Usual_nameContext } from "./PostgreSQLParser.js";
 import { AexprconstContext } from "./PostgreSQLParser.js";
 import { XconstContext } from "./PostgreSQLParser.js";
@@ -1131,15 +1137,17 @@ export class PostgreSQLParserListener implements ParseTreeListener {
 */
 exitCreateschemastmt?: (ctx: CreateschemastmtContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.schema_name_create`.
+* Enter a parse tree produced by the `schemaNameCreate`
+* labeled alternative in `PostgreSQLParser.schema_name_create`.
 * @param ctx the parse tree
 */
-enterSchema_name_create?: (ctx: Schema_name_createContext) => void;
+enterSchemaNameCreate?: (ctx: SchemaNameCreateContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.schema_name_create`.
+* Exit a parse tree produced by the `schemaNameCreate`
+* labeled alternative in `PostgreSQLParser.schema_name_create`.
 * @param ctx the parse tree
 */
-exitSchema_name_create?: (ctx: Schema_name_createContext) => void;
+exitSchemaNameCreate?: (ctx: SchemaNameCreateContext) => void;
 /**
 * Enter a parse tree produced by `PostgreSQLParser.optschemaeltlist`.
 * @param ctx the parse tree
@@ -1831,15 +1839,17 @@ export class PostgreSQLParserListener implements ParseTreeListener {
 */
 exitCopy_generic_opt_arg_list_item?: (ctx: Copy_generic_opt_arg_list_itemContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.createstmt`.
+* Enter a parse tree produced by the `columnCreateTable`
+* labeled alternative in `PostgreSQLParser.createstmt`.
 * @param ctx the parse tree
 */
-enterCreatestmt?: (ctx: CreatestmtContext) => void;
+enterColumnCreateTable?: (ctx: ColumnCreateTableContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.createstmt`.
+* Exit a parse tree produced by the `columnCreateTable`
+* labeled alternative in `PostgreSQLParser.createstmt`.
 * @param ctx the parse tree
 */
-exitCreatestmt?: (ctx: CreatestmtContext) => void;
+exitColumnCreateTable?: (ctx: ColumnCreateTableContext) => void;
 /**
 * Enter a parse tree produced by `PostgreSQLParser.opttemp`.
 * @param ctx the parse tree
@@ -1921,25 +1931,15 @@ export class PostgreSQLParserListener implements ParseTreeListener {
 */
 exitTypedtableelement?: (ctx: TypedtableelementContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.columnDefCluase`.
+* Enter a parse tree produced by `PostgreSQLParser.column_def`.
 * @param ctx the parse tree
 */
-enterColumnDefCluase?: (ctx: ColumnDefCluaseContext) => void;
+enterColumn_def?: (ctx: Column_defContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.columnDefCluase`.
+* Exit a parse tree produced by `PostgreSQLParser.column_def`.
 * @param ctx the parse tree
 */
-exitColumnDefCluase?: (ctx: ColumnDefCluaseContext) => void;
+exitColumn_def?: (ctx: Column_defContext) => void;
-/**
-* Enter a parse tree produced by `PostgreSQLParser.columnDef`.
-* @param ctx the parse tree
-*/
-enterColumnDef?: (ctx: ColumnDefContext) => void;
-/**
-* Exit a parse tree produced by `PostgreSQLParser.columnDef`.
-* @param ctx the parse tree
-*/
-exitColumnDef?: (ctx: ColumnDefContext) => void;
 /**
 * Enter a parse tree produced by `PostgreSQLParser.compressionCluase`.
 * @param ctx the parse tree
@@ -2111,15 +2111,35 @@ export class PostgreSQLParserListener implements ParseTreeListener {
 */
 exitOpt_column_list?: (ctx: Opt_column_listContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.columnlist`.
+* Enter a parse tree produced by `PostgreSQLParser.opt_column_list_create`.
 * @param ctx the parse tree
 */
-enterColumnlist?: (ctx: ColumnlistContext) => void;
+enterOpt_column_list_create?: (ctx: Opt_column_list_createContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.columnlist`.
+* Exit a parse tree produced by `PostgreSQLParser.opt_column_list_create`.
 * @param ctx the parse tree
 */
-exitColumnlist?: (ctx: ColumnlistContext) => void;
+exitOpt_column_list_create?: (ctx: Opt_column_list_createContext) => void;
+/**
+* Enter a parse tree produced by `PostgreSQLParser.column_list`.
+* @param ctx the parse tree
+*/
+enterColumn_list?: (ctx: Column_listContext) => void;
+/**
+* Exit a parse tree produced by `PostgreSQLParser.column_list`.
+* @param ctx the parse tree
+*/
+exitColumn_list?: (ctx: Column_listContext) => void;
+/**
+* Enter a parse tree produced by `PostgreSQLParser.column_list_create`.
+* @param ctx the parse tree
+*/
+enterColumn_list_create?: (ctx: Column_list_createContext) => void;
+/**
+* Exit a parse tree produced by `PostgreSQLParser.column_list_create`.
+* @param ctx the parse tree
+*/
+exitColumn_list_create?: (ctx: Column_list_createContext) => void;
 /**
 * Enter a parse tree produced by `PostgreSQLParser.opt_c_include`.
 * @param ctx the parse tree
@@ -2351,15 +2371,17 @@ export class PostgreSQLParserListener implements ParseTreeListener {
 */
 exitAlterstatsstmt?: (ctx: AlterstatsstmtContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.createasstmt`.
+* Enter a parse tree produced by the `queryCreateTable`
+* labeled alternative in `PostgreSQLParser.createasstmt`.
 * @param ctx the parse tree
 */
-enterCreateasstmt?: (ctx: CreateasstmtContext) => void;
+enterQueryCreateTable?: (ctx: QueryCreateTableContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.createasstmt`.
+* Exit a parse tree produced by the `queryCreateTable`
+* labeled alternative in `PostgreSQLParser.createasstmt`.
 * @param ctx the parse tree
 */
-exitCreateasstmt?: (ctx: CreateasstmtContext) => void;
+exitQueryCreateTable?: (ctx: QueryCreateTableContext) => void;
 /**
 * Enter a parse tree produced by `PostgreSQLParser.create_as_target`.
 * @param ctx the parse tree
@@ -2381,15 +2403,17 @@ export class PostgreSQLParserListener implements ParseTreeListener {
 */
 exitOpt_with_data?: (ctx: Opt_with_dataContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.creatematviewstmt`.
+* Enter a parse tree produced by the `createMaterializedView`
+* labeled alternative in `PostgreSQLParser.creatematviewstmt`.
 * @param ctx the parse tree
 */
-enterCreatematviewstmt?: (ctx: CreatematviewstmtContext) => void;
+enterCreateMaterializedView?: (ctx: CreateMaterializedViewContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.creatematviewstmt`.
+* Exit a parse tree produced by the `createMaterializedView`
+* labeled alternative in `PostgreSQLParser.creatematviewstmt`.
 * @param ctx the parse tree
 */
-exitCreatematviewstmt?: (ctx: CreatematviewstmtContext) => void;
+exitCreateMaterializedView?: (ctx: CreateMaterializedViewContext) => void;
 /**
 * Enter a parse tree produced by `PostgreSQLParser.create_mv_target`.
 * @param ctx the parse tree
@@ -2851,15 +2875,29 @@ export class PostgreSQLParserListener implements ParseTreeListener {
 */
 exitAlterforeignserverstmt?: (ctx: AlterforeignserverstmtContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.createforeigntablestmt`.
+* Enter a parse tree produced by the `createForeignTable`
+* labeled alternative in `PostgreSQLParser.createforeigntablestmt`.
 * @param ctx the parse tree
 */
-enterCreateforeigntablestmt?: (ctx: CreateforeigntablestmtContext) => void;
+enterCreateForeignTable?: (ctx: CreateForeignTableContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.createforeigntablestmt`.
+* Exit a parse tree produced by the `createForeignTable`
+* labeled alternative in `PostgreSQLParser.createforeigntablestmt`.
 * @param ctx the parse tree
 */
-exitCreateforeigntablestmt?: (ctx: CreateforeigntablestmtContext) => void;
+exitCreateForeignTable?: (ctx: CreateForeignTableContext) => void;
+/**
+* Enter a parse tree produced by the `createPartitionForeignTable`
+* labeled alternative in `PostgreSQLParser.createforeigntablestmt`.
+* @param ctx the parse tree
+*/
+enterCreatePartitionForeignTable?: (ctx: CreatePartitionForeignTableContext) => void;
+/**
+* Exit a parse tree produced by the `createPartitionForeignTable`
+* labeled alternative in `PostgreSQLParser.createforeigntablestmt`.
+* @param ctx the parse tree
+*/
+exitCreatePartitionForeignTable?: (ctx: CreatePartitionForeignTableContext) => void;
 /**
 * Enter a parse tree produced by `PostgreSQLParser.importforeignschemastmt`.
 * @param ctx the parse tree
@@ -5071,15 +5109,17 @@ export class PostgreSQLParserListener implements ParseTreeListener {
 */
 exitOpt_transaction_chain?: (ctx: Opt_transaction_chainContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.viewstmt`.
+* Enter a parse tree produced by the `createView`
+* labeled alternative in `PostgreSQLParser.viewstmt`.
 * @param ctx the parse tree
 */
-enterViewstmt?: (ctx: ViewstmtContext) => void;
+enterCreateView?: (ctx: CreateViewContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.viewstmt`.
+* Exit a parse tree produced by the `createView`
+* labeled alternative in `PostgreSQLParser.viewstmt`.
 * @param ctx the parse tree
 */
-exitViewstmt?: (ctx: ViewstmtContext) => void;
+exitCreateView?: (ctx: CreateViewContext) => void;
 /**
 * Enter a parse tree produced by `PostgreSQLParser.opt_check_option`.
 * @param ctx the parse tree
@@ -5101,15 +5141,17 @@ export class PostgreSQLParserListener implements ParseTreeListener {
 */
 exitLoadstmt?: (ctx: LoadstmtContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.createdbstmt`.
+* Enter a parse tree produced by the `createDatabase`
+* labeled alternative in `PostgreSQLParser.createdbstmt`.
 * @param ctx the parse tree
 */
-enterCreatedbstmt?: (ctx: CreatedbstmtContext) => void;
+enterCreateDatabase?: (ctx: CreateDatabaseContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.createdbstmt`.
+* Exit a parse tree produced by the `createDatabase`
+* labeled alternative in `PostgreSQLParser.createdbstmt`.
 * @param ctx the parse tree
 */
-exitCreatedbstmt?: (ctx: CreatedbstmtContext) => void;
+exitCreateDatabase?: (ctx: CreateDatabaseContext) => void;
 /**
 * Enter a parse tree produced by `PostgreSQLParser.createdb_opt_list`.
 * @param ctx the parse tree
@@ -5631,15 +5673,17 @@ export class PostgreSQLParserListener implements ParseTreeListener {
 */
 exitDeallocatestmt?: (ctx: DeallocatestmtContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.insertstmt`.
+* Enter a parse tree produced by the `insertStatement`
+* labeled alternative in `PostgreSQLParser.insertstmt`.
 * @param ctx the parse tree
 */
-enterInsertstmt?: (ctx: InsertstmtContext) => void;
+enterInsertStatement?: (ctx: InsertStatementContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.insertstmt`.
+* Exit a parse tree produced by the `insertStatement`
+* labeled alternative in `PostgreSQLParser.insertstmt`.
 * @param ctx the parse tree
 */
-exitInsertstmt?: (ctx: InsertstmtContext) => void;
+exitInsertStatement?: (ctx: InsertStatementContext) => void;
 /**
 * Enter a parse tree produced by `PostgreSQLParser.insert_target`.
 * @param ctx the parse tree
@@ -5881,15 +5925,17 @@ export class PostgreSQLParserListener implements ParseTreeListener {
 */
 exitOpt_hold?: (ctx: Opt_holdContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.selectstmt`.
+* Enter a parse tree produced by the `selectStatement`
+* labeled alternative in `PostgreSQLParser.selectstmt`.
 * @param ctx the parse tree
 */
-enterSelectstmt?: (ctx: SelectstmtContext) => void;
+enterSelectStatement?: (ctx: SelectStatementContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.selectstmt`.
+* Exit a parse tree produced by the `selectStatement`
+* labeled alternative in `PostgreSQLParser.selectstmt`.
 * @param ctx the parse tree
 */
-exitSelectstmt?: (ctx: SelectstmtContext) => void;
+exitSelectStatement?: (ctx: SelectStatementContext) => void;
 /**
 * Enter a parse tree produced by `PostgreSQLParser.select_with_parens`.
 * @param ctx the parse tree
@@ -7961,65 +8007,77 @@ export class PostgreSQLParserListener implements ParseTreeListener {
 */
 exitProcedure_name_list?: (ctx: Procedure_name_listContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.tablespace_name_create`.
+* Enter a parse tree produced by the `tablespaceNameCreate`
+* labeled alternative in `PostgreSQLParser.tablespace_name_create`.
 * @param ctx the parse tree
 */
-enterTablespace_name_create?: (ctx: Tablespace_name_createContext) => void;
+enterTablespaceNameCreate?: (ctx: TablespaceNameCreateContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.tablespace_name_create`.
+* Exit a parse tree produced by the `tablespaceNameCreate`
+* labeled alternative in `PostgreSQLParser.tablespace_name_create`.
 * @param ctx the parse tree
 */
-exitTablespace_name_create?: (ctx: Tablespace_name_createContext) => void;
+exitTablespaceNameCreate?: (ctx: TablespaceNameCreateContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.tablespace_name`.
+* Enter a parse tree produced by the `tablespaceName`
+* labeled alternative in `PostgreSQLParser.tablespace_name`.
 * @param ctx the parse tree
 */
-enterTablespace_name?: (ctx: Tablespace_nameContext) => void;
+enterTablespaceName?: (ctx: TablespaceNameContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.tablespace_name`.
+* Exit a parse tree produced by the `tablespaceName`
+* labeled alternative in `PostgreSQLParser.tablespace_name`.
 * @param ctx the parse tree
 */
-exitTablespace_name?: (ctx: Tablespace_nameContext) => void;
+exitTablespaceName?: (ctx: TablespaceNameContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.table_name_create`.
+* Enter a parse tree produced by the `tableNameCreate`
+* labeled alternative in `PostgreSQLParser.table_name_create`.
 * @param ctx the parse tree
 */
-enterTable_name_create?: (ctx: Table_name_createContext) => void;
+enterTableNameCreate?: (ctx: TableNameCreateContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.table_name_create`.
+* Exit a parse tree produced by the `tableNameCreate`
+* labeled alternative in `PostgreSQLParser.table_name_create`.
 * @param ctx the parse tree
 */
-exitTable_name_create?: (ctx: Table_name_createContext) => void;
+exitTableNameCreate?: (ctx: TableNameCreateContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.table_name`.
+* Enter a parse tree produced by the `tableName`
+* labeled alternative in `PostgreSQLParser.table_name`.
 * @param ctx the parse tree
 */
-enterTable_name?: (ctx: Table_nameContext) => void;
+enterTableName?: (ctx: TableNameContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.table_name`.
+* Exit a parse tree produced by the `tableName`
+* labeled alternative in `PostgreSQLParser.table_name`.
 * @param ctx the parse tree
 */
-exitTable_name?: (ctx: Table_nameContext) => void;
+exitTableName?: (ctx: TableNameContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.view_name_create`.
+* Enter a parse tree produced by the `viewNameCreate`
+* labeled alternative in `PostgreSQLParser.view_name_create`.
 * @param ctx the parse tree
 */
-enterView_name_create?: (ctx: View_name_createContext) => void;
+enterViewNameCreate?: (ctx: ViewNameCreateContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.view_name_create`.
+* Exit a parse tree produced by the `viewNameCreate`
+* labeled alternative in `PostgreSQLParser.view_name_create`.
 * @param ctx the parse tree
 */
-exitView_name_create?: (ctx: View_name_createContext) => void;
+exitViewNameCreate?: (ctx: ViewNameCreateContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.view_name`.
+* Enter a parse tree produced by the `viewName`
+* labeled alternative in `PostgreSQLParser.view_name`.
 * @param ctx the parse tree
 */
-enterView_name?: (ctx: View_nameContext) => void;
+enterViewName?: (ctx: ViewNameContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.view_name`.
+* Exit a parse tree produced by the `viewName`
+* labeled alternative in `PostgreSQLParser.view_name`.
 * @param ctx the parse tree
 */
-exitView_name?: (ctx: View_nameContext) => void;
+exitViewName?: (ctx: ViewNameContext) => void;
 /**
 * Enter a parse tree produced by `PostgreSQLParser.qualified_name`.
 * @param ctx the parse tree
@@ -8051,95 +8109,125 @@ export class PostgreSQLParserListener implements ParseTreeListener {
 */
 exitName_list?: (ctx: Name_listContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.database_name_create`.
+* Enter a parse tree produced by the `databaseNameCreate`
+* labeled alternative in `PostgreSQLParser.database_name_create`.
 * @param ctx the parse tree
 */
-enterDatabase_name_create?: (ctx: Database_name_createContext) => void;
+enterDatabaseNameCreate?: (ctx: DatabaseNameCreateContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.database_name_create`.
+* Exit a parse tree produced by the `databaseNameCreate`
+* labeled alternative in `PostgreSQLParser.database_name_create`.
 * @param ctx the parse tree
 */
-exitDatabase_name_create?: (ctx: Database_name_createContext) => void;
+exitDatabaseNameCreate?: (ctx: DatabaseNameCreateContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.database_name`.
+* Enter a parse tree produced by the `databaseName`
+* labeled alternative in `PostgreSQLParser.database_name`.
 * @param ctx the parse tree
 */
-enterDatabase_name?: (ctx: Database_nameContext) => void;
+enterDatabaseName?: (ctx: DatabaseNameContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.database_name`.
+* Exit a parse tree produced by the `databaseName`
+* labeled alternative in `PostgreSQLParser.database_name`.
 * @param ctx the parse tree
 */
-exitDatabase_name?: (ctx: Database_nameContext) => void;
+exitDatabaseName?: (ctx: DatabaseNameContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.schema_name`.
+* Enter a parse tree produced by the `schemaName`
+* labeled alternative in `PostgreSQLParser.schema_name`.
 * @param ctx the parse tree
 */
-enterSchema_name?: (ctx: Schema_nameContext) => void;
+enterSchemaName?: (ctx: SchemaNameContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.schema_name`.
+* Exit a parse tree produced by the `schemaName`
+* labeled alternative in `PostgreSQLParser.schema_name`.
 * @param ctx the parse tree
 */
-exitSchema_name?: (ctx: Schema_nameContext) => void;
+exitSchemaName?: (ctx: SchemaNameContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.routine_name_create`.
+* Enter a parse tree produced by the `routineNameCreate`
+* labeled alternative in `PostgreSQLParser.routine_name_create`.
 * @param ctx the parse tree
 */
-enterRoutine_name_create?: (ctx: Routine_name_createContext) => void;
+enterRoutineNameCreate?: (ctx: RoutineNameCreateContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.routine_name_create`.
+* Exit a parse tree produced by the `routineNameCreate`
+* labeled alternative in `PostgreSQLParser.routine_name_create`.
 * @param ctx the parse tree
 */
-exitRoutine_name_create?: (ctx: Routine_name_createContext) => void;
+exitRoutineNameCreate?: (ctx: RoutineNameCreateContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.routine_name`.
+* Enter a parse tree produced by the `routineName`
+* labeled alternative in `PostgreSQLParser.routine_name`.
 * @param ctx the parse tree
 */
-enterRoutine_name?: (ctx: Routine_nameContext) => void;
+enterRoutineName?: (ctx: RoutineNameContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.routine_name`.
+* Exit a parse tree produced by the `routineName`
+* labeled alternative in `PostgreSQLParser.routine_name`.
 * @param ctx the parse tree
 */
-exitRoutine_name?: (ctx: Routine_nameContext) => void;
+exitRoutineName?: (ctx: RoutineNameContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.procedure_name`.
+* Enter a parse tree produced by the `procedureName`
+* labeled alternative in `PostgreSQLParser.procedure_name`.
 * @param ctx the parse tree
 */
-enterProcedure_name?: (ctx: Procedure_nameContext) => void;
+enterProcedureName?: (ctx: ProcedureNameContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.procedure_name`.
+* Exit a parse tree produced by the `procedureName`
+* labeled alternative in `PostgreSQLParser.procedure_name`.
 * @param ctx the parse tree
 */
-exitProcedure_name?: (ctx: Procedure_nameContext) => void;
+exitProcedureName?: (ctx: ProcedureNameContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.procedure_name_create`.
+* Enter a parse tree produced by the `procedureNameCreate`
+* labeled alternative in `PostgreSQLParser.procedure_name_create`.
 * @param ctx the parse tree
 */
-enterProcedure_name_create?: (ctx: Procedure_name_createContext) => void;
+enterProcedureNameCreate?: (ctx: ProcedureNameCreateContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.procedure_name_create`.
+* Exit a parse tree produced by the `procedureNameCreate`
+* labeled alternative in `PostgreSQLParser.procedure_name_create`.
 * @param ctx the parse tree
 */
-exitProcedure_name_create?: (ctx: Procedure_name_createContext) => void;
+exitProcedureNameCreate?: (ctx: ProcedureNameCreateContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.column_name`.
+* Enter a parse tree produced by the `columnName`
+* labeled alternative in `PostgreSQLParser.column_name`.
 * @param ctx the parse tree
 */
-enterColumn_name?: (ctx: Column_nameContext) => void;
+enterColumnName?: (ctx: ColumnNameContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.column_name`.
+* Exit a parse tree produced by the `columnName`
+* labeled alternative in `PostgreSQLParser.column_name`.
 * @param ctx the parse tree
 */
-exitColumn_name?: (ctx: Column_nameContext) => void;
+exitColumnName?: (ctx: ColumnNameContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.column_name_create`.
+* Enter a parse tree produced by the `columnNameMatch`
+* labeled alternative in `PostgreSQLParser.column_name`.
 * @param ctx the parse tree
 */
-enterColumn_name_create?: (ctx: Column_name_createContext) => void;
+enterColumnNameMatch?: (ctx: ColumnNameMatchContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.column_name_create`.
+* Exit a parse tree produced by the `columnNameMatch`
+* labeled alternative in `PostgreSQLParser.column_name`.
 * @param ctx the parse tree
 */
-exitColumn_name_create?: (ctx: Column_name_createContext) => void;
+exitColumnNameMatch?: (ctx: ColumnNameMatchContext) => void;
+/**
+* Enter a parse tree produced by the `columnNameCreate`
+* labeled alternative in `PostgreSQLParser.column_name_create`.
+* @param ctx the parse tree
+*/
+enterColumnNameCreate?: (ctx: ColumnNameCreateContext) => void;
+/**
+* Exit a parse tree produced by the `columnNameCreate`
+* labeled alternative in `PostgreSQLParser.column_name_create`.
+* @param ctx the parse tree
+*/
+exitColumnNameCreate?: (ctx: ColumnNameCreateContext) => void;
 /**
 * Enter a parse tree produced by `PostgreSQLParser.name`.
 * @param ctx the parse tree
@@ -8171,25 +8259,29 @@ export class PostgreSQLParserListener implements ParseTreeListener {
 */
 exitFile_name?: (ctx: File_nameContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.function_name_create`.
+* Enter a parse tree produced by the `functionNameCreate`
+* labeled alternative in `PostgreSQLParser.function_name_create`.
 * @param ctx the parse tree
 */
-enterFunction_name_create?: (ctx: Function_name_createContext) => void;
+enterFunctionNameCreate?: (ctx: FunctionNameCreateContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.function_name_create`.
+* Exit a parse tree produced by the `functionNameCreate`
+* labeled alternative in `PostgreSQLParser.function_name_create`.
 * @param ctx the parse tree
 */
-exitFunction_name_create?: (ctx: Function_name_createContext) => void;
+exitFunctionNameCreate?: (ctx: FunctionNameCreateContext) => void;
 /**
-* Enter a parse tree produced by `PostgreSQLParser.function_name`.
+* Enter a parse tree produced by the `functionName`
+* labeled alternative in `PostgreSQLParser.function_name`.
 * @param ctx the parse tree
 */
-enterFunction_name?: (ctx: Function_nameContext) => void;
+enterFunctionName?: (ctx: FunctionNameContext) => void;
 /**
-* Exit a parse tree produced by `PostgreSQLParser.function_name`.
+* Exit a parse tree produced by the `functionName`
+* labeled alternative in `PostgreSQLParser.function_name`.
 * @param ctx the parse tree
 */
-exitFunction_name?: (ctx: Function_nameContext) => void;
+exitFunctionName?: (ctx: FunctionNameContext) => void;
 /**
 * Enter a parse tree produced by `PostgreSQLParser.usual_name`.
 * @param ctx the parse tree
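A similarly hypothetical sketch of the listener side: the labeled alternatives renamed above (`tableName`, `tableNameCreate`, and friends) are what let a walker distinguish a referenced table from a table being defined, which is the core of the entity collection this commit adds. The class name, import paths, and collection strategy below are assumptions, not the EntityCollector shipped in this commit.

import { ParseTreeWalker } from "antlr4ng";
import { PostgreSQLParserListener } from "./PostgreSQLParserListener";          // assumed path
import { TableNameContext, TableNameCreateContext } from "./PostgreSQLParser";  // assumed path

// Separates tables that are merely referenced from tables being created.
class TableEntityListener extends PostgreSQLParserListener {
    readonly referencedTables: string[] = [];
    readonly createdTables: string[] = [];

    enterTableName = (ctx: TableNameContext): void => {
        this.referencedTables.push(ctx.getText());  // e.g. FROM / JOIN targets
    };

    enterTableNameCreate = (ctx: TableNameCreateContext): void => {
        this.createdTables.push(ctx.getText());     // the name in CREATE TABLE ...
    };
}

// Usage: const listener = new TableEntityListener();
// ParseTreeWalker.DEFAULT.walk(listener, tree);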
@@ -3,6 +3,9 @@
 import { AbstractParseTreeVisitor } from "antlr4ng";
 
+
+import SQLParserBase from '../SQLParserBase';
+
 
 import { ProgramContext } from "./PostgreSQLParser.js";
 import { PlsqlrootContext } from "./PostgreSQLParser.js";
 import { SingleStmtContext } from "./PostgreSQLParser.js";
@@ -27,7 +30,7 @@ import { CreategroupstmtContext } from "./PostgreSQLParser.js";
 import { AltergroupstmtContext } from "./PostgreSQLParser.js";
 import { Add_dropContext } from "./PostgreSQLParser.js";
 import { CreateschemastmtContext } from "./PostgreSQLParser.js";
-import { Schema_name_createContext } from "./PostgreSQLParser.js";
+import { SchemaNameCreateContext } from "./PostgreSQLParser.js";
 import { OptschemaeltlistContext } from "./PostgreSQLParser.js";
 import { Schema_stmtContext } from "./PostgreSQLParser.js";
 import { VariablesetstmtContext } from "./PostgreSQLParser.js";
@@ -97,7 +100,7 @@ import { Copy_generic_opt_elemContext } from "./PostgreSQLParser.js";
 import { Copy_generic_opt_argContext } from "./PostgreSQLParser.js";
 import { Copy_generic_opt_arg_listContext } from "./PostgreSQLParser.js";
 import { Copy_generic_opt_arg_list_itemContext } from "./PostgreSQLParser.js";
-import { CreatestmtContext } from "./PostgreSQLParser.js";
+import { ColumnCreateTableContext } from "./PostgreSQLParser.js";
 import { OpttempContext } from "./PostgreSQLParser.js";
 import { Table_column_listContext } from "./PostgreSQLParser.js";
 import { OpttableelementlistContext } from "./PostgreSQLParser.js";
@@ -106,8 +109,7 @@ import { TableelementlistContext } from "./PostgreSQLParser.js";
 import { TypedtableelementlistContext } from "./PostgreSQLParser.js";
 import { TableelementContext } from "./PostgreSQLParser.js";
 import { TypedtableelementContext } from "./PostgreSQLParser.js";
-import { ColumnDefCluaseContext } from "./PostgreSQLParser.js";
-import { ColumnDefContext } from "./PostgreSQLParser.js";
+import { Column_defContext } from "./PostgreSQLParser.js";
 import { CompressionCluaseContext } from "./PostgreSQLParser.js";
 import { StorageCluaseContext } from "./PostgreSQLParser.js";
 import { ColumnOptionsContext } from "./PostgreSQLParser.js";
@@ -125,7 +127,9 @@ import { TableconstraintContext } from "./PostgreSQLParser.js";
 import { ConstraintelemContext } from "./PostgreSQLParser.js";
 import { Opt_no_inheritContext } from "./PostgreSQLParser.js";
 import { Opt_column_listContext } from "./PostgreSQLParser.js";
-import { ColumnlistContext } from "./PostgreSQLParser.js";
+import { Opt_column_list_createContext } from "./PostgreSQLParser.js";
+import { Column_listContext } from "./PostgreSQLParser.js";
+import { Column_list_createContext } from "./PostgreSQLParser.js";
 import { Opt_c_includeContext } from "./PostgreSQLParser.js";
 import { Key_matchContext } from "./PostgreSQLParser.js";
 import { ExclusionconstraintlistContext } from "./PostgreSQLParser.js";
@@ -149,10 +153,10 @@ import { OptconstablespaceContext } from "./PostgreSQLParser.js";
 import { ExistingindexContext } from "./PostgreSQLParser.js";
 import { CreatestatsstmtContext } from "./PostgreSQLParser.js";
 import { AlterstatsstmtContext } from "./PostgreSQLParser.js";
-import { CreateasstmtContext } from "./PostgreSQLParser.js";
+import { QueryCreateTableContext } from "./PostgreSQLParser.js";
 import { Create_as_targetContext } from "./PostgreSQLParser.js";
 import { Opt_with_dataContext } from "./PostgreSQLParser.js";
-import { CreatematviewstmtContext } from "./PostgreSQLParser.js";
+import { CreateMaterializedViewContext } from "./PostgreSQLParser.js";
 import { Create_mv_targetContext } from "./PostgreSQLParser.js";
 import { OptnologContext } from "./PostgreSQLParser.js";
 import { RefreshmatviewstmtContext } from "./PostgreSQLParser.js";
@@ -199,7 +203,8 @@ import { Opt_typeContext } from "./PostgreSQLParser.js";
 import { Foreign_server_versionContext } from "./PostgreSQLParser.js";
 import { Opt_foreign_server_versionContext } from "./PostgreSQLParser.js";
 import { AlterforeignserverstmtContext } from "./PostgreSQLParser.js";
-import { CreateforeigntablestmtContext } from "./PostgreSQLParser.js";
+import { CreateForeignTableContext } from "./PostgreSQLParser.js";
+import { CreatePartitionForeignTableContext } from "./PostgreSQLParser.js";
 import { ImportforeignschemastmtContext } from "./PostgreSQLParser.js";
 import { Import_qualification_typeContext } from "./PostgreSQLParser.js";
 import { Import_qualificationContext } from "./PostgreSQLParser.js";
@@ -421,10 +426,10 @@ import { Transaction_mode_itemContext } from "./PostgreSQLParser.js";
 import { Transaction_mode_listContext } from "./PostgreSQLParser.js";
 import { Transaction_mode_list_or_emptyContext } from "./PostgreSQLParser.js";
 import { Opt_transaction_chainContext } from "./PostgreSQLParser.js";
-import { ViewstmtContext } from "./PostgreSQLParser.js";
+import { CreateViewContext } from "./PostgreSQLParser.js";
 import { Opt_check_optionContext } from "./PostgreSQLParser.js";
 import { LoadstmtContext } from "./PostgreSQLParser.js";
-import { CreatedbstmtContext } from "./PostgreSQLParser.js";
+import { CreateDatabaseContext } from "./PostgreSQLParser.js";
 import { Createdb_opt_listContext } from "./PostgreSQLParser.js";
 import { Createdb_opt_itemsContext } from "./PostgreSQLParser.js";
 import { Createdb_opt_itemContext } from "./PostgreSQLParser.js";
@@ -477,7 +482,7 @@ import { PreparablestmtContext } from "./PostgreSQLParser.js";
 import { ExecutestmtContext } from "./PostgreSQLParser.js";
 import { Execute_param_clauseContext } from "./PostgreSQLParser.js";
 import { DeallocatestmtContext } from "./PostgreSQLParser.js";
-import { InsertstmtContext } from "./PostgreSQLParser.js";
+import { InsertStatementContext } from "./PostgreSQLParser.js";
 import { Insert_targetContext } from "./PostgreSQLParser.js";
 import { Insert_restContext } from "./PostgreSQLParser.js";
 import { Override_kindContext } from "./PostgreSQLParser.js";
@@ -502,7 +507,7 @@ import { DeclarecursorstmtContext } from "./PostgreSQLParser.js";
 import { Cursor_nameContext } from "./PostgreSQLParser.js";
 import { Cursor_optionsContext } from "./PostgreSQLParser.js";
 import { Opt_holdContext } from "./PostgreSQLParser.js";
-import { SelectstmtContext } from "./PostgreSQLParser.js";
+import { SelectStatementContext } from "./PostgreSQLParser.js";
 import { Select_with_parensContext } from "./PostgreSQLParser.js";
 import { Select_no_parensContext } from "./PostgreSQLParser.js";
 import { Select_clauseContext } from "./PostgreSQLParser.js";
@@ -708,29 +713,30 @@ import { Table_name_listContext } from "./PostgreSQLParser.js";
 import { Schema_name_listContext } from "./PostgreSQLParser.js";
 import { Database_nameListContext } from "./PostgreSQLParser.js";
 import { Procedure_name_listContext } from "./PostgreSQLParser.js";
-import { Tablespace_name_createContext } from "./PostgreSQLParser.js";
-import { Tablespace_nameContext } from "./PostgreSQLParser.js";
-import { Table_name_createContext } from "./PostgreSQLParser.js";
-import { Table_nameContext } from "./PostgreSQLParser.js";
-import { View_name_createContext } from "./PostgreSQLParser.js";
-import { View_nameContext } from "./PostgreSQLParser.js";
+import { TablespaceNameCreateContext } from "./PostgreSQLParser.js";
+import { TablespaceNameContext } from "./PostgreSQLParser.js";
+import { TableNameCreateContext } from "./PostgreSQLParser.js";
+import { TableNameContext } from "./PostgreSQLParser.js";
+import { ViewNameCreateContext } from "./PostgreSQLParser.js";
+import { ViewNameContext } from "./PostgreSQLParser.js";
 import { Qualified_nameContext } from "./PostgreSQLParser.js";
 import { Tablespace_name_listContext } from "./PostgreSQLParser.js";
 import { Name_listContext } from "./PostgreSQLParser.js";
-import { Database_name_createContext } from "./PostgreSQLParser.js";
-import { Database_nameContext } from "./PostgreSQLParser.js";
-import { Schema_nameContext } from "./PostgreSQLParser.js";
-import { Routine_name_createContext } from "./PostgreSQLParser.js";
-import { Routine_nameContext } from "./PostgreSQLParser.js";
-import { Procedure_nameContext } from "./PostgreSQLParser.js";
-import { Procedure_name_createContext } from "./PostgreSQLParser.js";
-import { Column_nameContext } from "./PostgreSQLParser.js";
-import { Column_name_createContext } from "./PostgreSQLParser.js";
+import { DatabaseNameCreateContext } from "./PostgreSQLParser.js";
+import { DatabaseNameContext } from "./PostgreSQLParser.js";
+import { SchemaNameContext } from "./PostgreSQLParser.js";
+import { RoutineNameCreateContext } from "./PostgreSQLParser.js";
+import { RoutineNameContext } from "./PostgreSQLParser.js";
+import { ProcedureNameContext } from "./PostgreSQLParser.js";
+import { ProcedureNameCreateContext } from "./PostgreSQLParser.js";
+import { ColumnNameContext } from "./PostgreSQLParser.js";
+import { ColumnNameMatchContext } from "./PostgreSQLParser.js";
+import { ColumnNameCreateContext } from "./PostgreSQLParser.js";
 import { NameContext } from "./PostgreSQLParser.js";
 import { Attr_nameContext } from "./PostgreSQLParser.js";
 import { File_nameContext } from "./PostgreSQLParser.js";
-import { Function_name_createContext } from "./PostgreSQLParser.js";
-import { Function_nameContext } from "./PostgreSQLParser.js";
+import { FunctionNameCreateContext } from "./PostgreSQLParser.js";
+import { FunctionNameContext } from "./PostgreSQLParser.js";
 import { Usual_nameContext } from "./PostgreSQLParser.js";
 import { AexprconstContext } from "./PostgreSQLParser.js";
 import { XconstContext } from "./PostgreSQLParser.js";
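Note on the renamed name contexts above: the generated visitor now exposes PascalCase labeled name rules (TableNameContext, ViewNameCreateContext, and so on), so an entity collector can hook the specific kind of name instead of pattern-matching on qualified_name. Below is a minimal sketch of such a consumer; it is not part of this commit, and the import paths, class name, and result shape are illustrative assumptions.

```typescript
// Sketch only: the collector name, result shape, and import paths are assumptions.
import { PostgreSQLParserVisitor } from "./PostgreSQLParserVisitor";
import {
    TableNameContext,
    TableNameCreateContext,
    ViewNameCreateContext,
} from "./PostgreSQLParser.js";

type CollectedName = { kind: "table" | "tableCreate" | "viewCreate"; text: string };

class NameCollector extends PostgreSQLParserVisitor<void> {
    readonly names: CollectedName[] = [];

    // Referenced tables now arrive through the `tableName` labeled alternative.
    visitTableName = (ctx: TableNameContext) => {
        this.names.push({ kind: "table", text: ctx.getText() });
        this.visitChildren(ctx);
    };

    // Tables being created are reported separately via `tableNameCreate`.
    visitTableNameCreate = (ctx: TableNameCreateContext) => {
        this.names.push({ kind: "tableCreate", text: ctx.getText() });
        this.visitChildren(ctx);
    };

    // The same pattern applies to views, databases, schemas, functions, and so on.
    visitViewNameCreate = (ctx: ViewNameCreateContext) => {
        this.names.push({ kind: "viewCreate", text: ctx.getText() });
        this.visitChildren(ctx);
    };
}
```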
@@ -1038,11 +1044,12 @@ export class PostgreSQLParserVisitor<Result> extends AbstractParseTreeVisitor<Re
      */
     visitCreateschemastmt?: (ctx: CreateschemastmtContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.schema_name_create`.
+     * Visit a parse tree produced by the `schemaNameCreate`
+     * labeled alternative in `PostgreSQLParser.schema_name_create`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitSchema_name_create?: (ctx: Schema_name_createContext) => Result;
+    visitSchemaNameCreate?: (ctx: SchemaNameCreateContext) => Result;
     /**
      * Visit a parse tree produced by `PostgreSQLParser.optschemaeltlist`.
      * @param ctx the parse tree
@@ -1458,11 +1465,12 @@ export class PostgreSQLParserVisitor<Result> extends AbstractParseTreeVisitor<Re
      */
     visitCopy_generic_opt_arg_list_item?: (ctx: Copy_generic_opt_arg_list_itemContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.createstmt`.
+     * Visit a parse tree produced by the `columnCreateTable`
+     * labeled alternative in `PostgreSQLParser.createstmt`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitCreatestmt?: (ctx: CreatestmtContext) => Result;
+    visitColumnCreateTable?: (ctx: ColumnCreateTableContext) => Result;
     /**
      * Visit a parse tree produced by `PostgreSQLParser.opttemp`.
      * @param ctx the parse tree
@@ -1512,17 +1520,11 @@ export class PostgreSQLParserVisitor<Result> extends AbstractParseTreeVisitor<Re
      */
     visitTypedtableelement?: (ctx: TypedtableelementContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.columnDefCluase`.
+     * Visit a parse tree produced by `PostgreSQLParser.column_def`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitColumnDefCluase?: (ctx: ColumnDefCluaseContext) => Result;
-    /**
-     * Visit a parse tree produced by `PostgreSQLParser.columnDef`.
-     * @param ctx the parse tree
-     * @return the visitor result
-     */
-    visitColumnDef?: (ctx: ColumnDefContext) => Result;
+    visitColumn_def?: (ctx: Column_defContext) => Result;
     /**
      * Visit a parse tree produced by `PostgreSQLParser.compressionCluase`.
      * @param ctx the parse tree
@@ -1626,11 +1628,23 @@ export class PostgreSQLParserVisitor<Result> extends AbstractParseTreeVisitor<Re
      */
     visitOpt_column_list?: (ctx: Opt_column_listContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.columnlist`.
+     * Visit a parse tree produced by `PostgreSQLParser.opt_column_list_create`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitColumnlist?: (ctx: ColumnlistContext) => Result;
+    visitOpt_column_list_create?: (ctx: Opt_column_list_createContext) => Result;
+    /**
+     * Visit a parse tree produced by `PostgreSQLParser.column_list`.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    visitColumn_list?: (ctx: Column_listContext) => Result;
+    /**
+     * Visit a parse tree produced by `PostgreSQLParser.column_list_create`.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    visitColumn_list_create?: (ctx: Column_list_createContext) => Result;
     /**
      * Visit a parse tree produced by `PostgreSQLParser.opt_c_include`.
      * @param ctx the parse tree
@@ -1770,11 +1784,12 @@ export class PostgreSQLParserVisitor<Result> extends AbstractParseTreeVisitor<Re
      */
     visitAlterstatsstmt?: (ctx: AlterstatsstmtContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.createasstmt`.
+     * Visit a parse tree produced by the `queryCreateTable`
+     * labeled alternative in `PostgreSQLParser.createasstmt`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitCreateasstmt?: (ctx: CreateasstmtContext) => Result;
+    visitQueryCreateTable?: (ctx: QueryCreateTableContext) => Result;
     /**
      * Visit a parse tree produced by `PostgreSQLParser.create_as_target`.
      * @param ctx the parse tree
@@ -1788,11 +1803,12 @@ export class PostgreSQLParserVisitor<Result> extends AbstractParseTreeVisitor<Re
      */
     visitOpt_with_data?: (ctx: Opt_with_dataContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.creatematviewstmt`.
+     * Visit a parse tree produced by the `createMaterializedView`
+     * labeled alternative in `PostgreSQLParser.creatematviewstmt`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitCreatematviewstmt?: (ctx: CreatematviewstmtContext) => Result;
+    visitCreateMaterializedView?: (ctx: CreateMaterializedViewContext) => Result;
     /**
      * Visit a parse tree produced by `PostgreSQLParser.create_mv_target`.
      * @param ctx the parse tree
@@ -2070,11 +2086,19 @@ export class PostgreSQLParserVisitor<Result> extends AbstractParseTreeVisitor<Re
      */
     visitAlterforeignserverstmt?: (ctx: AlterforeignserverstmtContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.createforeigntablestmt`.
+     * Visit a parse tree produced by the `createForeignTable`
+     * labeled alternative in `PostgreSQLParser.createforeigntablestmt`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitCreateforeigntablestmt?: (ctx: CreateforeigntablestmtContext) => Result;
+    visitCreateForeignTable?: (ctx: CreateForeignTableContext) => Result;
+    /**
+     * Visit a parse tree produced by the `createPartitionForeignTable`
+     * labeled alternative in `PostgreSQLParser.createforeigntablestmt`.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    visitCreatePartitionForeignTable?: (ctx: CreatePartitionForeignTableContext) => Result;
     /**
      * Visit a parse tree produced by `PostgreSQLParser.importforeignschemastmt`.
      * @param ctx the parse tree
@@ -3402,11 +3426,12 @@ export class PostgreSQLParserVisitor<Result> extends AbstractParseTreeVisitor<Re
      */
     visitOpt_transaction_chain?: (ctx: Opt_transaction_chainContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.viewstmt`.
+     * Visit a parse tree produced by the `createView`
+     * labeled alternative in `PostgreSQLParser.viewstmt`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitViewstmt?: (ctx: ViewstmtContext) => Result;
+    visitCreateView?: (ctx: CreateViewContext) => Result;
     /**
      * Visit a parse tree produced by `PostgreSQLParser.opt_check_option`.
      * @param ctx the parse tree
@@ -3420,11 +3445,12 @@ export class PostgreSQLParserVisitor<Result> extends AbstractParseTreeVisitor<Re
      */
     visitLoadstmt?: (ctx: LoadstmtContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.createdbstmt`.
+     * Visit a parse tree produced by the `createDatabase`
+     * labeled alternative in `PostgreSQLParser.createdbstmt`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitCreatedbstmt?: (ctx: CreatedbstmtContext) => Result;
+    visitCreateDatabase?: (ctx: CreateDatabaseContext) => Result;
     /**
      * Visit a parse tree produced by `PostgreSQLParser.createdb_opt_list`.
      * @param ctx the parse tree
@@ -3738,11 +3764,12 @@ export class PostgreSQLParserVisitor<Result> extends AbstractParseTreeVisitor<Re
      */
     visitDeallocatestmt?: (ctx: DeallocatestmtContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.insertstmt`.
+     * Visit a parse tree produced by the `insertStatement`
+     * labeled alternative in `PostgreSQLParser.insertstmt`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitInsertstmt?: (ctx: InsertstmtContext) => Result;
+    visitInsertStatement?: (ctx: InsertStatementContext) => Result;
     /**
      * Visit a parse tree produced by `PostgreSQLParser.insert_target`.
      * @param ctx the parse tree
@@ -3888,11 +3915,12 @@ export class PostgreSQLParserVisitor<Result> extends AbstractParseTreeVisitor<Re
      */
     visitOpt_hold?: (ctx: Opt_holdContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.selectstmt`.
+     * Visit a parse tree produced by the `selectStatement`
+     * labeled alternative in `PostgreSQLParser.selectstmt`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitSelectstmt?: (ctx: SelectstmtContext) => Result;
+    visitSelectStatement?: (ctx: SelectStatementContext) => Result;
     /**
      * Visit a parse tree produced by `PostgreSQLParser.select_with_parens`.
      * @param ctx the parse tree
@@ -5134,41 +5162,47 @@ export class PostgreSQLParserVisitor<Result> extends AbstractParseTreeVisitor<Re
      */
     visitProcedure_name_list?: (ctx: Procedure_name_listContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.tablespace_name_create`.
+     * Visit a parse tree produced by the `tablespaceNameCreate`
+     * labeled alternative in `PostgreSQLParser.tablespace_name_create`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitTablespace_name_create?: (ctx: Tablespace_name_createContext) => Result;
+    visitTablespaceNameCreate?: (ctx: TablespaceNameCreateContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.tablespace_name`.
+     * Visit a parse tree produced by the `tablespaceName`
+     * labeled alternative in `PostgreSQLParser.tablespace_name`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitTablespace_name?: (ctx: Tablespace_nameContext) => Result;
+    visitTablespaceName?: (ctx: TablespaceNameContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.table_name_create`.
+     * Visit a parse tree produced by the `tableNameCreate`
+     * labeled alternative in `PostgreSQLParser.table_name_create`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitTable_name_create?: (ctx: Table_name_createContext) => Result;
+    visitTableNameCreate?: (ctx: TableNameCreateContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.table_name`.
+     * Visit a parse tree produced by the `tableName`
+     * labeled alternative in `PostgreSQLParser.table_name`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitTable_name?: (ctx: Table_nameContext) => Result;
+    visitTableName?: (ctx: TableNameContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.view_name_create`.
+     * Visit a parse tree produced by the `viewNameCreate`
+     * labeled alternative in `PostgreSQLParser.view_name_create`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitView_name_create?: (ctx: View_name_createContext) => Result;
+    visitViewNameCreate?: (ctx: ViewNameCreateContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.view_name`.
+     * Visit a parse tree produced by the `viewName`
+     * labeled alternative in `PostgreSQLParser.view_name`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitView_name?: (ctx: View_nameContext) => Result;
+    visitViewName?: (ctx: ViewNameContext) => Result;
     /**
      * Visit a parse tree produced by `PostgreSQLParser.qualified_name`.
      * @param ctx the parse tree
@@ -5188,59 +5222,75 @@ export class PostgreSQLParserVisitor<Result> extends AbstractParseTreeVisitor<Re
      */
     visitName_list?: (ctx: Name_listContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.database_name_create`.
+     * Visit a parse tree produced by the `databaseNameCreate`
+     * labeled alternative in `PostgreSQLParser.database_name_create`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitDatabase_name_create?: (ctx: Database_name_createContext) => Result;
+    visitDatabaseNameCreate?: (ctx: DatabaseNameCreateContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.database_name`.
+     * Visit a parse tree produced by the `databaseName`
+     * labeled alternative in `PostgreSQLParser.database_name`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitDatabase_name?: (ctx: Database_nameContext) => Result;
+    visitDatabaseName?: (ctx: DatabaseNameContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.schema_name`.
+     * Visit a parse tree produced by the `schemaName`
+     * labeled alternative in `PostgreSQLParser.schema_name`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitSchema_name?: (ctx: Schema_nameContext) => Result;
+    visitSchemaName?: (ctx: SchemaNameContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.routine_name_create`.
+     * Visit a parse tree produced by the `routineNameCreate`
+     * labeled alternative in `PostgreSQLParser.routine_name_create`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitRoutine_name_create?: (ctx: Routine_name_createContext) => Result;
+    visitRoutineNameCreate?: (ctx: RoutineNameCreateContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.routine_name`.
+     * Visit a parse tree produced by the `routineName`
+     * labeled alternative in `PostgreSQLParser.routine_name`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitRoutine_name?: (ctx: Routine_nameContext) => Result;
+    visitRoutineName?: (ctx: RoutineNameContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.procedure_name`.
+     * Visit a parse tree produced by the `procedureName`
+     * labeled alternative in `PostgreSQLParser.procedure_name`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitProcedure_name?: (ctx: Procedure_nameContext) => Result;
+    visitProcedureName?: (ctx: ProcedureNameContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.procedure_name_create`.
+     * Visit a parse tree produced by the `procedureNameCreate`
+     * labeled alternative in `PostgreSQLParser.procedure_name_create`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitProcedure_name_create?: (ctx: Procedure_name_createContext) => Result;
+    visitProcedureNameCreate?: (ctx: ProcedureNameCreateContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.column_name`.
+     * Visit a parse tree produced by the `columnName`
+     * labeled alternative in `PostgreSQLParser.column_name`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitColumn_name?: (ctx: Column_nameContext) => Result;
+    visitColumnName?: (ctx: ColumnNameContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.column_name_create`.
+     * Visit a parse tree produced by the `columnNameMatch`
+     * labeled alternative in `PostgreSQLParser.column_name`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitColumn_name_create?: (ctx: Column_name_createContext) => Result;
+    visitColumnNameMatch?: (ctx: ColumnNameMatchContext) => Result;
+    /**
+     * Visit a parse tree produced by the `columnNameCreate`
+     * labeled alternative in `PostgreSQLParser.column_name_create`.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    visitColumnNameCreate?: (ctx: ColumnNameCreateContext) => Result;
     /**
      * Visit a parse tree produced by `PostgreSQLParser.name`.
      * @param ctx the parse tree
@@ -5260,17 +5310,19 @@ export class PostgreSQLParserVisitor<Result> extends AbstractParseTreeVisitor<Re
      */
     visitFile_name?: (ctx: File_nameContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.function_name_create`.
+     * Visit a parse tree produced by the `functionNameCreate`
+     * labeled alternative in `PostgreSQLParser.function_name_create`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitFunction_name_create?: (ctx: Function_name_createContext) => Result;
+    visitFunctionNameCreate?: (ctx: FunctionNameCreateContext) => Result;
     /**
-     * Visit a parse tree produced by `PostgreSQLParser.function_name`.
+     * Visit a parse tree produced by the `functionName`
+     * labeled alternative in `PostgreSQLParser.function_name`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitFunction_name?: (ctx: Function_nameContext) => Result;
+    visitFunctionName?: (ctx: FunctionNameContext) => Result;
     /**
      * Visit a parse tree produced by `PostgreSQLParser.usual_name`.
      * @param ctx the parse tree
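The column and function name rules above now carry create/reference labels (for example `columnNameCreate` in `column_name_create` versus `columnName` and `columnNameMatch` in `column_name`), which lets a collector tell entities being defined apart from entities being referenced. A hedged sketch follows; it is not part of this diff, and the class name, import paths, and result fields are assumptions.

```typescript
// Sketch only: distinguishing defined vs. referenced columns via the new labels.
import { PostgreSQLParserVisitor } from "./PostgreSQLParserVisitor";
import {
    ColumnNameContext,
    ColumnNameCreateContext,
} from "./PostgreSQLParser.js";

class ColumnCollector extends PostgreSQLParserVisitor<void> {
    readonly referenced: string[] = [];
    readonly created: string[] = [];

    // `columnName` covers columns referenced in existing objects.
    visitColumnName = (ctx: ColumnNameContext) => {
        this.referenced.push(ctx.getText());
        this.visitChildren(ctx);
    };

    // `columnNameCreate` marks columns being defined, e.g. in a CREATE statement.
    visitColumnNameCreate = (ctx: ColumnNameCreateContext) => {
        this.created.push(ctx.getText());
        this.visitChildren(ctx);
    };
}
```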
@@ -1,5 +1,6 @@
-import { Parser, TokenStream } from "antlr4ng";
-export default abstract class PlSqlBaseParser extends Parser {
+import { TokenStream } from "antlr4ng";
+import SQLParserBase from "../SQLParserBase";
+export default abstract class PlSqlBaseParser extends SQLParserBase {
 
     private _isVersion10: boolean = false;
     private _isVersion12: boolean = true;
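PlSqlBaseParser now derives from the shared SQLParserBase instead of antlr4ng's Parser directly. The real SQLParserBase is not shown in this diff; the sketch below is only a hypothetical minimal shape to illustrate the idea of one common superclass for every dialect's generated or hand-written base parser.

```typescript
// Hypothetical minimal shape of SQLParserBase (assumption; the real members are
// defined elsewhere in the repository and are not part of this hunk).
import { Parser, TokenStream } from "antlr4ng";

export default abstract class SQLParserBase extends Parser {
    public constructor(input: TokenStream) {
        super(input);
    }

    // Placeholder for shared state/helpers that dialect parsers could rely on;
    // the field name is illustrative only.
    protected caretTokenIndex: number = -1;
}
```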
File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
File diff suppressed because it is too large
@ -3,9 +3,102 @@
|
|||||||
import { AbstractParseTreeVisitor } from "antlr4ng";
|
import { AbstractParseTreeVisitor } from "antlr4ng";
|
||||||
|
|
||||||
|
|
||||||
|
import SQLParserBase from '../SQLParserBase';
|
||||||
|
|
||||||
|
|
||||||
import { ProgramContext } from "./SparkSqlParser.js";
|
import { ProgramContext } from "./SparkSqlParser.js";
|
||||||
import { SingleStatementContext } from "./SparkSqlParser.js";
|
import { SingleStatementContext } from "./SparkSqlParser.js";
|
||||||
import { StatementContext } from "./SparkSqlParser.js";
|
import { StatementDefaultContext } from "./SparkSqlParser.js";
|
||||||
|
import { DmlStatementContext } from "./SparkSqlParser.js";
|
||||||
|
import { UseNamespaceContext } from "./SparkSqlParser.js";
|
||||||
|
import { SetCatalogContext } from "./SparkSqlParser.js";
|
||||||
|
import { CreateNamespaceContext } from "./SparkSqlParser.js";
|
||||||
|
import { SetNamespacePropertiesContext } from "./SparkSqlParser.js";
|
||||||
|
import { SetNamespaceLocationContext } from "./SparkSqlParser.js";
|
||||||
|
import { DropNamespaceContext } from "./SparkSqlParser.js";
|
||||||
|
import { ShowNamespacesContext } from "./SparkSqlParser.js";
|
||||||
|
import { CreateTableContext } from "./SparkSqlParser.js";
|
||||||
|
import { CreateTableLikeContext } from "./SparkSqlParser.js";
|
||||||
|
import { ReplaceTableContext } from "./SparkSqlParser.js";
|
||||||
|
import { AnalyzeContext } from "./SparkSqlParser.js";
|
||||||
|
import { AnalyzeTablesContext } from "./SparkSqlParser.js";
|
||||||
|
import { AlterTableAddColumnContext } from "./SparkSqlParser.js";
|
||||||
|
import { AlterTableAddColumnsContext } from "./SparkSqlParser.js";
|
||||||
|
import { RenameTableColumnContext } from "./SparkSqlParser.js";
|
||||||
|
import { AlterTableDropColumnContext } from "./SparkSqlParser.js";
|
||||||
|
import { DropTableColumnsContext } from "./SparkSqlParser.js";
|
||||||
|
import { RenameTableContext } from "./SparkSqlParser.js";
|
||||||
|
import { SetTablePropertiesContext } from "./SparkSqlParser.js";
|
||||||
|
import { UnsetTablePropertiesContext } from "./SparkSqlParser.js";
|
||||||
|
import { AlterTableAlterColumnContext } from "./SparkSqlParser.js";
|
||||||
|
import { HiveChangeColumnContext } from "./SparkSqlParser.js";
|
||||||
|
import { HiveReplaceColumnsContext } from "./SparkSqlParser.js";
|
||||||
|
import { SetTableSerDeContext } from "./SparkSqlParser.js";
|
||||||
|
import { SetTableSerDePropertiesContext } from "./SparkSqlParser.js";
|
||||||
|
import { AddTablePartitionContext } from "./SparkSqlParser.js";
|
||||||
|
import { RenameTablePartitionContext } from "./SparkSqlParser.js";
|
||||||
|
import { DropTablePartitionsContext } from "./SparkSqlParser.js";
|
||||||
|
import { SetTableLocationContext } from "./SparkSqlParser.js";
|
||||||
|
import { RecoverPartitionsContext } from "./SparkSqlParser.js";
|
||||||
|
import { AlterMaterializedViewRewriteContext } from "./SparkSqlParser.js";
|
||||||
|
import { AlterMaterializedViewPropertiesContext } from "./SparkSqlParser.js";
|
||||||
|
import { DropTableContext } from "./SparkSqlParser.js";
|
||||||
|
import { DropViewContext } from "./SparkSqlParser.js";
|
||||||
|
import { DropMaterializedViewContext } from "./SparkSqlParser.js";
|
||||||
|
import { CreateViewContext } from "./SparkSqlParser.js";
|
||||||
|
import { CreateTempViewUsingContext } from "./SparkSqlParser.js";
|
||||||
|
import { AlterViewQueryContext } from "./SparkSqlParser.js";
|
||||||
|
import { CreateFunctionContext } from "./SparkSqlParser.js";
|
||||||
|
import { CreateMaterializedViewContext } from "./SparkSqlParser.js";
|
||||||
|
import { DropFunctionContext } from "./SparkSqlParser.js";
|
||||||
|
import { DeclareVariableContext } from "./SparkSqlParser.js";
|
||||||
|
import { DropVariableContext } from "./SparkSqlParser.js";
|
||||||
|
import { ExplainStatementContext } from "./SparkSqlParser.js";
|
||||||
|
import { ShowTablesContext } from "./SparkSqlParser.js";
|
||||||
|
import { ShowTableExtendedContext } from "./SparkSqlParser.js";
|
||||||
|
import { ShowTblPropertiesContext } from "./SparkSqlParser.js";
|
||||||
|
import { ShowColumnsContext } from "./SparkSqlParser.js";
|
||||||
|
import { ShowViewsContext } from "./SparkSqlParser.js";
|
||||||
|
import { ShowPartitionsContext } from "./SparkSqlParser.js";
|
||||||
|
import { ShowFunctionsContext } from "./SparkSqlParser.js";
|
||||||
|
import { ShowCreateTableContext } from "./SparkSqlParser.js";
|
||||||
|
import { ShowCurrentNamespaceContext } from "./SparkSqlParser.js";
|
||||||
|
import { ShowCatalogsContext } from "./SparkSqlParser.js";
|
||||||
|
import { ShowMaterializedViewsContext } from "./SparkSqlParser.js";
|
||||||
|
import { ShowCreateMaterializedViewContext } from "./SparkSqlParser.js";
|
||||||
|
import { DescribeFunctionContext } from "./SparkSqlParser.js";
|
||||||
|
import { DescribeNamespaceContext } from "./SparkSqlParser.js";
|
||||||
|
import { DescribeRelationContext } from "./SparkSqlParser.js";
|
||||||
|
import { DescribeQueryContext } from "./SparkSqlParser.js";
|
||||||
|
import { CommentNamespaceContext } from "./SparkSqlParser.js";
|
||||||
|
import { CommentTableContext } from "./SparkSqlParser.js";
|
||||||
|
import { RefreshTableContext } from "./SparkSqlParser.js";
|
||||||
|
import { RefreshFunctionContext } from "./SparkSqlParser.js";
|
||||||
|
import { RefreshResourceContext } from "./SparkSqlParser.js";
|
||||||
|
import { RefreshMaterializedViewContext } from "./SparkSqlParser.js";
|
||||||
|
import { CacheTableContext } from "./SparkSqlParser.js";
|
||||||
|
import { UnCacheTableContext } from "./SparkSqlParser.js";
|
||||||
|
import { ClearCacheContext } from "./SparkSqlParser.js";
|
||||||
|
import { LoadDataContext } from "./SparkSqlParser.js";
|
||||||
|
import { TruncateTableContext } from "./SparkSqlParser.js";
|
||||||
|
import { RepairTableContext } from "./SparkSqlParser.js";
|
||||||
|
import { ManageResourceContext } from "./SparkSqlParser.js";
|
||||||
|
import { FailNativeCommandContext } from "./SparkSqlParser.js";
|
||||||
|
import { SetTimeZoneIntervalContext } from "./SparkSqlParser.js";
|
||||||
|
import { SetTimeZoneContext } from "./SparkSqlParser.js";
|
||||||
|
import { SetTimeZoneAnyContext } from "./SparkSqlParser.js";
|
||||||
|
import { SetVariableAssignmentContext } from "./SparkSqlParser.js";
|
||||||
|
import { SetVariableMultiAssignmentContext } from "./SparkSqlParser.js";
|
||||||
|
import { SetConfigContext } from "./SparkSqlParser.js";
|
||||||
|
import { SetConfigAndValueContext } from "./SparkSqlParser.js";
|
||||||
|
import { SetConfigAnyKeyContext } from "./SparkSqlParser.js";
|
||||||
|
import { SetAnyContext } from "./SparkSqlParser.js";
|
||||||
|
import { ResetConfigContext } from "./SparkSqlParser.js";
|
||||||
|
import { ResetAnyContext } from "./SparkSqlParser.js";
|
||||||
|
import { CreateIndexContext } from "./SparkSqlParser.js";
|
||||||
|
import { DropIndexContext } from "./SparkSqlParser.js";
|
||||||
|
import { OptimizeTableContext } from "./SparkSqlParser.js";
|
||||||
|
import { UnsupportHiveCommandsContext } from "./SparkSqlParser.js";
|
||||||
import { TimezoneContext } from "./SparkSqlParser.js";
|
import { TimezoneContext } from "./SparkSqlParser.js";
|
||||||
import { ConfigKeyContext } from "./SparkSqlParser.js";
|
import { ConfigKeyContext } from "./SparkSqlParser.js";
|
||||||
import { ConfigValueContext } from "./SparkSqlParser.js";
|
import { ConfigValueContext } from "./SparkSqlParser.js";
|
||||||
@@ -16,13 +109,13 @@ import { BucketSpecContext } from "./SparkSqlParser.js";
 import { SkewSpecContext } from "./SparkSqlParser.js";
 import { LocationSpecContext } from "./SparkSqlParser.js";
 import { CommentSpecContext } from "./SparkSqlParser.js";
-import { QueryContext } from "./SparkSqlParser.js";
+import { QueryStatementContext } from "./SparkSqlParser.js";
 import { InsertIntoContext } from "./SparkSqlParser.js";
 import { PartitionSpecLocationContext } from "./SparkSqlParser.js";
 import { PartitionSpecContext } from "./SparkSqlParser.js";
 import { PartitionValContext } from "./SparkSqlParser.js";
-import { DbSchemaContext } from "./SparkSqlParser.js";
-import { DbSchemasContext } from "./SparkSqlParser.js";
+import { NamespaceContext } from "./SparkSqlParser.js";
+import { NamespacesContext } from "./SparkSqlParser.js";
 import { DescribeFuncNameContext } from "./SparkSqlParser.js";
 import { DescribeColNameContext } from "./SparkSqlParser.js";
 import { CtesContext } from "./SparkSqlParser.js";
@@ -41,9 +134,13 @@ import { CreateFileFormatContext } from "./SparkSqlParser.js";
 import { FileFormatContext } from "./SparkSqlParser.js";
 import { StorageHandlerContext } from "./SparkSqlParser.js";
 import { ResourceContext } from "./SparkSqlParser.js";
-import { DmlStatementNoWithContext } from "./SparkSqlParser.js";
-import { DbSchemaNameContext } from "./SparkSqlParser.js";
-import { DbSchemaNameCreateContext } from "./SparkSqlParser.js";
+import { InsertFromQueryContext } from "./SparkSqlParser.js";
+import { MultipleInsertContext } from "./SparkSqlParser.js";
+import { DeleteFromTableContext } from "./SparkSqlParser.js";
+import { UpdateTableContext } from "./SparkSqlParser.js";
+import { MergeIntoTableContext } from "./SparkSqlParser.js";
+import { NamespaceNameContext } from "./SparkSqlParser.js";
+import { NamespaceNameCreateContext } from "./SparkSqlParser.js";
 import { TableNameCreateContext } from "./SparkSqlParser.js";
 import { TableNameContext } from "./SparkSqlParser.js";
 import { ViewNameCreateContext } from "./SparkSqlParser.js";
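The SparkSqlParser `statement` rule is now split into labeled alternatives (statementDefault, useNamespace, createTable, dropTable, and the many others listed in the next hunk), so a visitor can react to each statement kind through its own hook instead of inspecting one generic StatementContext. A small sketch follows; it is not part of this commit, and the collector class and property names are assumptions.

```typescript
// Sketch only: classifying top-level Spark statements via the new labeled hooks.
import { SparkSqlParserVisitor } from "./SparkSqlParserVisitor";
import {
    CreateTableContext,
    DropTableContext,
    UseNamespaceContext,
} from "./SparkSqlParser.js";

class StatementKindCollector extends SparkSqlParserVisitor<void> {
    readonly kinds: string[] = [];

    visitCreateTable = (ctx: CreateTableContext) => {
        this.kinds.push("CREATE TABLE");
        this.visitChildren(ctx);
    };

    visitDropTable = (ctx: DropTableContext) => {
        this.kinds.push("DROP TABLE");
        this.visitChildren(ctx);
    };

    visitUseNamespace = (ctx: UseNamespaceContext) => {
        this.kinds.push("USE");
        this.visitChildren(ctx);
    };
}
```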
@ -225,11 +322,642 @@ export class SparkSqlParserVisitor<Result> extends AbstractParseTreeVisitor<Resu
|
|||||||
*/
|
*/
|
||||||
visitSingleStatement?: (ctx: SingleStatementContext) => Result;
|
visitSingleStatement?: (ctx: SingleStatementContext) => Result;
|
||||||
/**
|
/**
|
||||||
* Visit a parse tree produced by `SparkSqlParser.statement`.
|
* Visit a parse tree produced by the `statementDefault`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
* @param ctx the parse tree
|
* @param ctx the parse tree
|
||||||
* @return the visitor result
|
* @return the visitor result
|
||||||
*/
|
*/
|
||||||
visitStatement?: (ctx: StatementContext) => Result;
|
visitStatementDefault?: (ctx: StatementDefaultContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `dmlStatement`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitDmlStatement?: (ctx: DmlStatementContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `useNamespace`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitUseNamespace?: (ctx: UseNamespaceContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `setCatalog`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitSetCatalog?: (ctx: SetCatalogContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `createNamespace`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitCreateNamespace?: (ctx: CreateNamespaceContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `setNamespaceProperties`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitSetNamespaceProperties?: (ctx: SetNamespacePropertiesContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `setNamespaceLocation`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitSetNamespaceLocation?: (ctx: SetNamespaceLocationContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `dropNamespace`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitDropNamespace?: (ctx: DropNamespaceContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `showNamespaces`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitShowNamespaces?: (ctx: ShowNamespacesContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `createTable`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitCreateTable?: (ctx: CreateTableContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `createTableLike`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitCreateTableLike?: (ctx: CreateTableLikeContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `replaceTable`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitReplaceTable?: (ctx: ReplaceTableContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `analyze`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitAnalyze?: (ctx: AnalyzeContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `analyzeTables`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitAnalyzeTables?: (ctx: AnalyzeTablesContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `alterTableAddColumn`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitAlterTableAddColumn?: (ctx: AlterTableAddColumnContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `alterTableAddColumns`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitAlterTableAddColumns?: (ctx: AlterTableAddColumnsContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `renameTableColumn`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitRenameTableColumn?: (ctx: RenameTableColumnContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `alterTableDropColumn`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitAlterTableDropColumn?: (ctx: AlterTableDropColumnContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `dropTableColumns`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitDropTableColumns?: (ctx: DropTableColumnsContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `renameTable`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitRenameTable?: (ctx: RenameTableContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `setTableProperties`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitSetTableProperties?: (ctx: SetTablePropertiesContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `unsetTableProperties`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitUnsetTableProperties?: (ctx: UnsetTablePropertiesContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `alterTableAlterColumn`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitAlterTableAlterColumn?: (ctx: AlterTableAlterColumnContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `hiveChangeColumn`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitHiveChangeColumn?: (ctx: HiveChangeColumnContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `hiveReplaceColumns`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitHiveReplaceColumns?: (ctx: HiveReplaceColumnsContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `setTableSerDe`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitSetTableSerDe?: (ctx: SetTableSerDeContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `setTableSerDeProperties`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitSetTableSerDeProperties?: (ctx: SetTableSerDePropertiesContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `addTablePartition`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitAddTablePartition?: (ctx: AddTablePartitionContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `renameTablePartition`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitRenameTablePartition?: (ctx: RenameTablePartitionContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `dropTablePartitions`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitDropTablePartitions?: (ctx: DropTablePartitionsContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `setTableLocation`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitSetTableLocation?: (ctx: SetTableLocationContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `recoverPartitions`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitRecoverPartitions?: (ctx: RecoverPartitionsContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `alterMaterializedViewRewrite`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
* @return the visitor result
|
||||||
|
*/
|
||||||
|
visitAlterMaterializedViewRewrite?: (ctx: AlterMaterializedViewRewriteContext) => Result;
|
||||||
|
/**
|
||||||
|
* Visit a parse tree produced by the `alterMaterializedViewProperties`
|
||||||
|
* labeled alternative in `SparkSqlParser.statement`.
|
||||||
|
* @param ctx the parse tree
|
||||||
|
+     * @return the visitor result
+     */
+    visitAlterMaterializedViewProperties?: (ctx: AlterMaterializedViewPropertiesContext) => Result;
+    /** Visit the `dropTable` labeled alternative in `SparkSqlParser.statement`. */
+    visitDropTable?: (ctx: DropTableContext) => Result;
+    /** Visit the `dropView` labeled alternative in `SparkSqlParser.statement`. */
+    visitDropView?: (ctx: DropViewContext) => Result;
+    /** Visit the `dropMaterializedView` labeled alternative in `SparkSqlParser.statement`. */
+    visitDropMaterializedView?: (ctx: DropMaterializedViewContext) => Result;
+    /** Visit the `createView` labeled alternative in `SparkSqlParser.statement`. */
+    visitCreateView?: (ctx: CreateViewContext) => Result;
+    /** Visit the `createTempViewUsing` labeled alternative in `SparkSqlParser.statement`. */
+    visitCreateTempViewUsing?: (ctx: CreateTempViewUsingContext) => Result;
+    /** Visit the `alterViewQuery` labeled alternative in `SparkSqlParser.statement`. */
+    visitAlterViewQuery?: (ctx: AlterViewQueryContext) => Result;
+    /** Visit the `createFunction` labeled alternative in `SparkSqlParser.statement`. */
+    visitCreateFunction?: (ctx: CreateFunctionContext) => Result;
+    /** Visit the `createMaterializedView` labeled alternative in `SparkSqlParser.statement`. */
+    visitCreateMaterializedView?: (ctx: CreateMaterializedViewContext) => Result;
+    /** Visit the `dropFunction` labeled alternative in `SparkSqlParser.statement`. */
+    visitDropFunction?: (ctx: DropFunctionContext) => Result;
+    /** Visit the `declareVariable` labeled alternative in `SparkSqlParser.statement`. */
+    visitDeclareVariable?: (ctx: DeclareVariableContext) => Result;
+    /** Visit the `dropVariable` labeled alternative in `SparkSqlParser.statement`. */
+    visitDropVariable?: (ctx: DropVariableContext) => Result;
+    /** Visit the `explainStatement` labeled alternative in `SparkSqlParser.statement`. */
+    visitExplainStatement?: (ctx: ExplainStatementContext) => Result;
+    /** Visit the `showTables` labeled alternative in `SparkSqlParser.statement`. */
+    visitShowTables?: (ctx: ShowTablesContext) => Result;
+    /** Visit the `showTableExtended` labeled alternative in `SparkSqlParser.statement`. */
+    visitShowTableExtended?: (ctx: ShowTableExtendedContext) => Result;
+    /** Visit the `showTblProperties` labeled alternative in `SparkSqlParser.statement`. */
+    visitShowTblProperties?: (ctx: ShowTblPropertiesContext) => Result;
+    /** Visit the `showColumns` labeled alternative in `SparkSqlParser.statement`. */
+    visitShowColumns?: (ctx: ShowColumnsContext) => Result;
+    /** Visit the `showViews` labeled alternative in `SparkSqlParser.statement`. */
+    visitShowViews?: (ctx: ShowViewsContext) => Result;
+    /** Visit the `showPartitions` labeled alternative in `SparkSqlParser.statement`. */
+    visitShowPartitions?: (ctx: ShowPartitionsContext) => Result;
+    /** Visit the `showFunctions` labeled alternative in `SparkSqlParser.statement`. */
+    visitShowFunctions?: (ctx: ShowFunctionsContext) => Result;
+    /** Visit the `showCreateTable` labeled alternative in `SparkSqlParser.statement`. */
+    visitShowCreateTable?: (ctx: ShowCreateTableContext) => Result;
+    /** Visit the `showCurrentNamespace` labeled alternative in `SparkSqlParser.statement`. */
+    visitShowCurrentNamespace?: (ctx: ShowCurrentNamespaceContext) => Result;
+    /** Visit the `showCatalogs` labeled alternative in `SparkSqlParser.statement`. */
+    visitShowCatalogs?: (ctx: ShowCatalogsContext) => Result;
+    /** Visit the `showMaterializedViews` labeled alternative in `SparkSqlParser.statement`. */
+    visitShowMaterializedViews?: (ctx: ShowMaterializedViewsContext) => Result;
+    /** Visit the `showCreateMaterializedView` labeled alternative in `SparkSqlParser.statement`. */
+    visitShowCreateMaterializedView?: (ctx: ShowCreateMaterializedViewContext) => Result;
+    /** Visit the `describeFunction` labeled alternative in `SparkSqlParser.statement`. */
+    visitDescribeFunction?: (ctx: DescribeFunctionContext) => Result;
+    /** Visit the `describeNamespace` labeled alternative in `SparkSqlParser.statement`. */
+    visitDescribeNamespace?: (ctx: DescribeNamespaceContext) => Result;
+    /** Visit the `describeRelation` labeled alternative in `SparkSqlParser.statement`. */
+    visitDescribeRelation?: (ctx: DescribeRelationContext) => Result;
+    /** Visit the `describeQuery` labeled alternative in `SparkSqlParser.statement`. */
+    visitDescribeQuery?: (ctx: DescribeQueryContext) => Result;
+    /** Visit the `commentNamespace` labeled alternative in `SparkSqlParser.statement`. */
+    visitCommentNamespace?: (ctx: CommentNamespaceContext) => Result;
+    /** Visit the `commentTable` labeled alternative in `SparkSqlParser.statement`. */
+    visitCommentTable?: (ctx: CommentTableContext) => Result;
+    /** Visit the `refreshTable` labeled alternative in `SparkSqlParser.statement`. */
+    visitRefreshTable?: (ctx: RefreshTableContext) => Result;
+    /** Visit the `refreshFunction` labeled alternative in `SparkSqlParser.statement`. */
+    visitRefreshFunction?: (ctx: RefreshFunctionContext) => Result;
+    /** Visit the `refreshResource` labeled alternative in `SparkSqlParser.statement`. */
+    visitRefreshResource?: (ctx: RefreshResourceContext) => Result;
+    /** Visit the `refreshMaterializedView` labeled alternative in `SparkSqlParser.statement`. */
+    visitRefreshMaterializedView?: (ctx: RefreshMaterializedViewContext) => Result;
+    /** Visit the `cacheTable` labeled alternative in `SparkSqlParser.statement`. */
+    visitCacheTable?: (ctx: CacheTableContext) => Result;
+    /** Visit the `unCacheTable` labeled alternative in `SparkSqlParser.statement`. */
+    visitUnCacheTable?: (ctx: UnCacheTableContext) => Result;
+    /** Visit the `clearCache` labeled alternative in `SparkSqlParser.statement`. */
+    visitClearCache?: (ctx: ClearCacheContext) => Result;
+    /** Visit the `loadData` labeled alternative in `SparkSqlParser.statement`. */
+    visitLoadData?: (ctx: LoadDataContext) => Result;
+    /** Visit the `truncateTable` labeled alternative in `SparkSqlParser.statement`. */
+    visitTruncateTable?: (ctx: TruncateTableContext) => Result;
+    /** Visit the `repairTable` labeled alternative in `SparkSqlParser.statement`. */
+    visitRepairTable?: (ctx: RepairTableContext) => Result;
+    /** Visit the `manageResource` labeled alternative in `SparkSqlParser.statement`. */
+    visitManageResource?: (ctx: ManageResourceContext) => Result;
+    /** Visit the `failNativeCommand` labeled alternative in `SparkSqlParser.statement`. */
+    visitFailNativeCommand?: (ctx: FailNativeCommandContext) => Result;
+    /** Visit the `setTimeZoneInterval` labeled alternative in `SparkSqlParser.statement`. */
+    visitSetTimeZoneInterval?: (ctx: SetTimeZoneIntervalContext) => Result;
+    /** Visit the `setTimeZone` labeled alternative in `SparkSqlParser.statement`. */
+    visitSetTimeZone?: (ctx: SetTimeZoneContext) => Result;
+    /** Visit the `setTimeZoneAny` labeled alternative in `SparkSqlParser.statement`. */
+    visitSetTimeZoneAny?: (ctx: SetTimeZoneAnyContext) => Result;
+    /** Visit the `setVariableAssignment` labeled alternative in `SparkSqlParser.statement`. */
+    visitSetVariableAssignment?: (ctx: SetVariableAssignmentContext) => Result;
+    /** Visit the `setVariableMultiAssignment` labeled alternative in `SparkSqlParser.statement`. */
+    visitSetVariableMultiAssignment?: (ctx: SetVariableMultiAssignmentContext) => Result;
+    /** Visit the `setConfig` labeled alternative in `SparkSqlParser.statement`. */
+    visitSetConfig?: (ctx: SetConfigContext) => Result;
+    /** Visit the `setConfigAndValue` labeled alternative in `SparkSqlParser.statement`. */
+    visitSetConfigAndValue?: (ctx: SetConfigAndValueContext) => Result;
+    /** Visit the `setConfigAnyKey` labeled alternative in `SparkSqlParser.statement`. */
+    visitSetConfigAnyKey?: (ctx: SetConfigAnyKeyContext) => Result;
+    /** Visit the `setAny` labeled alternative in `SparkSqlParser.statement`. */
+    visitSetAny?: (ctx: SetAnyContext) => Result;
+    /** Visit the `resetConfig` labeled alternative in `SparkSqlParser.statement`. */
+    visitResetConfig?: (ctx: ResetConfigContext) => Result;
+    /** Visit the `resetAny` labeled alternative in `SparkSqlParser.statement`. */
+    visitResetAny?: (ctx: ResetAnyContext) => Result;
+    /** Visit the `createIndex` labeled alternative in `SparkSqlParser.statement`. */
+    visitCreateIndex?: (ctx: CreateIndexContext) => Result;
+    /** Visit the `dropIndex` labeled alternative in `SparkSqlParser.statement`. */
+    visitDropIndex?: (ctx: DropIndexContext) => Result;
+    /** Visit the `optimizeTable` labeled alternative in `SparkSqlParser.statement`. */
+    visitOptimizeTable?: (ctx: OptimizeTableContext) => Result;
+    /** Visit the `unsupportHiveCommands` labeled alternative in `SparkSqlParser.statement`. */
+    visitUnsupportHiveCommands?: (ctx: UnsupportHiveCommandsContext) => Result;
     /**
      * Visit a parse tree produced by `SparkSqlParser.timezone`.
      * @param ctx the parse tree
@@ -291,11 +1019,12 @@ export class SparkSqlParserVisitor<Result> extends AbstractParseTreeVisitor<Resu
      */
     visitCommentSpec?: (ctx: CommentSpecContext) => Result;
     /**
-     * Visit a parse tree produced by `SparkSqlParser.query`.
+     * Visit a parse tree produced by the `queryStatement`
+     * labeled alternative in `SparkSqlParser.query`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitQuery?: (ctx: QueryContext) => Result;
+    visitQueryStatement?: (ctx: QueryStatementContext) => Result;
     /**
      * Visit a parse tree produced by `SparkSqlParser.insertInto`.
      * @param ctx the parse tree
@@ -321,17 +1050,17 @@ export class SparkSqlParserVisitor<Result> extends AbstractParseTreeVisitor<Resu
      */
     visitPartitionVal?: (ctx: PartitionValContext) => Result;
     /**
-     * Visit a parse tree produced by `SparkSqlParser.dbSchema`.
+     * Visit a parse tree produced by `SparkSqlParser.namespace`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitDbSchema?: (ctx: DbSchemaContext) => Result;
+    visitNamespace?: (ctx: NamespaceContext) => Result;
     /**
-     * Visit a parse tree produced by `SparkSqlParser.dbSchemas`.
+     * Visit a parse tree produced by `SparkSqlParser.namespaces`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitDbSchemas?: (ctx: DbSchemasContext) => Result;
+    visitNamespaces?: (ctx: NamespacesContext) => Result;
     /**
      * Visit a parse tree produced by `SparkSqlParser.describeFuncName`.
      * @param ctx the parse tree
@@ -441,23 +1170,52 @@ export class SparkSqlParserVisitor<Result> extends AbstractParseTreeVisitor<Resu
      */
     visitResource?: (ctx: ResourceContext) => Result;
     /**
-     * Visit a parse tree produced by `SparkSqlParser.dmlStatementNoWith`.
+     * Visit a parse tree produced by the `insertFromQuery`
+     * labeled alternative in `SparkSqlParser.dmlStatementNoWith`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitDmlStatementNoWith?: (ctx: DmlStatementNoWithContext) => Result;
+    visitInsertFromQuery?: (ctx: InsertFromQueryContext) => Result;
     /**
-     * Visit a parse tree produced by `SparkSqlParser.dbSchemaName`.
+     * Visit a parse tree produced by the `multipleInsert`
+     * labeled alternative in `SparkSqlParser.dmlStatementNoWith`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitDbSchemaName?: (ctx: DbSchemaNameContext) => Result;
+    visitMultipleInsert?: (ctx: MultipleInsertContext) => Result;
     /**
-     * Visit a parse tree produced by `SparkSqlParser.dbSchemaNameCreate`.
+     * Visit a parse tree produced by the `deleteFromTable`
+     * labeled alternative in `SparkSqlParser.dmlStatementNoWith`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitDbSchemaNameCreate?: (ctx: DbSchemaNameCreateContext) => Result;
+    visitDeleteFromTable?: (ctx: DeleteFromTableContext) => Result;
+    /**
+     * Visit a parse tree produced by the `updateTable`
+     * labeled alternative in `SparkSqlParser.dmlStatementNoWith`.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    visitUpdateTable?: (ctx: UpdateTableContext) => Result;
+    /**
+     * Visit a parse tree produced by the `mergeIntoTable`
+     * labeled alternative in `SparkSqlParser.dmlStatementNoWith`.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    visitMergeIntoTable?: (ctx: MergeIntoTableContext) => Result;
+    /**
+     * Visit a parse tree produced by `SparkSqlParser.namespaceName`.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    visitNamespaceName?: (ctx: NamespaceNameContext) => Result;
+    /**
+     * Visit a parse tree produced by `SparkSqlParser.namespaceNameCreate`.
+     * @param ctx the parse tree
+     * @return the visitor result
+     */
+    visitNamespaceNameCreate?: (ctx: NamespaceNameCreateContext) => Result;
     /**
      * Visit a parse tree produced by `SparkSqlParser.tableNameCreate`.
      * @param ctx the parse tree
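The renamed `visitQueryStatement` hook and the labeled-alternative hooks added above are what downstream code (such as the entity collectors in this PR) can subscribe to. A minimal sketch of that pattern follows; the import paths and the `StatementKindVisitor` class are illustrative assumptions, not code from this commit.

```typescript
// Illustrative sketch only: file paths and the class below are assumptions.
import { SparkSqlParserVisitor } from "./SparkSqlParserVisitor";
import type { QueryStatementContext, InsertFromQueryContext } from "./SparkSqlParser";

// Tags each statement kind it sees; generated visitor methods are optional
// properties, so overriding only the hooks we care about is enough.
export class StatementKindVisitor extends SparkSqlParserVisitor<string[]> {
    protected defaultResult(): string[] {
        return [];
    }

    protected aggregateResult(aggregate: string[], next: string[]): string[] {
        return aggregate.concat(next);
    }

    // Replaces the old visitQuery hook.
    visitQueryStatement = (ctx: QueryStatementContext): string[] => [
        "queryStatement",
        ...(this.visitChildren(ctx) ?? []),
    ];

    // One of the new labeled alternatives of dmlStatementNoWith.
    visitInsertFromQuery = (ctx: InsertFromQueryContext): string[] => [
        "insertFromQuery",
        ...(this.visitChildren(ctx) ?? []),
    ];
}
```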
File diff suppressed because one or more lines are too long
@@ -4,6 +4,9 @@ import * as antlr from "antlr4ng";
 import { Token } from "antlr4ng";
 
+
+import SQLParserBase from '../SQLParserBase';
+
 
 export class TrinoSqlLexer extends antlr.Lexer {
     public static readonly T__0 = 1;
     public static readonly T__1 = 2;
@@ -3,6 +3,9 @@
 import { ErrorNode, ParseTreeListener, ParserRuleContext, TerminalNode } from "antlr4ng";
 
+
+import SQLParserBase from '../SQLParserBase';
+
 
 import { ProgramContext } from "./TrinoSqlParser.js";
 import { StatementsContext } from "./TrinoSqlParser.js";
 import { StandaloneClauseContext } from "./TrinoSqlParser.js";
@@ -83,7 +86,7 @@ import { UpdateContext } from "./TrinoSqlParser.js";
 import { MergeContext } from "./TrinoSqlParser.js";
 import { ShowTableCommentContext } from "./TrinoSqlParser.js";
 import { ShowColumnCommentContext } from "./TrinoSqlParser.js";
-import { QueryContext } from "./TrinoSqlParser.js";
+import { QueryStatementContext } from "./TrinoSqlParser.js";
 import { WithContext } from "./TrinoSqlParser.js";
 import { TableElementContext } from "./TrinoSqlParser.js";
 import { ColumnDefinitionContext } from "./TrinoSqlParser.js";
@@ -1233,15 +1236,17 @@ export class TrinoSqlListener implements ParseTreeListener {
      */
     exitShowColumnComment?: (ctx: ShowColumnCommentContext) => void;
     /**
-     * Enter a parse tree produced by `TrinoSqlParser.query`.
+     * Enter a parse tree produced by the `queryStatement`
+     * labeled alternative in `TrinoSqlParser.query`.
      * @param ctx the parse tree
      */
-    enterQuery?: (ctx: QueryContext) => void;
+    enterQueryStatement?: (ctx: QueryStatementContext) => void;
     /**
-     * Exit a parse tree produced by `TrinoSqlParser.query`.
+     * Exit a parse tree produced by the `queryStatement`
+     * labeled alternative in `TrinoSqlParser.query`.
      * @param ctx the parse tree
      */
-    exitQuery?: (ctx: QueryContext) => void;
+    exitQueryStatement?: (ctx: QueryStatementContext) => void;
     /**
      * Enter a parse tree produced by `TrinoSqlParser.with`.
      * @param ctx the parse tree
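For listener-style consumers the same rename applies: `enterQuery`/`exitQuery` become `enterQueryStatement`/`exitQueryStatement`. A hedged example of wiring a listener against the regenerated hooks follows; the collector class name and import paths are illustrative assumptions.

```typescript
// Illustrative sketch; the class and import paths are assumptions.
import { TrinoSqlListener } from "./TrinoSqlListener";
import type { QueryStatementContext } from "./TrinoSqlParser";

export class QueryCounterListener extends TrinoSqlListener {
    public queryCount = 0;

    // Formerly enterQuery; the context type is now QueryStatementContext.
    enterQueryStatement = (_ctx: QueryStatementContext): void => {
        this.queryCount += 1;
    };
}
```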
@@ -11,7 +11,10 @@ import { TrinoSqlVisitor } from "./TrinoSqlVisitor.js";
 type int = number;
 
 
-export class TrinoSqlParser extends antlr.Parser {
+import SQLParserBase from '../SQLParserBase';
+
+
+export class TrinoSqlParser extends SQLParserBase {
     public static readonly T__0 = 1;
     public static readonly T__1 = 2;
     public static readonly T__2 = 3;
@@ -3358,6 +3361,7 @@ export class TrinoSqlParser extends antlr.Parser {
         this.enterRule(localContext, 18, TrinoSqlParser.RULE_query);
         let _la: number;
         try {
+            localContext = new QueryStatementContext(localContext);
             this.enterOuterAlt(localContext, 1);
             {
             this.state = 1039;
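The generated parser now extends `SQLParserBase` (wired through the grammar's `superClass` option and `@header` import), which is where helpers such as `shouldMatchEmpty()` live. The base class itself is not part of this diff, so the sketch below is only a rough approximation of its shape; the `caretTokenIndex` field and the predicate logic are assumptions.

```typescript
// Rough approximation only; the real SQLParserBase is not shown in this diff.
import { Parser, TokenStream } from "antlr4ng";

export default abstract class SQLParserBase extends Parser {
    // Assumed: index of the token under the caret when collecting suggestions,
    // or -1 when parsing normally.
    public caretTokenIndex = -1;

    public constructor(input: TokenStream) {
        super(input);
    }

    // Semantic predicate referenced by rules such as `columnName`: only allow
    // the "empty" alternative while a caret position is being completed, so
    // normal parses still require a real column reference.
    public shouldMatchEmpty(): boolean {
        return this.caretTokenIndex >= 0;
    }
}
```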
@@ -4668,8 +4672,8 @@ export class TrinoSqlParser extends antlr.Parser {
             this.match(TrinoSqlParser.T__0);
             this.state = 1258;
             this.errorHandler.sync(this);
-            _la = this.tokenStream.LA(1);
-            if ((((_la) & ~0x1F) === 0 && ((1 << _la) & 2069757954) !== 0) || ((((_la - 33)) & ~0x1F) === 0 && ((1 << (_la - 33)) & 1476117503) !== 0) || ((((_la - 65)) & ~0x1F) === 0 && ((1 << (_la - 65)) & 2120217677) !== 0) || ((((_la - 97)) & ~0x1F) === 0 && ((1 << (_la - 97)) & 4252345787) !== 0) || ((((_la - 129)) & ~0x1F) === 0 && ((1 << (_la - 129)) & 1325399551) !== 0) || ((((_la - 161)) & ~0x1F) === 0 && ((1 << (_la - 161)) & 3153981439) !== 0) || ((((_la - 193)) & ~0x1F) === 0 && ((1 << (_la - 193)) & 4286054271) !== 0) || ((((_la - 225)) & ~0x1F) === 0 && ((1 << (_la - 225)) & 3237637037) !== 0) || ((((_la - 261)) & ~0x1F) === 0 && ((1 << (_la - 261)) & 2047) !== 0)) {
+            switch (this.interpreter.adaptivePredict(this.tokenStream, 157, this.context) ) {
+            case 1:
                 {
                 this.state = 1250;
                 this.groupingTerm();
@@ -4690,8 +4694,8 @@ export class TrinoSqlParser extends antlr.Parser {
                 _la = this.tokenStream.LA(1);
                 }
                 }
+                break;
             }
 
             this.state = 1260;
             this.match(TrinoSqlParser.T__1);
             }
@@ -4706,8 +4710,8 @@ export class TrinoSqlParser extends antlr.Parser {
             this.match(TrinoSqlParser.T__0);
             this.state = 1271;
             this.errorHandler.sync(this);
-            _la = this.tokenStream.LA(1);
-            if ((((_la) & ~0x1F) === 0 && ((1 << _la) & 2069757954) !== 0) || ((((_la - 33)) & ~0x1F) === 0 && ((1 << (_la - 33)) & 1476117503) !== 0) || ((((_la - 65)) & ~0x1F) === 0 && ((1 << (_la - 65)) & 2120217677) !== 0) || ((((_la - 97)) & ~0x1F) === 0 && ((1 << (_la - 97)) & 4252345787) !== 0) || ((((_la - 129)) & ~0x1F) === 0 && ((1 << (_la - 129)) & 1325399551) !== 0) || ((((_la - 161)) & ~0x1F) === 0 && ((1 << (_la - 161)) & 3153981439) !== 0) || ((((_la - 193)) & ~0x1F) === 0 && ((1 << (_la - 193)) & 4286054271) !== 0) || ((((_la - 225)) & ~0x1F) === 0 && ((1 << (_la - 225)) & 3237637037) !== 0) || ((((_la - 261)) & ~0x1F) === 0 && ((1 << (_la - 261)) & 2047) !== 0)) {
+            switch (this.interpreter.adaptivePredict(this.tokenStream, 159, this.context) ) {
+            case 1:
                 {
                 this.state = 1263;
                 this.groupingTerm();
@@ -4728,8 +4732,8 @@ export class TrinoSqlParser extends antlr.Parser {
                 _la = this.tokenStream.LA(1);
                 }
                 }
+                break;
             }
 
             this.state = 1273;
             this.match(TrinoSqlParser.T__1);
             }
@@ -4797,8 +4801,8 @@ export class TrinoSqlParser extends antlr.Parser {
             this.match(TrinoSqlParser.T__0);
             this.state = 1298;
             this.errorHandler.sync(this);
-            _la = this.tokenStream.LA(1);
-            if ((((_la) & ~0x1F) === 0 && ((1 << _la) & 2069757954) !== 0) || ((((_la - 33)) & ~0x1F) === 0 && ((1 << (_la - 33)) & 1476117503) !== 0) || ((((_la - 65)) & ~0x1F) === 0 && ((1 << (_la - 65)) & 2120217677) !== 0) || ((((_la - 97)) & ~0x1F) === 0 && ((1 << (_la - 97)) & 4252345787) !== 0) || ((((_la - 129)) & ~0x1F) === 0 && ((1 << (_la - 129)) & 1325399551) !== 0) || ((((_la - 161)) & ~0x1F) === 0 && ((1 << (_la - 161)) & 3153981439) !== 0) || ((((_la - 193)) & ~0x1F) === 0 && ((1 << (_la - 193)) & 4286054271) !== 0) || ((((_la - 225)) & ~0x1F) === 0 && ((1 << (_la - 225)) & 3237637037) !== 0) || ((((_la - 261)) & ~0x1F) === 0 && ((1 << (_la - 261)) & 2047) !== 0)) {
+            switch (this.interpreter.adaptivePredict(this.tokenStream, 163, this.context) ) {
+            case 1:
                 {
                 this.state = 1290;
                 this.groupingTerm();
@@ -4819,8 +4823,8 @@ export class TrinoSqlParser extends antlr.Parser {
                 _la = this.tokenStream.LA(1);
                 }
                 }
+                break;
             }
 
             this.state = 1300;
             this.match(TrinoSqlParser.T__1);
             }
@@ -10958,10 +10962,25 @@ export class TrinoSqlParser extends antlr.Parser {
         let localContext = new ColumnNameContext(this.context, this.state);
         this.enterRule(localContext, 200, TrinoSqlParser.RULE_columnName);
         try {
-            this.enterOuterAlt(localContext, 1);
-            {
-            this.state = 2549;
-            this.qualifiedName();
+            this.state = 2551;
+            this.errorHandler.sync(this);
+            switch (this.interpreter.adaptivePredict(this.tokenStream, 330, this.context) ) {
+            case 1:
+                this.enterOuterAlt(localContext, 1);
+                {
+                this.state = 2549;
+                this.qualifiedName();
+                }
+                break;
+            case 2:
+                this.enterOuterAlt(localContext, 2);
+                {
+                this.state = 2550;
+                if (!(this.shouldMatchEmpty())) {
+                    throw this.createFailedPredicateException("this.shouldMatchEmpty()");
+                }
+                }
+                break;
             }
         }
         catch (re) {
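With the predicate in place, the `columnName` decision above can take the empty alternative instead of forcing an error token, which is what keeps entity collection and suggestion working on incomplete statements. A hypothetical end-to-end call is sketched below; the SQL text and the setup are stock antlr4ng usage, not code from this commit, and only the generated class names come from this diff.

```typescript
// Hypothetical usage; stock antlr4ng setup, not code from this commit.
import { CharStream, CommonTokenStream } from "antlr4ng";
import { TrinoSqlLexer } from "./TrinoSqlLexer";
import { TrinoSqlParser } from "./TrinoSqlParser";

const input = CharStream.fromString("SELECT id, FROM orders;");
const lexer = new TrinoSqlLexer(input);
const parser = new TrinoSqlParser(new CommonTokenStream(lexer));

// TrinoSqlParser now extends SQLParserBase, so base-class hooks such as
// shouldMatchEmpty() are available on the instance; when that predicate is
// satisfied, rules that reference columnName may match an empty column
// position instead of derailing recovery for the rest of the statement.
const tree = parser.program(); // ProgramContext for the whole (incomplete) script
```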
@@ -10984,7 +11003,7 @@ export class TrinoSqlParser extends antlr.Parser {
         try {
             this.enterOuterAlt(localContext, 1);
             {
-            this.state = 2551;
+            this.state = 2553;
             this.identifier();
             }
         }
@@ -11009,25 +11028,25 @@ export class TrinoSqlParser extends antlr.Parser {
         let alternative: number;
         this.enterOuterAlt(localContext, 1);
         {
-        this.state = 2553;
+        this.state = 2555;
         this.identifier();
-        this.state = 2558;
+        this.state = 2560;
         this.errorHandler.sync(this);
-        alternative = this.interpreter.adaptivePredict(this.tokenStream, 330, this.context);
+        alternative = this.interpreter.adaptivePredict(this.tokenStream, 331, this.context);
         while (alternative !== 2 && alternative !== antlr.ATN.INVALID_ALT_NUMBER) {
             if (alternative === 1) {
                 {
                 {
-                this.state = 2554;
+                this.state = 2556;
                 this.match(TrinoSqlParser.T__3);
-                this.state = 2555;
+                this.state = 2557;
                 this.identifier();
                 }
                 }
             }
-            this.state = 2560;
+            this.state = 2562;
             this.errorHandler.sync(this);
-            alternative = this.interpreter.adaptivePredict(this.tokenStream, 330, this.context);
+            alternative = this.interpreter.adaptivePredict(this.tokenStream, 331, this.context);
         }
         }
         }
@@ -11049,7 +11068,7 @@ export class TrinoSqlParser extends antlr.Parser {
         let localContext = new GrantorContext(this.context, this.state);
         this.enterRule(localContext, 206, TrinoSqlParser.RULE_grantor);
         try {
-            this.state = 2564;
+            this.state = 2566;
             this.errorHandler.sync(this);
             switch (this.tokenStream.LA(1)) {
             case TrinoSqlParser.KW_ADD:
@@ -11218,7 +11237,7 @@ export class TrinoSqlParser extends antlr.Parser {
             localContext = new SpecifiedPrincipalContext(localContext);
             this.enterOuterAlt(localContext, 1);
             {
-            this.state = 2561;
+            this.state = 2563;
             this.principal();
             }
             break;
@@ -11226,7 +11245,7 @@ export class TrinoSqlParser extends antlr.Parser {
             localContext = new CurrentUserGrantorContext(localContext);
             this.enterOuterAlt(localContext, 2);
             {
-            this.state = 2562;
+            this.state = 2564;
             this.match(TrinoSqlParser.KW_CURRENT_USER);
             }
             break;
@@ -11234,7 +11253,7 @@ export class TrinoSqlParser extends antlr.Parser {
             localContext = new CurrentRoleGrantorContext(localContext);
             this.enterOuterAlt(localContext, 3);
             {
-            this.state = 2563;
+            this.state = 2565;
             this.match(TrinoSqlParser.KW_CURRENT_ROLE);
             }
             break;
@@ -11260,14 +11279,14 @@ export class TrinoSqlParser extends antlr.Parser {
         let localContext = new PrincipalContext(this.context, this.state);
         this.enterRule(localContext, 208, TrinoSqlParser.RULE_principal);
         try {
-            this.state = 2571;
+            this.state = 2573;
             this.errorHandler.sync(this);
-            switch (this.interpreter.adaptivePredict(this.tokenStream, 332, this.context) ) {
+            switch (this.interpreter.adaptivePredict(this.tokenStream, 333, this.context) ) {
             case 1:
                 localContext = new UnspecifiedPrincipalContext(localContext);
                 this.enterOuterAlt(localContext, 1);
                 {
-                this.state = 2566;
+                this.state = 2568;
                 this.identifier();
                 }
                 break;
@@ -11275,9 +11294,9 @@ export class TrinoSqlParser extends antlr.Parser {
                 localContext = new UserPrincipalContext(localContext);
                 this.enterOuterAlt(localContext, 2);
                 {
-                this.state = 2567;
+                this.state = 2569;
                 this.match(TrinoSqlParser.KW_USER);
-                this.state = 2568;
+                this.state = 2570;
                 this.identifier();
                 }
                 break;
@@ -11285,9 +11304,9 @@ export class TrinoSqlParser extends antlr.Parser {
                 localContext = new RolePrincipalContext(localContext);
                 this.enterOuterAlt(localContext, 3);
                 {
-                this.state = 2569;
+                this.state = 2571;
                 this.match(TrinoSqlParser.KW_ROLE);
-                this.state = 2570;
+                this.state = 2572;
                 this.identifier();
                 }
                 break;
@@ -11314,21 +11333,21 @@ export class TrinoSqlParser extends antlr.Parser {
         try {
             this.enterOuterAlt(localContext, 1);
             {
-            this.state = 2573;
+            this.state = 2575;
             this.identifier();
-            this.state = 2578;
+            this.state = 2580;
             this.errorHandler.sync(this);
             _la = this.tokenStream.LA(1);
             while (_la === 3) {
                 {
                 {
-                this.state = 2574;
+                this.state = 2576;
                 this.match(TrinoSqlParser.T__2);
-                this.state = 2575;
+                this.state = 2577;
                 this.identifier();
                 }
                 }
-                this.state = 2580;
+                this.state = 2582;
                 this.errorHandler.sync(this);
                 _la = this.tokenStream.LA(1);
             }
@@ -11352,14 +11371,14 @@ export class TrinoSqlParser extends antlr.Parser {
         let localContext = new IdentifierContext(this.context, this.state);
         this.enterRule(localContext, 212, TrinoSqlParser.RULE_identifier);
         try {
-            this.state = 2586;
+            this.state = 2588;
             this.errorHandler.sync(this);
             switch (this.tokenStream.LA(1)) {
             case TrinoSqlParser.IDENTIFIER:
                 localContext = new UnquotedIdentifierContext(localContext);
                 this.enterOuterAlt(localContext, 1);
                 {
-                this.state = 2581;
+                this.state = 2583;
                 this.match(TrinoSqlParser.IDENTIFIER);
                 }
                 break;
@@ -11367,7 +11386,7 @@ export class TrinoSqlParser extends antlr.Parser {
                 localContext = new QuotedIdentifierContext(localContext);
                 this.enterOuterAlt(localContext, 2);
                 {
-                this.state = 2582;
+                this.state = 2584;
                 this.match(TrinoSqlParser.QUOTED_IDENTIFIER);
                 }
                 break;
@@ -11533,7 +11552,7 @@ export class TrinoSqlParser extends antlr.Parser {
                 localContext = new UnquotedIdentifierContext(localContext);
                 this.enterOuterAlt(localContext, 3);
                 {
-                this.state = 2583;
+                this.state = 2585;
                 this.nonReserved();
                 }
                 break;
@@ -11541,7 +11560,7 @@ export class TrinoSqlParser extends antlr.Parser {
                 localContext = new BackQuotedIdentifierContext(localContext);
                 this.enterOuterAlt(localContext, 4);
                 {
-                this.state = 2584;
+                this.state = 2586;
                 this.match(TrinoSqlParser.BACKQUOTED_IDENTIFIER);
                 }
                 break;
@@ -11549,7 +11568,7 @@ export class TrinoSqlParser extends antlr.Parser {
                 localContext = new DigitIdentifierContext(localContext);
                 this.enterOuterAlt(localContext, 5);
                 {
-                this.state = 2585;
+                this.state = 2587;
                 this.match(TrinoSqlParser.DIGIT_IDENTIFIER);
                 }
                 break;
@@ -11576,24 +11595,24 @@ export class TrinoSqlParser extends antlr.Parser {
         this.enterRule(localContext, 214, TrinoSqlParser.RULE_number);
         let _la: number;
         try {
-            this.state = 2600;
+            this.state = 2602;
             this.errorHandler.sync(this);
-            switch (this.interpreter.adaptivePredict(this.tokenStream, 338, this.context) ) {
+            switch (this.interpreter.adaptivePredict(this.tokenStream, 339, this.context) ) {
             case 1:
                 localContext = new DecimalLiteralContext(localContext);
                 this.enterOuterAlt(localContext, 1);
                 {
-                this.state = 2589;
+                this.state = 2591;
                 this.errorHandler.sync(this);
                 _la = this.tokenStream.LA(1);
                 if (_la === 256) {
                     {
-                    this.state = 2588;
+                    this.state = 2590;
                     this.match(TrinoSqlParser.MINUS);
                     }
                 }
 
-                this.state = 2591;
+                this.state = 2593;
                 this.match(TrinoSqlParser.DECIMAL_VALUE);
                 }
                 break;
@@ -11601,17 +11620,17 @@ export class TrinoSqlParser extends antlr.Parser {
                 localContext = new DoubleLiteralContext(localContext);
                 this.enterOuterAlt(localContext, 2);
                 {
-                this.state = 2593;
+                this.state = 2595;
                 this.errorHandler.sync(this);
                 _la = this.tokenStream.LA(1);
                 if (_la === 256) {
                     {
-                    this.state = 2592;
+                    this.state = 2594;
                     this.match(TrinoSqlParser.MINUS);
                     }
                 }
 
-                this.state = 2595;
+                this.state = 2597;
                 this.match(TrinoSqlParser.DOUBLE_VALUE);
                 }
                 break;
@@ -11619,17 +11638,17 @@ export class TrinoSqlParser extends antlr.Parser {
                 localContext = new IntegerLiteralContext(localContext);
                 this.enterOuterAlt(localContext, 3);
                 {
-                this.state = 2597;
+                this.state = 2599;
                 this.errorHandler.sync(this);
                 _la = this.tokenStream.LA(1);
                 if (_la === 256) {
                     {
-                    this.state = 2596;
+                    this.state = 2598;
                     this.match(TrinoSqlParser.MINUS);
                     }
                 }
 
-                this.state = 2599;
+                this.state = 2601;
                 this.match(TrinoSqlParser.INTEGER_VALUE);
                 }
                 break;
@@ -11656,7 +11675,7 @@ export class TrinoSqlParser extends antlr.Parser {
         try {
             this.enterOuterAlt(localContext, 1);
             {
-            this.state = 2602;
+            this.state = 2604;
             _la = this.tokenStream.LA(1);
             if(!(((((_la - 17)) & ~0x1F) === 0 && ((1 << (_la - 17)) & 1140014511) !== 0) || ((((_la - 56)) & ~0x1F) === 0 && ((1 << (_la - 56)) & 3192429231) !== 0) || ((((_la - 90)) & ~0x1F) === 0 && ((1 << (_la - 90)) & 3134381375) !== 0) || ((((_la - 123)) & ~0x1F) === 0 && ((1 << (_la - 123)) & 3162472435) !== 0) || ((((_la - 155)) & ~0x1F) === 0 && ((1 << (_la - 155)) & 4286316499) !== 0) || ((((_la - 188)) & ~0x1F) === 0 && ((1 << (_la - 188)) & 4009750519) !== 0) || ((((_la - 220)) & ~0x1F) === 0 && ((1 << (_la - 220)) & 525170103) !== 0))) {
             this.errorHandler.recoverInline(this);
|
|||||||
return this.type_sempred(localContext as TypeContext, predIndex);
|
return this.type_sempred(localContext as TypeContext, predIndex);
|
||||||
case 76:
|
case 76:
|
||||||
return this.rowPattern_sempred(localContext as RowPatternContext, predIndex);
|
return this.rowPattern_sempred(localContext as RowPatternContext, predIndex);
|
||||||
|
case 100:
|
||||||
|
return this.columnName_sempred(localContext as ColumnNameContext, predIndex);
|
||||||
}
|
}
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
@ -11764,9 +11785,16 @@ export class TrinoSqlParser extends antlr.Parser {
|
|||||||
}
|
}
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
private columnName_sempred(localContext: ColumnNameContext | null, predIndex: number): boolean {
|
||||||
|
switch (predIndex) {
|
||||||
|
case 14:
|
||||||
|
return this.shouldMatchEmpty();
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
public static readonly _serializedATN: number[] = [
|
public static readonly _serializedATN: number[] = [
|
||||||
4,1,277,2605,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,
|
4,1,277,2607,2,0,7,0,2,1,7,1,2,2,7,2,2,3,7,3,2,4,7,4,2,5,7,5,2,6,
|
||||||
7,6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,
|
7,6,2,7,7,7,2,8,7,8,2,9,7,9,2,10,7,10,2,11,7,11,2,12,7,12,2,13,7,
|
||||||
13,2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7,19,2,
|
13,2,14,7,14,2,15,7,15,2,16,7,16,2,17,7,17,2,18,7,18,2,19,7,19,2,
|
||||||
20,7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2,26,7,
|
20,7,20,2,21,7,21,2,22,7,22,2,23,7,23,2,24,7,24,2,25,7,25,2,26,7,
|
||||||
@ -11980,192 +12008,192 @@ export class TrinoSqlParser extends antlr.Parser {
|
|||||||
1,92,1,92,1,92,1,92,1,92,1,92,1,92,1,92,1,92,1,92,1,92,3,92,2518,
|
1,92,1,92,1,92,1,92,1,92,1,92,1,92,1,92,1,92,1,92,1,92,3,92,2518,
|
||||||
8,92,1,93,1,93,1,93,1,93,1,93,1,93,1,93,1,93,1,93,1,93,1,93,3,93,
|
8,92,1,93,1,93,1,93,1,93,1,93,1,93,1,93,1,93,1,93,1,93,1,93,3,93,
|
||||||
2531,8,93,1,94,1,94,1,95,1,95,1,96,1,96,1,96,1,96,1,96,3,96,2542,
|
2531,8,93,1,94,1,94,1,95,1,95,1,96,1,96,1,96,1,96,1,96,3,96,2542,
|
||||||
8,96,1,97,1,97,1,98,1,98,1,99,1,99,1,100,1,100,1,101,1,101,1,102,
|
8,96,1,97,1,97,1,98,1,98,1,99,1,99,1,100,1,100,3,100,2552,8,100,
|
||||||
1,102,1,102,5,102,2557,8,102,10,102,12,102,2560,9,102,1,103,1,103,
|
1,101,1,101,1,102,1,102,1,102,5,102,2559,8,102,10,102,12,102,2562,
|
||||||
1,103,3,103,2565,8,103,1,104,1,104,1,104,1,104,1,104,3,104,2572,
|
9,102,1,103,1,103,1,103,3,103,2567,8,103,1,104,1,104,1,104,1,104,
|
||||||
8,104,1,105,1,105,1,105,5,105,2577,8,105,10,105,12,105,2580,9,105,
|
1,104,3,104,2574,8,104,1,105,1,105,1,105,5,105,2579,8,105,10,105,
|
||||||
1,106,1,106,1,106,1,106,1,106,3,106,2587,8,106,1,107,3,107,2590,
|
12,105,2582,9,105,1,106,1,106,1,106,1,106,1,106,3,106,2589,8,106,
|
||||||
8,107,1,107,1,107,3,107,2594,8,107,1,107,1,107,3,107,2598,8,107,
|
1,107,3,107,2592,8,107,1,107,1,107,3,107,2596,8,107,1,107,1,107,
|
||||||
1,107,3,107,2601,8,107,1,108,1,108,1,108,0,7,42,68,104,108,110,132,
|
3,107,2600,8,107,1,107,3,107,2603,8,107,1,108,1,108,1,108,0,7,42,
|
||||||
152,109,0,2,4,6,8,10,12,14,16,18,20,22,24,26,28,30,32,34,36,38,40,
|
68,104,108,110,132,152,109,0,2,4,6,8,10,12,14,16,18,20,22,24,26,
|
||||||
42,44,46,48,50,52,54,56,58,60,62,64,66,68,70,72,74,76,78,80,82,84,
|
28,30,32,34,36,38,40,42,44,46,48,50,52,54,56,58,60,62,64,66,68,70,
|
||||||
86,88,90,92,94,96,98,100,102,104,106,108,110,112,114,116,118,120,
|
72,74,76,78,80,82,84,86,88,90,92,94,96,98,100,102,104,106,108,110,
|
||||||
122,124,126,128,130,132,134,136,138,140,142,144,146,148,150,152,
|
112,114,116,118,120,122,124,126,128,130,132,134,136,138,140,142,
|
||||||
154,156,158,160,162,164,166,168,170,172,174,176,178,180,182,184,
|
144,146,148,150,152,154,156,158,160,162,164,166,168,170,172,174,
|
||||||
186,188,190,192,194,196,198,200,202,204,206,208,210,212,214,216,
|
176,178,180,182,184,186,188,190,192,194,196,198,200,202,204,206,
|
||||||
0,27,2,0,34,34,185,185,2,0,61,61,112,112,2,0,88,88,103,103,2,0,75,
|
208,210,212,214,216,0,27,2,0,34,34,185,185,2,0,61,61,112,112,2,0,
|
||||||
75,104,104,1,0,192,193,2,0,84,84,139,139,2,0,261,261,265,265,2,0,
|
88,88,103,103,2,0,75,75,104,104,1,0,192,193,2,0,84,84,139,139,2,
|
||||||
74,74,229,229,2,0,27,27,63,63,2,0,84,84,118,118,2,0,20,20,66,66,
|
0,261,261,265,265,2,0,74,74,229,229,2,0,27,27,63,63,2,0,84,84,118,
|
||||||
2,0,30,30,211,211,2,0,105,105,199,199,1,0,255,256,1,0,257,259,2,
|
118,2,0,20,20,66,66,2,0,30,30,211,211,2,0,105,105,199,199,1,0,255,
|
||||||
0,83,83,194,194,1,0,249,254,3,0,20,20,24,24,206,206,2,0,80,80,223,
|
256,1,0,257,259,2,0,83,83,194,194,1,0,249,254,3,0,20,20,24,24,206,
|
||||||
223,5,0,58,58,100,100,136,137,197,197,247,247,1,0,140,143,2,0,85,
|
206,2,0,80,80,223,223,5,0,58,58,100,100,136,137,197,197,247,247,
|
||||||
85,171,171,3,0,95,95,117,117,215,215,4,0,67,67,113,113,127,127,236,
|
1,0,140,143,2,0,85,85,171,171,3,0,95,95,117,117,215,215,4,0,67,67,
|
||||||
236,2,0,155,155,246,246,4,0,62,62,108,108,200,200,232,232,49,0,17,
|
113,113,127,127,236,236,2,0,155,155,246,246,4,0,62,62,108,108,200,
|
||||||
20,22,22,24,25,27,30,33,34,37,42,47,47,56,59,61,61,63,63,65,65,67,
|
200,232,232,49,0,17,20,22,22,24,25,27,30,33,34,37,42,47,47,56,59,
|
||||||
68,71,71,75,75,78,78,81,85,87,87,90,95,98,98,100,102,104,105,107,
|
61,61,63,63,65,65,67,68,71,71,75,75,78,78,81,85,87,87,90,95,98,98,
|
||||||
107,110,110,112,113,115,115,117,119,121,121,123,124,127,137,139,
|
100,102,104,105,107,107,110,110,112,113,115,115,117,119,121,121,
|
||||||
145,149,152,154,156,159,159,161,172,174,177,179,186,188,190,192,
|
123,124,127,137,139,145,149,152,154,156,159,159,161,172,174,177,
|
||||||
199,201,211,213,215,217,222,224,225,227,228,230,230,232,234,236,
|
179,186,188,190,192,199,201,211,213,215,217,222,224,225,227,228,
|
||||||
236,238,239,242,242,244,248,3005,0,221,1,0,0,0,2,226,1,0,0,0,4,232,
|
230,230,232,234,236,236,238,239,242,242,244,248,3008,0,221,1,0,0,
|
||||||
1,0,0,0,6,234,1,0,0,0,8,238,1,0,0,0,10,242,1,0,0,0,12,246,1,0,0,
|
0,2,226,1,0,0,0,4,232,1,0,0,0,6,234,1,0,0,0,8,238,1,0,0,0,10,242,
|
||||||
0,14,250,1,0,0,0,16,1036,1,0,0,0,18,1039,1,0,0,0,20,1043,1,0,0,0,
|
1,0,0,0,12,246,1,0,0,0,14,250,1,0,0,0,16,1036,1,0,0,0,18,1039,1,
|
||||||
22,1057,1,0,0,0,24,1059,1,0,0,0,26,1073,1,0,0,0,28,1079,1,0,0,0,
|
0,0,0,20,1043,1,0,0,0,22,1057,1,0,0,0,24,1059,1,0,0,0,26,1073,1,
|
||||||
30,1083,1,0,0,0,32,1091,1,0,0,0,34,1097,1,0,0,0,36,1099,1,0,0,0,
|
0,0,0,28,1079,1,0,0,0,30,1083,1,0,0,0,32,1091,1,0,0,0,34,1097,1,
|
||||||
38,1136,1,0,0,0,40,1138,1,0,0,0,42,1140,1,0,0,0,44,1176,1,0,0,0,
|
0,0,0,36,1099,1,0,0,0,38,1136,1,0,0,0,40,1138,1,0,0,0,42,1140,1,
|
||||||
46,1180,1,0,0,0,48,1189,1,0,0,0,50,1237,1,0,0,0,52,1287,1,0,0,0,
|
0,0,0,44,1176,1,0,0,0,46,1180,1,0,0,0,48,1189,1,0,0,0,50,1237,1,
|
||||||
54,1302,1,0,0,0,56,1306,1,0,0,0,58,1308,1,0,0,0,60,1315,1,0,0,0,
|
0,0,0,52,1287,1,0,0,0,54,1302,1,0,0,0,56,1306,1,0,0,0,58,1308,1,
|
||||||
62,1344,1,0,0,0,64,1353,1,0,0,0,66,1373,1,0,0,0,68,1375,1,0,0,0,
|
0,0,0,60,1315,1,0,0,0,62,1344,1,0,0,0,64,1353,1,0,0,0,66,1373,1,
|
||||||
70,1414,1,0,0,0,72,1430,1,0,0,0,74,1432,1,0,0,0,76,1441,1,0,0,0,
|
0,0,0,68,1375,1,0,0,0,70,1414,1,0,0,0,72,1430,1,0,0,0,74,1432,1,
|
||||||
78,1443,1,0,0,0,80,1528,1,0,0,0,82,1543,1,0,0,0,84,1554,1,0,0,0,
|
0,0,0,76,1441,1,0,0,0,78,1443,1,0,0,0,80,1528,1,0,0,0,82,1543,1,
|
||||||
86,1575,1,0,0,0,88,1577,1,0,0,0,90,1590,1,0,0,0,92,1594,1,0,0,0,
|
0,0,0,84,1554,1,0,0,0,86,1575,1,0,0,0,88,1577,1,0,0,0,90,1590,1,
|
||||||
94,1604,1,0,0,0,96,1615,1,0,0,0,98,1626,1,0,0,0,100,1666,1,0,0,0,
|
0,0,0,92,1594,1,0,0,0,94,1604,1,0,0,0,96,1615,1,0,0,0,98,1626,1,
|
||||||
        // ... serialized ATN: machine-generated parser state tables (numeric data omitted) ...
@ -12820,27 +12848,28 @@ export class TrinoSqlParser extends antlr.Parser {
        // ... serialized ATN: machine-generated parser state tables (numeric data omitted) ...
    ];

    private static __ATN: antlr.ATN;
@ -16394,28 +16423,37 @@ export class QueryContext extends antlr.ParserRuleContext {
     public constructor(parent: antlr.ParserRuleContext | null, invokingState: number) {
         super(parent, invokingState);
     }
+    public override get ruleIndex(): number {
+        return TrinoSqlParser.RULE_query;
+    }
+    public override copyFrom(ctx: QueryContext): void {
+        super.copyFrom(ctx);
+    }
+}
+export class QueryStatementContext extends QueryContext {
+    public constructor(ctx: QueryContext) {
+        super(ctx.parent, ctx.invokingState);
+        super.copyFrom(ctx);
+    }
     public queryNoWith(): QueryNoWithContext {
         return this.getRuleContext(0, QueryNoWithContext)!;
     }
     public with(): WithContext | null {
         return this.getRuleContext(0, WithContext);
     }
-    public override get ruleIndex(): number {
-        return TrinoSqlParser.RULE_query;
-    }
     public override enterRule(listener: TrinoSqlListener): void {
-        if(listener.enterQuery) {
-             listener.enterQuery(this);
+        if(listener.enterQueryStatement) {
+             listener.enterQueryStatement(this);
         }
     }
     public override exitRule(listener: TrinoSqlListener): void {
-        if(listener.exitQuery) {
-             listener.exitQuery(this);
+        if(listener.exitQueryStatement) {
+             listener.exitQueryStatement(this);
         }
     }
     public override accept<Result>(visitor: TrinoSqlVisitor<Result>): Result | null {
-        if (visitor.visitQuery) {
-            return visitor.visitQuery(this);
+        if (visitor.visitQueryStatement) {
+            return visitor.visitQueryStatement(this);
         } else {
             return visitor.visitChildren(this);
         }
     }
@ -23283,8 +23321,8 @@ export class ColumnNameContext extends antlr.ParserRuleContext {
     public constructor(parent: antlr.ParserRuleContext | null, invokingState: number) {
         super(parent, invokingState);
     }
-    public qualifiedName(): QualifiedNameContext {
-        return this.getRuleContext(0, QualifiedNameContext)!;
+    public qualifiedName(): QualifiedNameContext | null {
+        return this.getRuleContext(0, QualifiedNameContext);
     }
     public override get ruleIndex(): number {
         return TrinoSqlParser.RULE_columnName;
@ -3,6 +3,9 @@
 import { AbstractParseTreeVisitor } from "antlr4ng";
 
 
+import SQLParserBase from '../SQLParserBase';
+
+
 import { ProgramContext } from "./TrinoSqlParser.js";
 import { StatementsContext } from "./TrinoSqlParser.js";
 import { StandaloneClauseContext } from "./TrinoSqlParser.js";
@ -83,7 +86,7 @@ import { UpdateContext } from "./TrinoSqlParser.js";
 import { MergeContext } from "./TrinoSqlParser.js";
 import { ShowTableCommentContext } from "./TrinoSqlParser.js";
 import { ShowColumnCommentContext } from "./TrinoSqlParser.js";
-import { QueryContext } from "./TrinoSqlParser.js";
+import { QueryStatementContext } from "./TrinoSqlParser.js";
 import { WithContext } from "./TrinoSqlParser.js";
 import { TableElementContext } from "./TrinoSqlParser.js";
 import { ColumnDefinitionContext } from "./TrinoSqlParser.js";
@ -844,11 +847,12 @@ export class TrinoSqlVisitor<Result> extends AbstractParseTreeVisitor<Result> {
      */
     visitShowColumnComment?: (ctx: ShowColumnCommentContext) => Result;
     /**
-     * Visit a parse tree produced by `TrinoSqlParser.query`.
+     * Visit a parse tree produced by the `queryStatement`
+     * labeled alternative in `TrinoSqlParser.query`.
      * @param ctx the parse tree
      * @return the visitor result
      */
-    visitQuery?: (ctx: QueryContext) => Result;
+    visitQueryStatement?: (ctx: QueryStatementContext) => Result;
     /**
      * Visit a parse tree produced by `TrinoSqlParser.with`.
      * @param ctx the parse tree
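With the `query` rule now exposed through the `queryStatement` labeled alternative, visitor implementations hook `visitQueryStatement` instead of `visitQuery`. A minimal sketch against the generated Trino visitor above; the `QueryStatementCounter` class and the import paths are illustrative assumptions, not part of this commit:

    import { TrinoSqlVisitor } from '../../lib/trino/TrinoSqlVisitor';
    import { QueryStatementContext } from '../../lib/trino/TrinoSqlParser';

    // Counts how many queryStatement alternatives appear in a parse tree.
    class QueryStatementCounter extends TrinoSqlVisitor<number> {
        public count = 0;

        visitQueryStatement = (ctx: QueryStatementContext): number => {
            this.count += 1;
            return this.visitChildren(ctx) ?? 0;
        };

        protected override defaultResult(): number {
            return 0;
        }
    }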
@ -1,3 +1,6 @@
+import { WordRange } from './textAndWord';
+import { StmtContextType } from './entityCollector';
+
 /**
  * The insertion position of the candidate list.
  * Such as cursor position
@ -12,7 +15,7 @@ export interface CaretPosition {
 /**
  * Syntax context type at caret position
  */
-export enum SyntaxContextType {
+export enum EntityContextType {
     /** catalog name */
     CATALOG = 'catalog',
     /** catalog name that will be created */
@ -43,26 +46,11 @@ export enum SyntaxContextType {
     COLUMN_CREATE = 'columnCreate',
 }
 
-export interface WordRange {
-    /** content of word */
-    readonly text: string;
-    /** start at 0 */
-    readonly startIndex: number;
-    /** end at ..n-1 */
-    readonly endIndex: number;
-    /** start at 1 */
-    readonly line: number;
-    /** start at 1 */
-    readonly startColumn: number;
-    /** end at ..n + 1 */
-    readonly stopColumn: number;
-}
-
 /**
  * Suggested information analyzed from the input
  */
 export interface SyntaxSuggestion<T = WordRange> {
-    readonly syntaxContextType: SyntaxContextType;
+    readonly syntaxContextType: EntityContextType | StmtContextType;
     readonly wordRanges: T[];
 }
 
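Since the old SyntaxContextType is now EntityContextType (and a suggestion may instead carry a StmtContextType), downstream code mostly needs an import rename. A small sketch using only enum members shown above:

    import { EntityContextType } from './basic-parser-types';

    // Map a suggestion's context type to a human-readable label.
    function describeEntityContext(type: EntityContextType): string {
        switch (type) {
            case EntityContextType.CATALOG:
                return 'catalog name';
            case EntityContextType.COLUMN_CREATE:
                return 'column name that will be created';
            default:
                return type;
        }
    }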
@ -79,19 +67,3 @@ export interface Suggestions<T = WordRange> {
      */
     readonly keywords: string[];
 }
-
-export interface TextSlice {
-    /** start at 0 */
-    readonly startIndex: number;
-    /** end at ..n-1 */
-    readonly endIndex: number;
-    /** start at 1 */
-    readonly startLine: number;
-    /** end at ..n */
-    readonly endLine: number;
-    /** start at 1 */
-    readonly startColumn: number;
-    /** end at ..n + 1 */
-    readonly endColumn: number;
-    readonly text: string;
-}
@ -1,5 +1,4 @@
 import {
-    Parser,
     Lexer,
     Token,
     CharStreams,
@ -11,25 +10,15 @@ import {
     PredictionMode,
 } from 'antlr4ng';
 import { CandidatesCollection, CodeCompletionCore } from 'antlr4-c3';
-import { findCaretTokenIndex } from './utils/findCaretTokenIndex';
-import {
-    CaretPosition,
-    Suggestions,
-    SyntaxSuggestion,
-    WordRange,
-    TextSlice,
-} from './basic-parser-types';
+import SQLParserBase from '../../lib/SQLParserBase';
+import { findCaretTokenIndex } from './findCaretTokenIndex';
+import { ctxToText, tokenToWord, WordRange, TextSlice } from './textAndWord';
+import { CaretPosition, Suggestions, SyntaxSuggestion } from './basic-parser-types';
 import ParseErrorListener, { ParseError, ErrorListener } from './parseErrorListener';
 import { ErrorStrategy } from './errorStrategy';
-
-interface IParser<IParserRuleContext extends ParserRuleContext> extends Parser {
-    // Customized in our parser
-    program(): IParserRuleContext;
-}
-
-interface SplitListener extends ParseTreeListener {
-    statementsContext: ParserRuleContext[];
-}
+import type SplitListener from './splitListener';
+import type EntityCollector from './entityCollector';
+import { EntityContext } from './entityCollector';
 
 /**
  * Custom Parser class, subclass needs extends it.
@ -37,7 +26,7 @@ interface SplitListener extends ParseTreeListener {
 export default abstract class BasicParser<
     L extends Lexer = Lexer,
     PRC extends ParserRuleContext = ParserRuleContext,
-    P extends IParser<PRC> = IParser<PRC>,
+    P extends SQLParserBase<PRC> = SQLParserBase<PRC>,
 > {
     /** members for cache start */
     protected _charStreams: CharStream;
@ -85,9 +74,17 @@ export default abstract class BasicParser<
     ): Suggestions<Token>;
 
     /**
-     * Get splitListener instance.
+     * Get a new splitListener instance.
      */
-    protected abstract get splitListener(): SplitListener;
+    protected abstract get splitListener(): SplitListener<ParserRuleContext>;
+
+    /**
+     * Get a new entityCollector instance.
+     */
+    protected abstract createEntityCollector(
+        input: string,
+        caretTokenIndex?: number
+    ): EntityCollector;
 
     /**
      * Create an antlr4 lexer from input.
@ -218,7 +215,7 @@ export default abstract class BasicParser<
      */
     public listen<PTL extends ParseTreeListener = ParseTreeListener>(
         listener: PTL,
-        parseTree: PRC
+        parseTree: ParserRuleContext
     ) {
         ParseTreeWalker.DEFAULT.walk(listener, parseTree);
     }
@ -234,22 +231,13 @@ export default abstract class BasicParser<
             return null;
         }
         const splitListener = this.splitListener;
-        // TODO: add splitListener to all sqlParser implements add remove following if
+        // TODO: add splitListener to all sqlParser implements and remove following if
         if (!splitListener) return null;
 
         this.listen(splitListener, this._parseTree);
 
         const res = splitListener.statementsContext.map((context) => {
-            const { start, stop } = context;
-            return {
-                startIndex: start.start,
-                endIndex: stop.stop,
-                startLine: start.line,
-                endLine: stop.line,
-                startColumn: start.column + 1,
-                endColumn: stop.column + 1 + stop.text.length,
-                text: this._parsedInput.slice(start.start, stop.stop + 1),
-            };
+            return ctxToText(context, this._parsedInput);
         });
 
         return res;
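splitSQLByStatement now delegates the position math to ctxToText, so each element of the result is a TextSlice. A usage sketch; the concrete FlinkSQL class name and import path are assumptions, not defined in this hunk:

    import FlinkSQL from '../flinksql';

    const parser = new FlinkSQL();
    const slices = parser.splitSQLByStatement('SELECT 1; SELECT 2;');
    // Each slice carries text plus start/end line, column and character index.
    slices?.forEach((slice) => console.log(slice.text, slice.startLine, slice.endLine));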
@ -266,7 +254,7 @@ export default abstract class BasicParser<
         caretPosition: CaretPosition
     ): Suggestions | null {
         const splitListener = this.splitListener;
-        // TODO: add splitListener to all sqlParser implements add remove following if
+        // TODO: add splitListener to all sqlParser implements and remove following if
         if (!splitListener) return null;
 
         this.parseWithCache(input);
@ -365,14 +353,7 @@ export default abstract class BasicParser<
         const syntaxSuggestions: SyntaxSuggestion<WordRange>[] = originalSuggestions.syntax.map(
             (syntaxCtx) => {
                 const wordRanges: WordRange[] = syntaxCtx.wordRanges.map((token) => {
-                    return {
-                        text: this._parsedInput.slice(token.start, token.stop + 1),
-                        startIndex: token.start,
-                        endIndex: token.stop,
-                        line: token.line,
-                        startColumn: token.column + 1,
-                        stopColumn: token.column + 1 + token.text.length,
-                    };
+                    return tokenToWord(token, this._parsedInput);
                 });
                 return {
                     syntaxContextType: syntaxCtx.syntaxContextType,
@ -385,4 +366,34 @@ export default abstract class BasicParser<
             keywords: originalSuggestions.keywords,
         };
     }
+
+    public getAllEntities(input: string, caretPosition?: CaretPosition): EntityContext[] | null {
+        const allTokens = this.getAllTokens(input);
+        const caretTokenIndex = findCaretTokenIndex(caretPosition, allTokens);
+
+        const collectListener = this.createEntityCollector(input, caretTokenIndex);
+        // TODO: add entityCollector to all sqlParser implements and remove following if
+        if (!collectListener) {
+            return null;
+        }
+        // const parser = this.createParserWithCache(input);
+
+        // parser.entityCollecting = true;
+        // if(caretPosition) {
+        //     const allTokens = this.getAllTokens(input);
+        //     const tokenIndex = findCaretTokenIndex(caretPosition, allTokens);
+        //     parser.caretTokenIndex = tokenIndex;
+        // }
+
+        // const parseTree = parser.program();
+
+        const parseTree = this.parseWithCache(input);
+
+        this.listen(collectListener, parseTree);
+
+        // parser.caretTokenIndex = -1;
+        // parser.entityCollecting = false;
+
+        return collectListener.getEntities();
+    }
 }
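A usage sketch for the new getAllEntities entry point; the concrete FlinkSQL parser class, the import path and the example statement are assumptions for illustration:

    import FlinkSQL from '../flinksql';

    const parser = new FlinkSQL();
    const entities = parser.getAllEntities(
        "CREATE TABLE tb (id INT) WITH ('connector' = 'print');"
    );
    // Expected: a TABLE_CREATE entity for `tb` whose `columns` holds the COLUMN_CREATE entity `id`.
    entities?.forEach((entity) => console.log(entity.entityContextType, entity.text));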
src/parser/common/entityCollector.ts (new file, +279)
@ -0,0 +1,279 @@
import { ParserRuleContext } from 'antlr4ng';
import { EntityContextType } from './basic-parser-types';
import { WordPosition, TextPosition } from './textAndWord';
import { ctxToText, ctxToWord } from './textAndWord';
import SimpleStack from './simpleStack';

/**
 * TODO: more stmt type should be supported.
 */
export enum StmtContextType {
    /** A self-contained and complete statement */
    COMMON_STMT = 'commonStmt',
    CREATE_CATALOG_STMT = 'createCatalogStmt',
    CREATE_DATABASE_STMT = 'crateDatabaseStmt',
    CREATE_TABLE_STMT = 'createTableStmt',
    CREATE_VIEW_STMT = 'createViewStmt',
    SELECT_STMT = 'selectStmt',
    INSERT_STMT = 'insertStmt',
    CREATE_FUNCTION_STMT = 'createFunctionStmt',
}

export interface StmtContext {
    readonly stmtContextType: StmtContextType;
    readonly position: TextPosition;
    readonly rootStmt: StmtContext | null;
    readonly parentStmt: StmtContext | null;
    readonly isContainCaret?: boolean;
}

export function toStmtContext(
    ctx: ParserRuleContext,
    type: StmtContextType,
    input: string,
    rootStmt: StmtContext | null,
    parentStmt: StmtContext | null,
    isContainCaret?: boolean
): StmtContext {
    const { text: _, ...position } = ctxToText(ctx, input);
    return {
        stmtContextType: type,
        position,
        rootStmt: rootStmt ?? null,
        parentStmt: parentStmt ?? null,
        isContainCaret,
    };
}

export interface BaseAliasContext {
    readonly isAlias: boolean;
    alias?: string | EntityContext | null;
    origin?: string | EntityContext | StmtContext | null;
}

const baseAlias: BaseAliasContext = {
    isAlias: false,
    origin: null,
    alias: null,
};

export interface EntityContext extends BaseAliasContext {
    readonly entityContextType: EntityContextType;
    readonly text: string;
    readonly position: WordPosition;
    readonly belongStmt: StmtContext;
    relatedEntities: EntityContext[] | null;
    columns: EntityContext[] | null;
}

export function toEntityContext(
    ctx: ParserRuleContext,
    type: EntityContextType,
    input: string,
    belongStmt: StmtContext,
    alias?: BaseAliasContext
): EntityContext {
    const { text, ...position } = ctxToWord(ctx, input);
    const finalAlias = Object.assign({}, baseAlias, alias ?? {});
    return {
        entityContextType: type,
        text,
        position,
        belongStmt,
        relatedEntities: null,
        columns: null,
        ...finalAlias,
    };
}

/**
 * @todo: Handle alias, includes column alias, table alias, query as alias and so on.
 * @todo: [may be need] Combine the entities in each clause.
 */
abstract class EntityCollector {
    constructor(input: string, caretTokenIndex?: number) {
        this._input = input;
        this._caretTokenIndex = caretTokenIndex ?? -1;
        this._entitiesSet = new Set();
        this._stmtStack = new SimpleStack();
        this._entityStack = new SimpleStack();
        this._rootStmt = null;
    }
    private readonly _input: string;
    private readonly _caretTokenIndex: number;
    private readonly _entitiesSet: Set<EntityContext>;
    /** Staging statements that have already entered. */
    private readonly _stmtStack: SimpleStack<StmtContext>;
    /** Staging entities inside a single statement or clause. */
    private readonly _entityStack: SimpleStack<EntityContext>;
    /**
     * Always point to the first non-commonStmt at the bottom of the _stmtStack,
     * unless there are only commonStmts in the _stmtStack.
     */
    private _rootStmt: StmtContext;

    visitTerminal() {}

    visitErrorNode() {}

    enterEveryRule() {}

    exitEveryRule() {}

    getEntities() {
        return Array.from(this._entitiesSet);
    }

    enterProgram() {
        this._entitiesSet.clear();
        this._stmtStack.clear();
        this._entityStack.clear();
        this._rootStmt = null;
    }

    protected pushStmt(ctx: ParserRuleContext, type: StmtContextType) {
        let isContainCaret;
        if (this._caretTokenIndex >= 0) {
            isContainCaret =
                ctx.start.tokenIndex <= this._caretTokenIndex &&
                ctx.stop?.tokenIndex >= this._caretTokenIndex;
        }
        const stmtContext = toStmtContext(
            ctx,
            type,
            this._input,
            this._rootStmt,
            this._stmtStack.peek(),
            isContainCaret
        );
        if (
            this._stmtStack.isEmpty() ||
            this._stmtStack.peek()?.stmtContextType === StmtContextType.COMMON_STMT
        ) {
            this._rootStmt = stmtContext;
        }
        this._stmtStack.push(stmtContext);

        return stmtContext;
    }

    protected popStmt() {
        const stmtContext = this._stmtStack.pop();
        if (this._rootStmt === stmtContext) {
            this._rootStmt = this._stmtStack.peek();
            if (!this._entityStack.isEmpty()) {
                this.combineEntitiesAndAdd(stmtContext);
            }
        }
        return stmtContext;
    }

    protected pushEntity(
        ctx: ParserRuleContext,
        type: EntityContextType,
        alias?: BaseAliasContext
    ) {
        const entityContext = toEntityContext(
            ctx,
            type,
            this._input,
            this._stmtStack.peek(),
            alias
        );
        if (this._stmtStack.isEmpty()) {
            this._entitiesSet.add(entityContext);
        } else {
            // If is inside a statement
            this._entityStack.push(entityContext);
        }
        return entityContext;
    }

    /**
     * Combine entities that inside a single statement.
     * e.g. combine tableName and column if they are inside a same createTableStatement.
     * Then add combined entities into result.
     */
    private combineEntitiesAndAdd(stmtContext: StmtContext) {
        const entitiesInsideStmt: EntityContext[] = [];
        while (
            !this._entityStack.isEmpty() &&
            (this._entityStack.peek().belongStmt === stmtContext ||
                this._entityStack.peek().belongStmt.rootStmt === stmtContext)
        ) {
            entitiesInsideStmt.unshift(this._entityStack.pop());
        }

        let tmpResults = entitiesInsideStmt;

        tmpResults = this.combineRootStmtEntities(stmtContext, entitiesInsideStmt);

        while (tmpResults.length) {
            this._entitiesSet.add(tmpResults.shift());
        }
    }

    /**
     * Combined all entities under a rootStmt.
     */
    protected combineRootStmtEntities(
        stmtContext: StmtContext,
        entitiesInsideStmt: EntityContext[]
    ): EntityContext[] {
        if (
            stmtContext.stmtContextType === StmtContextType.CREATE_VIEW_STMT ||
            stmtContext.stmtContextType === StmtContextType.CREATE_TABLE_STMT
        ) {
            return this.combineCreateTableOrViewStmtEntities(stmtContext, entitiesInsideStmt);
        }
        return entitiesInsideStmt;
    }

    protected combineCreateTableOrViewStmtEntities(
        stmtContext: StmtContext,
        entitiesInsideStmt: EntityContext[]
    ): EntityContext[] {
        const columns: EntityContext[] = [];
        const relatedEntities: EntityContext[] = [];
        let mainEntity: EntityContext = null;
        const finalEntities = entitiesInsideStmt.reduce((result, entity) => {
            if (entity.belongStmt !== stmtContext) {
                if (
                    entity.entityContextType !== EntityContextType.COLUMN &&
                    entity.entityContextType !== EntityContextType.COLUMN_CREATE
                ) {
                    relatedEntities.push(entity);
                    result.push(entity);
                }
                return result;
            }

            if (entity.entityContextType === EntityContextType.COLUMN_CREATE) {
                columns.push(entity);
            } else if (
                entity.entityContextType === EntityContextType.TABLE_CREATE ||
                entity.entityContextType === EntityContextType.VIEW_CREATE
            ) {
                mainEntity = entity;
                result.push(entity);
                return result;
            } else if (entity.entityContextType !== EntityContextType.COLUMN) {
                relatedEntities.push(entity);
                result.push(entity);
            }
            return result;
        }, []);

        if (columns.length) {
            mainEntity.columns = columns;
        }

        if (relatedEntities.length) {
            mainEntity.relatedEntities = relatedEntities;
        }

        return finalEntities;
    }
}

export default EntityCollector;
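To illustrate the combine step above: for a CREATE VIEW that selects from another table, the created view becomes the main entity and the referenced table is attached to it. A sketch in the spirit of this PR's unit tests; the parser class, import paths and the expected output are assumptions:

    import FlinkSQL from '../flinksql';
    import { EntityContextType } from './basic-parser-types';

    const parser = new FlinkSQL();
    const entities = parser.getAllEntities('CREATE VIEW v AS SELECT id FROM source_tb;') ?? [];

    const created = entities.find((e) => e.entityContextType === EntityContextType.VIEW_CREATE);
    // `source_tb` is collected inside the nested select, so it ends up attached to the created view.
    console.log(created?.relatedEntities?.map((e) => e.text)); // expected: ['source_tb']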
@ -1,5 +1,5 @@
 import { Token } from 'antlr4ng';
-import { CaretPosition } from '../basic-parser-types';
+import { CaretPosition } from './basic-parser-types';
 
 /**
  * find token index via caret position (cursor position)
src/parser/common/simpleStack.ts (new file, +32)
@ -0,0 +1,32 @@
class SimpleStack<T> {
    constructor() {
        this.stack = [];
    }
    private stack: T[];

    push(item: T) {
        this.stack.push(item);
    }

    pop(): T {
        return this.stack.pop();
    }

    peek(): T {
        return this.stack[this.stack.length - 1];
    }

    clear() {
        this.stack = [];
    }

    size(): number {
        return this.stack.length;
    }

    isEmpty(): boolean {
        return this.stack.length === 0;
    }
}

export default SimpleStack;
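SimpleStack is just a thin LIFO helper backing the collector's statement and entity stacks; a quick sketch of its behaviour:

    import SimpleStack from './simpleStack';

    const stack = new SimpleStack<string>();
    stack.push('outer');
    stack.push('inner');
    console.log(stack.peek());    // 'inner'
    console.log(stack.pop());     // 'inner'
    console.log(stack.size());    // 1
    console.log(stack.isEmpty()); // false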
src/parser/common/splitListener.ts (new file, +17)
@ -0,0 +1,17 @@
abstract class SplitListener<T> {
    protected _statementsContext: T[] = [];

    visitTerminal() {}

    visitErrorNode() {}

    enterEveryRule() {}

    exitEveryRule() {}

    get statementsContext() {
        return this._statementsContext;
    }
}

export default SplitListener;
import type { ParserRuleContext, Token } from 'antlr4ng';
|
||||||
|
|
||||||
|
export interface WordPosition {
|
||||||
|
/** start at 0 */
|
||||||
|
readonly startIndex: number;
|
||||||
|
/** end at ..n-1 */
|
||||||
|
readonly endIndex: number;
|
||||||
|
/** start at 1 */
|
||||||
|
readonly line: number;
|
||||||
|
/** start at 1 */
|
||||||
|
readonly startColumn: number;
|
||||||
|
/** end at ..n + 1 */
|
||||||
|
readonly endColumn: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface WordRange extends WordPosition {
|
||||||
|
/** content of word */
|
||||||
|
readonly text: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TextPosition {
|
||||||
|
/** start at 0 */
|
||||||
|
readonly startIndex: number;
|
||||||
|
/** end at ..n-1 */
|
||||||
|
readonly endIndex: number;
|
||||||
|
/** start at 1 */
|
||||||
|
readonly startLine: number;
|
||||||
|
/** end at ..n */
|
||||||
|
readonly endLine: number;
|
||||||
|
/** start at 1 */
|
||||||
|
readonly startColumn: number;
|
||||||
|
/** end at ..n + 1 */
|
||||||
|
readonly endColumn: number;
|
||||||
|
}
|
||||||
|
|
||||||
|
export interface TextSlice extends TextPosition {
|
||||||
|
readonly text: string;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert Token to Word
|
||||||
|
*/
|
||||||
|
export function tokenToWord(token: Token, input: string): WordPosition & { text: string } {
|
||||||
|
const startIndex = token.start;
|
||||||
|
const endIndex = token.stop;
|
||||||
|
return {
|
||||||
|
text: token.text,
|
||||||
|
line: token.line,
|
||||||
|
startIndex,
|
||||||
|
endIndex,
|
||||||
|
startColumn: token.column + 1,
|
||||||
|
endColumn: token.column + 1 + token.text.length,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert ParserRuleContext to Word
|
||||||
|
*/
|
||||||
|
export function ctxToWord(ctx: ParserRuleContext, input: string): WordPosition & { text: string } {
|
||||||
|
const startIndex = ctx.start.start;
|
||||||
|
const endIndex = ctx.stop.stop;
|
||||||
|
const text = input.slice(startIndex, endIndex + 1);
|
||||||
|
return {
|
||||||
|
text,
|
||||||
|
line: ctx.start.line,
|
||||||
|
startIndex,
|
||||||
|
endIndex,
|
||||||
|
startColumn: ctx.start.column + 1,
|
||||||
|
endColumn: ctx.stop.column + 1 + ctx.stop.text.length,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Convert ParserRuleContext to Text
|
||||||
|
*/
|
||||||
|
export function ctxToText(ctx: ParserRuleContext, input: string): TextPosition & { text: string } {
|
||||||
|
const startIndex = ctx.start.start;
|
||||||
|
const endIndex = ctx.stop.stop;
|
||||||
|
const text = input.slice(startIndex, endIndex + 1);
|
||||||
|
return {
|
||||||
|
text,
|
||||||
|
startLine: ctx.start.line,
|
||||||
|
endLine: ctx.stop.line,
|
||||||
|
startIndex,
|
||||||
|
endIndex,
|
||||||
|
startColumn: ctx.start.column + 1,
|
||||||
|
endColumn: ctx.stop.column + 1 + ctx.stop.text.length,
|
||||||
|
};
|
||||||
|
}
|
134 src/parser/flinksql/flinkEntityCollector.ts Normal file
@ -0,0 +1,134 @@
import { EntityContextType } from '../../parser/common/basic-parser-types';
import {
    CatalogPathContext,
    CatalogPathCreateContext,
    ColumnNameCreateContext,
    CreateCatalogContext,
    CreateDatabaseContext,
    CreateFunctionContext,
    CreateTableContext,
    CreateViewContext,
    DatabasePathContext,
    DatabasePathCreateContext,
    FunctionNameCreateContext,
    InsertStatementContext,
    QueryStatementContext,
    SqlStatementContext,
    TablePathContext,
    TablePathCreateContext,
    ViewPathContext,
    ViewPathCreateContext,
} from '../../lib/flinksql/FlinkSqlParser';
import { FlinkSqlParserListener } from '../../lib/flinksql/FlinkSqlParserListener';
import EntityCollector, { StmtContextType } from '../common/entityCollector';

export default class FlinkEntityCollector
    extends EntityCollector
    implements FlinkSqlParserListener
{
    /** ====== Entity Begin */
    exitCatalogPathCreate(ctx: CatalogPathCreateContext) {
        this.pushEntity(ctx, EntityContextType.CATALOG_CREATE);
    }

    exitCatalogPath(ctx: CatalogPathContext) {
        this.pushEntity(ctx, EntityContextType.CATALOG);
    }

    exitDatabasePathCreate(ctx: DatabasePathCreateContext) {
        this.pushEntity(ctx, EntityContextType.DATABASE_CREATE);
    }

    exitDatabasePath(ctx: DatabasePathContext) {
        this.pushEntity(ctx, EntityContextType.DATABASE);
    }

    exitTablePath(ctx: TablePathContext) {
        this.pushEntity(ctx, EntityContextType.TABLE);
    }

    exitTablePathCreate(ctx: TablePathCreateContext) {
        this.pushEntity(ctx, EntityContextType.TABLE_CREATE);
    }

    exitViewPath(ctx: ViewPathContext) {
        this.pushEntity(ctx, EntityContextType.VIEW);
    }

    exitViewPathCreate(ctx: ViewPathCreateContext) {
        this.pushEntity(ctx, EntityContextType.VIEW_CREATE);
    }

    exitColumnNameCreate(ctx: ColumnNameCreateContext) {
        this.pushEntity(ctx, EntityContextType.COLUMN_CREATE);
    }

    exitFunctionNameCreate(ctx: FunctionNameCreateContext) {
        this.pushEntity(ctx, EntityContextType.FUNCTION_CREATE);
    }

    /** ===== Statement begin */
    enterSqlStatement(ctx: SqlStatementContext) {
        this.pushStmt(ctx, StmtContextType.COMMON_STMT);
    }

    exitSqlStatement(ctx: SqlStatementContext) {
        this.popStmt();
    }

    enterCreateCatalog(ctx: CreateCatalogContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_CATALOG_STMT);
    }

    exitCreateCatalog(ctx: CreateCatalogContext) {
        this.popStmt();
    }

    enterCreateDatabase(ctx: CreateDatabaseContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_DATABASE_STMT);
    }

    exitCreateDatabase(ctx: CreateDatabaseContext) {
        this.popStmt();
    }

    enterCreateTable(ctx: CreateTableContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_TABLE_STMT);
    }

    exitCreateTable(ctx: CreateTableContext) {
        this.popStmt();
    }

    enterCreateView(ctx: CreateViewContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_VIEW_STMT);
    }

    exitCreateView(ctx: CreateViewContext) {
        this.popStmt();
    }

    enterQueryStatement(ctx: QueryStatementContext) {
        this.pushStmt(ctx, StmtContextType.SELECT_STMT);
    }

    exitQueryStatement(ctx: QueryStatementContext) {
        this.popStmt();
    }

    enterCreateFunction(ctx: CreateFunctionContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_FUNCTION_STMT);
    }

    exitCreateFunction(ctx: CreateFunctionContext) {
        this.popStmt();
    }

    enterInsertStatement(ctx: InsertStatementContext) {
        this.pushStmt(ctx, StmtContextType.INSERT_STMT);
    }

    exitInsertStatement(ctx: InsertStatementContext) {
        this.popStmt();
    }
}
12 src/parser/flinksql/flinkSplitListener.ts Normal file
@ -0,0 +1,12 @@
import { SingleStatementContext } from '../../lib/flinksql/FlinkSqlParser';
import { FlinkSqlParserListener } from '../../lib/flinksql/FlinkSqlParserListener';
import SplitListener from '../common/splitListener';

export class FlinkSqlSplitListener
    extends SplitListener<SingleStatementContext>
    implements FlinkSqlParserListener
{
    exitSingleStatement = (ctx: SingleStatementContext) => {
        this._statementsContext.push(ctx);
    };
}
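Each dialect's split listener only overrides the exit hook for its top-level statement rule and pushes the context into the _statementsContext buffer inherited from the common base. The base class in src/parser/common/splitListener is not part of this diff, so the following is only a rough sketch of the shape these subclasses appear to rely on, inferred from the inline listeners that are deleted from the dialect index files further down.

import { ErrorNode, ParserRuleContext, TerminalNode } from 'antlr4ng';

// Sketch only: the real base class may differ. It supplies the shared
// statement buffer plus empty ANTLR listener hooks so subclasses need to
// override nothing but their dialect's statement-exit method.
export default abstract class SplitListenerSketch<T extends ParserRuleContext> {
    protected _statementsContext: T[] = [];

    visitTerminal(_node: TerminalNode) {}
    visitErrorNode(_node: ErrorNode) {}
    enterEveryRule(_ctx: ParserRuleContext) {}
    exitEveryRule(_ctx: ParserRuleContext) {}

    get statementsContext() {
        return this._statementsContext;
    }
}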
@ -1,14 +1,14 @@
 import { Token } from 'antlr4ng';
 import { CandidatesCollection } from 'antlr4-c3';
-import { FlinkSqlLexer } from '../lib/flinksql/FlinkSqlLexer';
-import {
-    FlinkSqlParser,
-    ProgramContext,
-    SingleStatementContext,
-} from '../lib/flinksql/FlinkSqlParser';
-import { FlinkSqlParserListener } from '../lib/flinksql/FlinkSqlParserListener';
-import { SyntaxContextType, Suggestions, SyntaxSuggestion } from './common/basic-parser-types';
-import BasicParser from './common/basicParser';
+import { FlinkSqlLexer } from '../../lib/flinksql/FlinkSqlLexer';
+import { FlinkSqlParser, ProgramContext } from '../../lib/flinksql/FlinkSqlParser';
+import { EntityContextType, Suggestions, SyntaxSuggestion } from '../common/basic-parser-types';
+import BasicParser from '../common/basicParser';
+import { StmtContextType } from '../common/entityCollector';
+import { FlinkSqlSplitListener } from './flinkSplitListener';
+import FlinkEntityCollector from './flinkEntityCollector';
+
+export { FlinkSqlSplitListener, FlinkEntityCollector };

 export default class FlinkSQL extends BasicParser<FlinkSqlLexer, ProgramContext, FlinkSqlParser> {
     protected createLexerFromCharStream(charStreams) {
@ -39,6 +39,10 @@ export default class FlinkSQL extends BasicParser<FlinkSqlLexer, ProgramContext,
         return new FlinkSqlSplitListener();
     }

+    protected createEntityCollector(input: string, caretTokenIndex?: number) {
+        return new FlinkEntityCollector(input, caretTokenIndex);
+    }
+
     protected processCandidates(
         candidates: CandidatesCollection,
         allTokens: Token[],
@ -56,50 +60,50 @@ export default class FlinkSQL extends BasicParser<FlinkSqlLexer, ProgramContext,
             caretTokenIndex + tokenIndexOffset + 1
         );

-        let syntaxContextType: SyntaxContextType;
+        let syntaxContextType: EntityContextType | StmtContextType;
         switch (ruleType) {
             case FlinkSqlParser.RULE_catalogPath: {
-                syntaxContextType = SyntaxContextType.CATALOG;
+                syntaxContextType = EntityContextType.CATALOG;
                 break;
             }
             case FlinkSqlParser.RULE_databasePath: {
-                syntaxContextType = SyntaxContextType.DATABASE;
+                syntaxContextType = EntityContextType.DATABASE;
                 break;
             }
             case FlinkSqlParser.RULE_databasePathCreate: {
-                syntaxContextType = SyntaxContextType.DATABASE_CREATE;
+                syntaxContextType = EntityContextType.DATABASE_CREATE;
                 break;
             }
             case FlinkSqlParser.RULE_tablePath: {
-                syntaxContextType = SyntaxContextType.TABLE;
+                syntaxContextType = EntityContextType.TABLE;
                 break;
             }
             case FlinkSqlParser.RULE_tablePathCreate: {
-                syntaxContextType = SyntaxContextType.TABLE_CREATE;
+                syntaxContextType = EntityContextType.TABLE_CREATE;
                 break;
             }
             case FlinkSqlParser.RULE_viewPath: {
-                syntaxContextType = SyntaxContextType.VIEW;
+                syntaxContextType = EntityContextType.VIEW;
                 break;
             }
             case FlinkSqlParser.RULE_viewPathCreate: {
-                syntaxContextType = SyntaxContextType.VIEW_CREATE;
+                syntaxContextType = EntityContextType.VIEW_CREATE;
                 break;
             }
             case FlinkSqlParser.RULE_functionName: {
-                syntaxContextType = SyntaxContextType.FUNCTION;
+                syntaxContextType = EntityContextType.FUNCTION;
                 break;
             }
             case FlinkSqlParser.RULE_functionNameCreate: {
-                syntaxContextType = SyntaxContextType.FUNCTION_CREATE;
+                syntaxContextType = EntityContextType.FUNCTION_CREATE;
                 break;
             }
             case FlinkSqlParser.RULE_columnName: {
-                syntaxContextType = SyntaxContextType.COLUMN;
+                syntaxContextType = EntityContextType.COLUMN;
                 break;
             }
             case FlinkSqlParser.RULE_columnNameCreate: {
-                syntaxContextType = SyntaxContextType.COLUMN_CREATE;
+                syntaxContextType = EntityContextType.COLUMN_CREATE;
                 break;
             }
             default:
@ -131,20 +135,3 @@ export default class FlinkSQL extends BasicParser<FlinkSqlLexer, ProgramContext,
         };
     }
 }
-
-export class FlinkSqlSplitListener implements FlinkSqlParserListener {
-    private _statementsContext: SingleStatementContext[] = [];
-
-    exitSingleStatement = (ctx: SingleStatementContext) => {
-        this._statementsContext.push(ctx);
-    };
-
-    visitTerminal() {}
-    visitErrorNode() {}
-    enterEveryRule() {}
-    exitEveryRule() {}
-
-    get statementsContext() {
-        return this._statementsContext;
-    }
-}
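With createEntityCollector wired in, callers can pull the collected entities out of a script through the parser facade. A rough usage sketch follows; the dt-sql-parser package name, the getAllEntities entry point and the entityContextType/text fields on the result are assumptions about the surrounding codebase, not something visible in this diff.

// Assumed entry point: FlinkSQL is re-exported from src/parser/index.ts.
import { FlinkSQL } from 'dt-sql-parser';

const flink = new FlinkSQL();
const sql = 'CREATE TABLE tb (id INT); SELECT id FROM tb;';

// Assumed API: getAllEntities walks the parse tree with FlinkEntityCollector.
const entities = flink.getAllEntities(sql) ?? [];
for (const entity of entities) {
    // Expected to distinguish e.g. TABLE_CREATE for `tb` in the CREATE TABLE
    // statement from TABLE for `tb` in the SELECT.
    console.log(entity.entityContextType, entity.text);
}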
140 src/parser/hive/hiveEntityCollector.ts Normal file
@ -0,0 +1,140 @@
import { EntityContextType } from '../..';
import { HiveSqlParserListener } from '../../lib';
import {
    ColumnNameCreateContext,
    CreateDatabaseStatementContext,
    CreateFunctionStatementContext,
    CreateMaterializedViewStatementContext,
    CreateTableStatementContext,
    CreateViewStatementContext,
    DbSchemaNameContext,
    DbSchemaNameCreateContext,
    FromInsertStmtContext,
    FromSelectStmtContext,
    FromStatementContext,
    FunctionNameCreateContext,
    InsertStmtContext,
    SelectStatementContext,
    StatementContext,
    TableNameContext,
    TableNameCreateContext,
    ViewNameContext,
    ViewNameCreateContext,
} from '../../lib/hive/HiveSqlParser';
import EntityCollector, { StmtContextType } from '../common/entityCollector';

export default class HiveEntityCollector extends EntityCollector implements HiveSqlParserListener {
    /** ====== Entity Begin */
    exitTableNameCreate = (ctx: TableNameCreateContext) => {
        this.pushEntity(ctx, EntityContextType.TABLE_CREATE);
    };

    exitTableName = (ctx: TableNameContext) => {
        this.pushEntity(ctx, EntityContextType.TABLE);
    };

    exitColumnNameCreate = (ctx: ColumnNameCreateContext) => {
        this.pushEntity(ctx, EntityContextType.COLUMN_CREATE);
    };

    exitViewNameCreate = (ctx: ViewNameCreateContext) => {
        this.pushEntity(ctx, EntityContextType.VIEW_CREATE);
    };

    exitViewName = (ctx: ViewNameContext) => {
        this.pushEntity(ctx, EntityContextType.VIEW);
    };

    exitDbSchemaNameCreate(ctx: DbSchemaNameCreateContext) {
        this.pushEntity(ctx, EntityContextType.DATABASE_CREATE);
    }

    exitDbSchemaName(ctx: DbSchemaNameContext) {
        this.pushEntity(ctx, EntityContextType.DATABASE);
    }

    exitFunctionNameCreate(ctx: FunctionNameCreateContext) {
        this.pushEntity(ctx, EntityContextType.FUNCTION_CREATE);
    }

    /** ===== Statement begin */
    enterStatement = (ctx: StatementContext) => {
        this.pushStmt(ctx, StmtContextType.COMMON_STMT);
    };

    exitStatement = () => {
        this.popStmt();
    };

    enterCreateTableStatement = (ctx: CreateTableStatementContext) => {
        this.pushStmt(ctx, StmtContextType.CREATE_TABLE_STMT);
    };

    exitCreateTableStatement = () => {
        this.popStmt();
    };

    enterSelectStatement = (ctx: SelectStatementContext) => {
        this.pushStmt(ctx, StmtContextType.SELECT_STMT);
    };

    exitSelectStatement = (ctx: SelectStatementContext) => {
        this.popStmt();
    };

    enterFromSelectStmt = (ctx: FromSelectStmtContext) => {
        this.pushStmt(ctx, StmtContextType.SELECT_STMT);
    };

    exitFromSelectStmt = (ctx: FromSelectStmtContext) => {
        this.popStmt();
    };

    enterCreateViewStatement = (ctx: CreateViewStatementContext) => {
        this.pushStmt(ctx, StmtContextType.CREATE_VIEW_STMT);
    };

    exitCreateViewStatement = (ctx: CreateViewStatementContext) => {
        this.popStmt();
    };

    enterCreateMaterializedViewStatement = (ctx: CreateMaterializedViewStatementContext) => {
        this.pushStmt(ctx, StmtContextType.CREATE_VIEW_STMT);
    };

    exitCreateMaterializedViewStatement = (ctx: CreateMaterializedViewStatementContext) => {
        this.popStmt();
    };

    enterInsertStmt = (ctx: InsertStmtContext) => {
        this.pushStmt(ctx, StmtContextType.INSERT_STMT);
    };

    exitInsertStmt = (ctx: InsertStmtContext) => {
        this.popStmt();
    };

    enterFromInsertStmt = (ctx: FromInsertStmtContext) => {
        this.pushStmt(ctx, StmtContextType.INSERT_STMT);
    };

    exitFromInsertStmt = (ctx: FromInsertStmtContext) => {
        this.popStmt();
    };

    enterCreateDatabaseStatement(ctx: CreateDatabaseStatementContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_DATABASE_STMT);
    }

    exitCreateDatabaseStatement(ctx: CreateDatabaseStatementContext) {
        this.popStmt();
    }

    enterFunctionNameCreate(ctx: FunctionNameCreateContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_FUNCTION_STMT);
    }

    exitCreateFunctionStatement(ctx: CreateFunctionStatementContext) {
        this.popStmt();
    }
}
12 src/parser/hive/hiveSplitListener.ts Normal file
@ -0,0 +1,12 @@
import { StatementContext } from '../../lib/hive/HiveSqlParser';
import { HiveSqlParserListener } from '../../lib/hive/HiveSqlParserListener';
import SplitListener from '../common/splitListener';

export class HiveSqlSplitListener
    extends SplitListener<StatementContext>
    implements HiveSqlParserListener
{
    exitStatement = (ctx: StatementContext) => {
        this._statementsContext.push(ctx);
    };
}
@ -1,10 +1,15 @@
 import { Token } from 'antlr4ng';
 import { CandidatesCollection } from 'antlr4-c3';
-import { HiveSqlLexer } from '../lib/hive/HiveSqlLexer';
-import { HiveSqlParser, ProgramContext, StatementContext } from '../lib/hive/HiveSqlParser';
-import BasicParser from './common/basicParser';
-import { HiveSqlParserListener } from '../lib/hive/HiveSqlParserListener';
-import { SyntaxContextType, Suggestions, SyntaxSuggestion } from './common/basic-parser-types';
+import { HiveSqlLexer } from '../../lib/hive/HiveSqlLexer';
+import { HiveSqlParser, ProgramContext } from '../../lib/hive/HiveSqlParser';
+import BasicParser from '../common/basicParser';
+import { EntityContextType, Suggestions, SyntaxSuggestion } from '../common/basic-parser-types';
+import { StmtContextType } from '../common/entityCollector';
+import { HiveSqlSplitListener } from './hiveSplitListener';
+import HiveEntityCollector from './hiveEntityCollector';
+
+export { HiveEntityCollector, HiveSqlSplitListener };

 export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, HiveSqlParser> {
     protected createLexerFromCharStream(charStreams) {
@ -34,6 +39,10 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
         return new HiveSqlSplitListener();
     }

+    protected createEntityCollector(input: string, caretTokenIndex?: number) {
+        return new HiveEntityCollector(input, caretTokenIndex);
+    }
+
     protected processCandidates(
         candidates: CandidatesCollection,
         allTokens: Token[],
@ -50,47 +59,47 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
             caretTokenIndex + tokenIndexOffset + 1
         );

-        let syntaxContextType: SyntaxContextType;
+        let syntaxContextType: EntityContextType | StmtContextType;
         switch (ruleType) {
             case HiveSqlParser.RULE_dbSchemaName: {
-                syntaxContextType = SyntaxContextType.DATABASE;
+                syntaxContextType = EntityContextType.DATABASE;
                 break;
             }
             case HiveSqlParser.RULE_dbSchemaNameCreate: {
-                syntaxContextType = SyntaxContextType.DATABASE_CREATE;
+                syntaxContextType = EntityContextType.DATABASE_CREATE;
                 break;
             }
             case HiveSqlParser.RULE_tableName: {
-                syntaxContextType = SyntaxContextType.TABLE;
+                syntaxContextType = EntityContextType.TABLE;
                 break;
             }
             case HiveSqlParser.RULE_tableNameCreate: {
-                syntaxContextType = SyntaxContextType.TABLE_CREATE;
+                syntaxContextType = EntityContextType.TABLE_CREATE;
                 break;
             }
             case HiveSqlParser.RULE_viewName: {
-                syntaxContextType = SyntaxContextType.VIEW;
+                syntaxContextType = EntityContextType.VIEW;
                 break;
             }
             case HiveSqlParser.RULE_viewNameCreate: {
-                syntaxContextType = SyntaxContextType.VIEW_CREATE;
+                syntaxContextType = EntityContextType.VIEW_CREATE;
                 break;
             }
             case HiveSqlParser.RULE_functionNameForDDL:
             case HiveSqlParser.RULE_functionNameForInvoke: {
-                syntaxContextType = SyntaxContextType.FUNCTION;
+                syntaxContextType = EntityContextType.FUNCTION;
                 break;
             }
             case HiveSqlParser.RULE_functionNameCreate: {
-                syntaxContextType = SyntaxContextType.FUNCTION_CREATE;
+                syntaxContextType = EntityContextType.FUNCTION_CREATE;
                 break;
             }
             case HiveSqlParser.RULE_columnName: {
-                syntaxContextType = SyntaxContextType.COLUMN;
+                syntaxContextType = EntityContextType.COLUMN;
                 break;
             }
             case HiveSqlParser.RULE_columnNameCreate: {
-                syntaxContextType = SyntaxContextType.COLUMN_CREATE;
+                syntaxContextType = EntityContextType.COLUMN_CREATE;
                 break;
             }
             default:
@ -122,20 +131,3 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
         };
     }
 }
-
-export class HiveSqlSplitListener implements HiveSqlParserListener {
-    private _statementContext: StatementContext[] = [];
-
-    exitStatement = (ctx: StatementContext) => {
-        this._statementContext.push(ctx);
-    };
-
-    visitTerminal() {}
-    visitErrorNode() {}
-    enterEveryRule() {}
-    exitEveryRule() {}
-
-    get statementsContext() {
-        return this._statementContext;
-    }
-}
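The processCandidates switch above is what maps antlr4-c3 rule candidates onto EntityContextType values for code completion. A sketch of how that surfaces to a caller, under the assumption that HiveSQL exposes getSuggestionAtCaretPosition and that the returned Suggestions object carries syntax items with a syntaxContextType field; the package entry point is also assumed.

import { HiveSQL, EntityContextType } from 'dt-sql-parser'; // entry point assumed

const hive = new HiveSQL();
const sql = 'SELECT * FROM ';

// Ask for completion candidates right after `FROM ` (1-based line/column).
const suggestions = hive.getSuggestionAtCaretPosition(sql, {
    lineNumber: 1,
    column: sql.length + 1,
});

// A table-name context at this caret should map to EntityContextType.TABLE.
const wantsTable = suggestions?.syntax.some(
    (item) => item.syntaxContextType === EntityContextType.TABLE
);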
143 src/parser/impala/impalaEntityCollector.ts Normal file
@ -0,0 +1,143 @@
import { ImpalaSqlParserListener } from '../../lib';
import {
    ColumnNamePathCreateContext,
    CreateAggregateFunctionContext,
    CreateFunctionContext,
    CreateKuduTableAsSelectContext,
    CreateSchemaContext,
    CreateTableLikeContext,
    CreateTableSelectContext,
    CreateViewContext,
    DatabaseNameCreateContext,
    DatabaseNamePathContext,
    FunctionNameCreateContext,
    FunctionNamePathContext,
    InsertStatementContext,
    QueryStatementContext,
    SingleStatementContext,
    TableNameCreateContext,
    TableNamePathContext,
    ViewNameCreateContext,
    ViewNamePathContext,
} from '../../lib/impala/ImpalaSqlParser';
import { EntityContextType } from '../common/basic-parser-types';
import EntityCollector, { StmtContextType } from '../common/entityCollector';

export default class ImpalaEntityCollector
    extends EntityCollector
    implements ImpalaSqlParserListener
{
    /** ===== Entity begin */
    exitTableNameCreate(ctx: TableNameCreateContext) {
        this.pushEntity(ctx, EntityContextType.TABLE_CREATE);
    }

    exitTableNamePath(ctx: TableNamePathContext) {
        this.pushEntity(ctx, EntityContextType.TABLE);
    }

    exitColumnNamePathCreate(ctx: ColumnNamePathCreateContext) {
        this.pushEntity(ctx, EntityContextType.COLUMN_CREATE);
    }

    exitViewNameCreate(ctx: ViewNameCreateContext) {
        this.pushEntity(ctx, EntityContextType.VIEW_CREATE);
    }

    exitViewNamePath(ctx: ViewNamePathContext) {
        this.pushEntity(ctx, EntityContextType.VIEW);
    }

    exitDatabaseNamePath(ctx: DatabaseNamePathContext) {
        this.pushEntity(ctx, EntityContextType.DATABASE);
    }

    exitDatabaseNameCreate(ctx: DatabaseNameCreateContext) {
        this.pushEntity(ctx, EntityContextType.DATABASE_CREATE);
    }

    exitFunctionNameCreate(ctx: FunctionNameCreateContext) {
        this.pushEntity(ctx, EntityContextType.FUNCTION_CREATE);
    }

    /** ===== Statement begin */
    enterSingleStatement(ctx: SingleStatementContext) {
        this.pushStmt(ctx, StmtContextType.COMMON_STMT);
    }

    exitSingleStatement(ctx: SingleStatementContext) {
        this.popStmt();
    }

    enterCreateTableLike(ctx: CreateTableLikeContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_TABLE_STMT);
    }

    exitCreateTableLike(ctx: CreateTableLikeContext) {
        this.popStmt();
    }

    enterCreateTableSelect(ctx: CreateTableSelectContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_TABLE_STMT);
    }

    exitCreateTableSelect(ctx: CreateTableSelectContext) {
        this.popStmt();
    }

    enterCreateKuduTableAsSelect(ctx: CreateKuduTableAsSelectContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_TABLE_STMT);
    }

    exitCreateKuduTableAsSelect(ctx: CreateKuduTableAsSelectContext) {
        this.popStmt();
    }

    enterQueryStatement(ctx: QueryStatementContext) {
        this.pushStmt(ctx, StmtContextType.SELECT_STMT);
    }

    exitQueryStatement(ctx: QueryStatementContext) {
        this.popStmt();
    }

    enterCreateView(ctx: CreateViewContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_VIEW_STMT);
    }

    exitCreateView(ctx: CreateViewContext) {
        this.popStmt();
    }

    enterInsertStatement(ctx: InsertStatementContext) {
        this.pushStmt(ctx, StmtContextType.INSERT_STMT);
    }

    exitInsertStatement(ctx: InsertStatementContext) {
        this.popStmt();
    }

    enterCreateSchema(ctx: CreateSchemaContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_DATABASE_STMT);
    }

    exitCreateSchema(ctx: CreateSchemaContext) {
        this.popStmt();
    }

    enterCreateAggregateFunction(ctx: CreateAggregateFunctionContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_FUNCTION_STMT);
    }

    exitCreateAggregateFunction(ctx: CreateAggregateFunctionContext) {
        this.popStmt();
    }

    enterCreateFunction(ctx: CreateFunctionContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_FUNCTION_STMT);
    }

    exitCreateFunction(ctx: CreateFunctionContext) {
        this.popStmt();
    }
}
12 src/parser/impala/impalaSplitListener.ts Normal file
@ -0,0 +1,12 @@
import { SingleStatementContext } from '../../lib/impala/ImpalaSqlParser';
import { ImpalaSqlParserListener } from '../../lib/impala/ImpalaSqlParserListener';
import SplitListener from '../common/splitListener';

export class ImpalaSqlSplitListener
    extends SplitListener<SingleStatementContext>
    implements ImpalaSqlParserListener
{
    exitSingleStatement = (ctx: SingleStatementContext) => {
        this._statementsContext.push(ctx);
    };
}
@ -1,14 +1,14 @@
 import { Token } from 'antlr4ng';
 import { CandidatesCollection } from 'antlr4-c3';
-import { ImpalaSqlLexer } from '../lib/impala/ImpalaSqlLexer';
-import {
-    ImpalaSqlParser,
-    ProgramContext,
-    SingleStatementContext,
-} from '../lib/impala/ImpalaSqlParser';
-import BasicParser from './common/basicParser';
-import { ImpalaSqlParserListener } from '../lib/impala/ImpalaSqlParserListener';
-import { SyntaxContextType, Suggestions, SyntaxSuggestion } from './common/basic-parser-types';
+import { ImpalaSqlLexer } from '../../lib/impala/ImpalaSqlLexer';
+import { ImpalaSqlParser, ProgramContext } from '../../lib/impala/ImpalaSqlParser';
+import BasicParser from '../common/basicParser';
+import { EntityContextType, Suggestions, SyntaxSuggestion } from '../common/basic-parser-types';
+import { StmtContextType } from '../common/entityCollector';
+import { ImpalaSqlSplitListener } from './impalaSplitListener';
+import ImpalaEntityCollector from './impalaEntityCollector';
+
+export { ImpalaEntityCollector, ImpalaSqlSplitListener };

 export default class ImpalaSQL extends BasicParser<
     ImpalaSqlLexer,
@ -41,6 +41,10 @@ export default class ImpalaSQL extends BasicParser<
         return new ImpalaSqlSplitListener();
     }

+    protected createEntityCollector(input: string, caretTokenIndex?: number) {
+        return new ImpalaEntityCollector(input, caretTokenIndex);
+    }
+
     protected processCandidates(
         candidates: CandidatesCollection,
         allTokens: Token[],
@ -57,46 +61,46 @@ export default class ImpalaSQL extends BasicParser<
             caretTokenIndex + tokenIndexOffset + 1
         );

-        let syntaxContextType: SyntaxContextType;
+        let syntaxContextType: EntityContextType | StmtContextType;
         switch (ruleType) {
             case ImpalaSqlParser.RULE_functionNameCreate: {
-                syntaxContextType = SyntaxContextType.FUNCTION_CREATE;
+                syntaxContextType = EntityContextType.FUNCTION_CREATE;
                 break;
             }
             case ImpalaSqlParser.RULE_tableNameCreate: {
-                syntaxContextType = SyntaxContextType.TABLE_CREATE;
+                syntaxContextType = EntityContextType.TABLE_CREATE;
                 break;
             }
             case ImpalaSqlParser.RULE_databaseNameCreate: {
-                syntaxContextType = SyntaxContextType.DATABASE_CREATE;
+                syntaxContextType = EntityContextType.DATABASE_CREATE;
                 break;
             }
             case ImpalaSqlParser.RULE_viewNameCreate: {
-                syntaxContextType = SyntaxContextType.VIEW_CREATE;
+                syntaxContextType = EntityContextType.VIEW_CREATE;
                 break;
             }
             case ImpalaSqlParser.RULE_columnNamePathCreate: {
-                syntaxContextType = SyntaxContextType.COLUMN_CREATE;
+                syntaxContextType = EntityContextType.COLUMN_CREATE;
                 break;
             }
             case ImpalaSqlParser.RULE_databaseNamePath: {
-                syntaxContextType = SyntaxContextType.DATABASE;
+                syntaxContextType = EntityContextType.DATABASE;
                 break;
             }
             case ImpalaSqlParser.RULE_tableNamePath: {
-                syntaxContextType = SyntaxContextType.TABLE;
+                syntaxContextType = EntityContextType.TABLE;
                 break;
             }
             case ImpalaSqlParser.RULE_viewNamePath: {
-                syntaxContextType = SyntaxContextType.VIEW;
+                syntaxContextType = EntityContextType.VIEW;
                 break;
             }
             case ImpalaSqlParser.RULE_functionNamePath: {
-                syntaxContextType = SyntaxContextType.FUNCTION;
+                syntaxContextType = EntityContextType.FUNCTION;
                 break;
             }
             case ImpalaSqlParser.RULE_columnNamePath: {
-                syntaxContextType = SyntaxContextType.COLUMN;
+                syntaxContextType = EntityContextType.COLUMN;
             }
             default:
                 break;
@ -127,20 +131,3 @@ export default class ImpalaSQL extends BasicParser<
         };
     }
 }
-
-export class ImpalaSqlSplitListener implements ImpalaSqlParserListener {
-    private _statementContext: SingleStatementContext[] = [];
-
-    exitSingleStatement = (ctx: SingleStatementContext) => {
-        this._statementContext.push(ctx);
-    };
-
-    visitTerminal() {}
-    visitErrorNode() {}
-    enterEveryRule() {}
-    exitEveryRule() {}
-
-    get statementsContext() {
-        return this._statementContext;
-    }
-}
@ -4,5 +4,5 @@ export { default as HiveSQL } from './hive';
 export { default as FlinkSQL } from './flinksql';
 export { default as SparkSQL } from './spark';
 export { default as PostgresSQL } from './pgsql';
-export { default as TrinoSQL } from './trinosql';
+export { default as TrinoSQL } from './trino';
 export { default as ImpalaSQL } from './impala';
@ -1,10 +1,14 @@
 import { Token } from 'antlr4ng';
 import { CandidatesCollection } from 'antlr4-c3';
-import { MySqlLexer } from '../lib/mysql/MySqlLexer';
-import { MySqlParser, ProgramContext, SingleStatementContext } from '../lib/mysql/MySqlParser';
-import BasicParser from './common/basicParser';
-import { Suggestions, SyntaxContextType, SyntaxSuggestion } from './common/basic-parser-types';
-import { MySqlParserListener } from '../lib/mysql/MySqlParserListener';
+import { MySqlLexer } from '../../lib/mysql/MySqlLexer';
+import { MySqlParser, ProgramContext } from '../../lib/mysql/MySqlParser';
+import BasicParser from '../common/basicParser';
+import { Suggestions, EntityContextType, SyntaxSuggestion } from '../common/basic-parser-types';
+import { StmtContextType } from '../common/entityCollector';
+import MysqlSplitListener from './mysqlSplitListener';
+import MySqlEntityCollector from './mysqlEntityCollector';
+
+export { MySqlEntityCollector, MysqlSplitListener };

 export default class MySQL extends BasicParser<MySqlLexer, ProgramContext, MySqlParser> {
     protected createLexerFromCharStream(charStreams): MySqlLexer {
@ -33,6 +37,10 @@ export default class MySQL extends BasicParser<MySqlLexer, ProgramContext, MySql
         return new MysqlSplitListener();
     }

+    protected createEntityCollector(input: string, caretTokenIndex?: number) {
+        return new MySqlEntityCollector(input, caretTokenIndex);
+    }
+
     protected processCandidates(
         candidates: CandidatesCollection,
         allTokens: Token[],
@ -50,46 +58,46 @@ export default class MySQL extends BasicParser<MySqlLexer, ProgramContext, MySql
             caretTokenIndex + tokenIndexOffset + 1
         );

-        let syntaxContextType: SyntaxContextType;
+        let syntaxContextType: EntityContextType | StmtContextType;
         switch (ruleType) {
             case MySqlParser.RULE_databaseName: {
-                syntaxContextType = SyntaxContextType.DATABASE;
+                syntaxContextType = EntityContextType.DATABASE;
                 break;
             }
             case MySqlParser.RULE_databaseNameCreate: {
-                syntaxContextType = SyntaxContextType.DATABASE_CREATE;
+                syntaxContextType = EntityContextType.DATABASE_CREATE;
                 break;
             }
             case MySqlParser.RULE_tableName: {
-                syntaxContextType = SyntaxContextType.TABLE;
+                syntaxContextType = EntityContextType.TABLE;
                 break;
             }
             case MySqlParser.RULE_tableNameCreate: {
-                syntaxContextType = SyntaxContextType.TABLE_CREATE;
+                syntaxContextType = EntityContextType.TABLE_CREATE;
                 break;
             }
             case MySqlParser.RULE_viewName: {
-                syntaxContextType = SyntaxContextType.VIEW;
+                syntaxContextType = EntityContextType.VIEW;
                 break;
             }
             case MySqlParser.RULE_viewNameCreate: {
-                syntaxContextType = SyntaxContextType.VIEW_CREATE;
+                syntaxContextType = EntityContextType.VIEW_CREATE;
                 break;
             }
             case MySqlParser.RULE_functionName: {
-                syntaxContextType = SyntaxContextType.FUNCTION;
+                syntaxContextType = EntityContextType.FUNCTION;
                 break;
             }
             case MySqlParser.RULE_functionNameCreate: {
-                syntaxContextType = SyntaxContextType.FUNCTION_CREATE;
+                syntaxContextType = EntityContextType.FUNCTION_CREATE;
                 break;
             }
             case MySqlParser.RULE_columnName: {
-                syntaxContextType = SyntaxContextType.COLUMN;
+                syntaxContextType = EntityContextType.COLUMN;
                 break;
             }
             case MySqlParser.RULE_columnNameCreate: {
-                syntaxContextType = SyntaxContextType.COLUMN_CREATE;
+                syntaxContextType = EntityContextType.COLUMN_CREATE;
                 break;
             }
             default:
@ -122,20 +130,3 @@ export default class MySQL extends BasicParser<MySqlLexer, ProgramContext, MySql
         };
     }
 }
-
-export class MysqlSplitListener implements MySqlParserListener {
-    private _statementsContext: SingleStatementContext[] = [];
-
-    exitSingleStatement = (ctx: SingleStatementContext) => {
-        this._statementsContext.push(ctx);
-    };
-
-    visitTerminal() {}
-    visitErrorNode() {}
-    enterEveryRule() {}
-    exitEveryRule() {}
-
-    get statementsContext() {
-        return this._statementsContext;
-    }
-}
147 src/parser/mysql/mysqlEntityCollector.ts Normal file
@ -0,0 +1,147 @@
import type {
    ColumnCreateTableContext,
    ColumnNameCreateContext,
    CopyCreateTableContext,
    CreateDatabaseContext,
    CreateFunctionContext,
    CreateViewContext,
    DatabaseNameContext,
    DatabaseNameCreateContext,
    FunctionNameCreateContext,
    InsertStatementContext,
    QueryCreateTableContext,
    SelectExpressionContext,
    SelectStatementContext,
    SingleStatementContext,
    TableNameContext,
    TableNameCreateContext,
    ViewNameContext,
    ViewNameCreateContext,
} from '../../lib/mysql/MySqlParser';
import type { MySqlParserListener } from '../../lib/mysql/MySqlParserListener';
import { EntityContextType } from '../common/basic-parser-types';
import EntityCollector, { StmtContextType } from '../common/entityCollector';

export default class MySqlEntityCollector extends EntityCollector implements MySqlParserListener {
    /** ====== Entity Begin */
    exitDatabaseName(ctx: DatabaseNameContext) {
        this.pushEntity(ctx, EntityContextType.DATABASE);
    }

    exitDatabaseNameCreate(ctx: DatabaseNameCreateContext) {
        this.pushEntity(ctx, EntityContextType.DATABASE_CREATE);
    }

    exitTableName(ctx: TableNameContext) {
        this.pushEntity(ctx, EntityContextType.TABLE);
    }

    exitTableNameCreate(ctx: TableNameCreateContext) {
        this.pushEntity(ctx, EntityContextType.TABLE_CREATE);
    }

    exitViewName(ctx: ViewNameContext) {
        this.pushEntity(ctx, EntityContextType.VIEW);
    }

    exitViewNameCreate(ctx: ViewNameCreateContext) {
        this.pushEntity(ctx, EntityContextType.VIEW_CREATE);
    }

    exitFunctionNameCreate(ctx: FunctionNameCreateContext) {
        this.pushEntity(ctx, EntityContextType.FUNCTION_CREATE);
    }

    exitColumnNameCreate(ctx: ColumnNameCreateContext) {
        this.pushEntity(ctx, EntityContextType.COLUMN_CREATE);
    }

    /** ===== Statement begin */
    enterSingleStatement(ctx: SingleStatementContext) {
        this.pushStmt(ctx, StmtContextType.COMMON_STMT);
    }

    exitSingleStatement(ctx: SingleStatementContext) {
        this.popStmt();
    }

    enterQueryCreateTable(ctx: QueryCreateTableContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_TABLE_STMT);
    }

    exitQueryCreateTable(ctx: QueryCreateTableContext) {
        this.popStmt();
    }

    enterColumnCreateTable(ctx: ColumnCreateTableContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_TABLE_STMT);
    }

    exitColumnCreateTable(ctx: ColumnCreateTableContext) {
        this.popStmt();
    }

    enterCopyCreateTable(ctx: CopyCreateTableContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_TABLE_STMT);
    }

    exitCopyCreateTable(ctx: CopyCreateTableContext) {
        this.popStmt();
    }

    enterCreateView(ctx: CreateViewContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_VIEW_STMT);
    }

    exitCreateView(ctx: CreateViewContext) {
        this.popStmt();
    }

    enterSimpleSelect(ctx: SelectStatementContext) {
        this.pushStmt(ctx, StmtContextType.SELECT_STMT);
    }

    exitSimpleSelect(ctx: SelectStatementContext) {
        this.popStmt();
    }

    enterUnionAndLateralSelect(ctx: SelectStatementContext) {
        this.pushStmt(ctx, StmtContextType.SELECT_STMT);
    }

    exitUnionAndLateralSelect(ctx: SelectStatementContext) {
        this.popStmt();
    }

    enterSelectExpression(ctx: SelectStatementContext) {
        this.pushStmt(ctx, StmtContextType.SELECT_STMT);
    }

    exitSelectExpression(ctx: SelectStatementContext) {
        this.popStmt();
    }

    enterInsertStatement(ctx: InsertStatementContext) {
        this.pushStmt(ctx, StmtContextType.INSERT_STMT);
    }

    exitInsertStatement(ctx: InsertStatementContext) {
        this.popStmt();
    }

    enterCreateDatabase(ctx: CreateDatabaseContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_DATABASE_STMT);
    }

    exitCreateDatabase(ctx: CreateDatabaseContext) {
        this.popStmt();
    }

    enterCreateFunction(ctx: CreateFunctionContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_FUNCTION_STMT);
    }

    exitCreateFunction(ctx: CreateFunctionContext) {
        this.popStmt();
    }
}
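All of the dialect collectors in this change program against the same small surface from src/parser/common/entityCollector: pushEntity records a name with its entity type, while pushStmt/popStmt bracket the statement that name belongs to. The base class itself is outside this diff, so the following is only an illustrative sketch of that contract with deliberately simplified types, not the real implementation.

import { ParserRuleContext } from 'antlr4ng';

// Simplified stand-in types; the real collector also records positions and
// caret containment, and relates entities to their enclosing statements.
type SketchEntity = { text: string; type: string; belongsToStmt?: string };

abstract class EntityCollectorSketch {
    private readonly entities: SketchEntity[] = [];
    private readonly stmtStack: string[] = [];

    protected pushStmt(_ctx: ParserRuleContext, stmtType: string) {
        this.stmtStack.push(stmtType);
    }

    protected popStmt() {
        this.stmtStack.pop();
    }

    protected pushEntity(ctx: ParserRuleContext, entityType: string) {
        this.entities.push({
            text: ctx.getText(),
            type: entityType,
            belongsToStmt: this.stmtStack[this.stmtStack.length - 1],
        });
    }

    getAllEntities() {
        return this.entities;
    }
}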
12 src/parser/mysql/mysqlSplitListener.ts Normal file
@ -0,0 +1,12 @@
import { SingleStatementContext } from '../../lib/mysql/MySqlParser';
import { MySqlParserListener } from '../../lib/mysql/MySqlParserListener';
import SplitListener from '../common/splitListener';

export default class MysqlSplitListener
    extends SplitListener<SingleStatementContext>
    implements MySqlParserListener
{
    exitSingleStatement = (ctx: SingleStatementContext) => {
        this._statementsContext.push(ctx);
    };
}
@ -1,10 +1,15 @@
-import { Token } from 'antlr4ng';
 import { CandidatesCollection } from 'antlr4-c3';
-import { PostgreSQLLexer } from '../lib/pgsql/PostgreSQLLexer';
-import { PostgreSQLParser, ProgramContext, SingleStmtContext } from '../lib/pgsql/PostgreSQLParser';
-import BasicParser from './common/basicParser';
-import { PostgreSQLParserListener } from '../lib/pgsql/PostgreSQLParserListener';
-import { SyntaxContextType, Suggestions, SyntaxSuggestion } from './common/basic-parser-types';
+import { Token } from 'antlr4ng';
+
+import { PostgreSQLLexer } from '../../lib/pgsql/PostgreSQLLexer';
+import { PostgreSQLParser, ProgramContext } from '../../lib/pgsql/PostgreSQLParser';
+import { EntityContextType, Suggestions, SyntaxSuggestion } from '../common/basic-parser-types';
+import BasicParser from '../common/basicParser';
+import { StmtContextType } from '../common/entityCollector';
+import PostgreSQLEntityCollector from './postgreEntityCollector';
+import PostgreSqlSplitListener from './postgreSplitListener';
+
+export { PostgreSQLEntityCollector, PostgreSqlSplitListener };

 export default class PostgresSQL extends BasicParser<
     PostgreSQLLexer,
@ -38,7 +43,11 @@ export default class PostgresSQL extends BasicParser<
     ]);

     protected get splitListener() {
-        return new PgSqlSplitListener();
+        return new PostgreSqlSplitListener();
+    }
+
+    protected createEntityCollector(input: string, caretTokenIndex?: number) {
+        return new PostgreSQLEntityCollector(input, caretTokenIndex);
     }

     protected processCandidates(
@ -57,62 +66,62 @@ export default class PostgresSQL extends BasicParser<
             caretTokenIndex + tokenIndexOffset + 1
         );

-        let syntaxContextType: SyntaxContextType;
+        let syntaxContextType: EntityContextType | StmtContextType;
         switch (ruleType) {
             case PostgreSQLParser.RULE_table_name_create: {
-                syntaxContextType = SyntaxContextType.TABLE_CREATE;
+                syntaxContextType = EntityContextType.TABLE_CREATE;
                 break;
             }
             case PostgreSQLParser.RULE_table_name: {
-                syntaxContextType = SyntaxContextType.TABLE;
+                syntaxContextType = EntityContextType.TABLE;
                 break;
             }
             case PostgreSQLParser.RULE_function_name_create: {
-                syntaxContextType = SyntaxContextType.FUNCTION_CREATE;
+                syntaxContextType = EntityContextType.FUNCTION_CREATE;
                 break;
             }
             case PostgreSQLParser.RULE_function_name: {
-                syntaxContextType = SyntaxContextType.FUNCTION;
+                syntaxContextType = EntityContextType.FUNCTION;
                 break;
             }
             case PostgreSQLParser.RULE_schema_name_create: {
-                syntaxContextType = SyntaxContextType.DATABASE_CREATE;
+                syntaxContextType = EntityContextType.DATABASE_CREATE;
                 break;
             }
             case PostgreSQLParser.RULE_schema_name: {
-                syntaxContextType = SyntaxContextType.DATABASE;
+                syntaxContextType = EntityContextType.DATABASE;
                 break;
             }
             case PostgreSQLParser.RULE_view_name_create: {
-                syntaxContextType = SyntaxContextType.VIEW_CREATE;
+                syntaxContextType = EntityContextType.VIEW_CREATE;
                 break;
             }
             case PostgreSQLParser.RULE_view_name: {
-                syntaxContextType = SyntaxContextType.VIEW;
+                syntaxContextType = EntityContextType.VIEW;
                 break;
             }
             case PostgreSQLParser.RULE_database_name_create: {
-                syntaxContextType = SyntaxContextType.DATABASE_CREATE;
+                syntaxContextType = EntityContextType.DATABASE_CREATE;
                 break;
             }
             case PostgreSQLParser.RULE_database_name: {
-                syntaxContextType = SyntaxContextType.DATABASE;
+                syntaxContextType = EntityContextType.DATABASE;
                 break;
             }
             case PostgreSQLParser.RULE_procedure_name_create: {
-                syntaxContextType = SyntaxContextType.PROCEDURE_CREATE;
+                syntaxContextType = EntityContextType.PROCEDURE_CREATE;
                 break;
             }
             case PostgreSQLParser.RULE_procedure_name: {
-                syntaxContextType = SyntaxContextType.PROCEDURE;
+                syntaxContextType = EntityContextType.PROCEDURE;
                 break;
             }
             case PostgreSQLParser.RULE_column_name_create: {
-                syntaxContextType = SyntaxContextType.COLUMN_CREATE;
+                syntaxContextType = EntityContextType.COLUMN_CREATE;
                 break;
             }
             case PostgreSQLParser.RULE_column_name: {
-                syntaxContextType = SyntaxContextType.COLUMN;
+                syntaxContextType = EntityContextType.COLUMN;
                 break;
             }
             default:
@ -144,20 +153,3 @@ export default class PostgresSQL extends BasicParser<
         };
     }
 }
-
-export class PgSqlSplitListener implements PostgreSQLParserListener {
-    private _statementsContext: SingleStmtContext[] = [];
-
-    exitSingleStmt = (ctx: SingleStmtContext) => {
-        this._statementsContext.push(ctx);
-    };
-
-    visitTerminal() {}
-    visitErrorNode() {}
-    enterEveryRule() {}
-    exitEveryRule() {}
-
-    get statementsContext() {
-        return this._statementsContext;
-    }
-}
151 src/parser/pgsql/postgreEntityCollector.ts Normal file
@ -0,0 +1,151 @@
import type {
    ColumnCreateTableContext,
    ColumnNameCreateContext,
    CreateDatabaseContext,
    CreateForeignTableContext,
    CreateMaterializedViewContext,
    CreatePartitionForeignTableContext,
    CreateViewContext,
    CreatefunctionstmtContext,
    DatabaseNameContext,
    DatabaseNameCreateContext,
    FunctionNameCreateContext,
    InsertStatementContext,
    QueryCreateTableContext,
    SelectStatementContext,
    SingleStmtContext,
    TableNameContext,
    TableNameCreateContext,
    ViewNameContext,
    ViewNameCreateContext,
} from '../../lib/pgsql/PostgreSQLParser';
import type { PostgreSQLParserListener } from '../../lib/pgsql/PostgreSQLParserListener';
import { EntityContextType } from '../common/basic-parser-types';
import EntityCollector, { StmtContextType } from '../common/entityCollector';

export default class PostgreSQLEntityCollector
    extends EntityCollector
    implements PostgreSQLParserListener
{
    /** ====== Entity Begin */
    exitDatabaseName(ctx: DatabaseNameContext) {
        this.pushEntity(ctx, EntityContextType.DATABASE);
    }

    exitDatabaseNameCreate(ctx: DatabaseNameCreateContext) {
        this.pushEntity(ctx, EntityContextType.DATABASE_CREATE);
    }

    exitTableName(ctx: TableNameContext) {
        this.pushEntity(ctx, EntityContextType.TABLE);
    }

    exitTableNameCreate(ctx: TableNameCreateContext) {
        this.pushEntity(ctx, EntityContextType.TABLE_CREATE);
    }

    exitViewName(ctx: ViewNameContext) {
        this.pushEntity(ctx, EntityContextType.VIEW);
    }

    exitViewNameCreate(ctx: ViewNameCreateContext) {
        this.pushEntity(ctx, EntityContextType.VIEW_CREATE);
    }

    exitFunctionNameCreate(ctx: FunctionNameCreateContext) {
        this.pushEntity(ctx, EntityContextType.FUNCTION_CREATE);
    }

    exitColumnNameCreate(ctx: ColumnNameCreateContext) {
        this.pushEntity(ctx, EntityContextType.COLUMN_CREATE);
    }

    /** ===== Statement begin */
    enterSingleStatement(ctx: SingleStmtContext) {
        this.pushStmt(ctx, StmtContextType.COMMON_STMT);
    }

    exitSingleStatement(ctx: SingleStmtContext) {
        this.popStmt();
    }

    enterCreateDatabase(ctx: CreateDatabaseContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_DATABASE_STMT);
    }

    exitCreateDatabase(ctx: CreateDatabaseContext) {
        this.popStmt();
    }

    enterQueryCreateTable(ctx: QueryCreateTableContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_TABLE_STMT);
    }

    exitQueryCreateTable(ctx: QueryCreateTableContext) {
        this.popStmt();
    }

    enterColumnCreateTable(ctx: ColumnCreateTableContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_TABLE_STMT);
    }

    exitColumnCreateTable(ctx: ColumnCreateTableContext) {
        this.popStmt();
    }

    enterCreateForeignTable(ctx: CreateForeignTableContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_TABLE_STMT);
    }

    exitCreateForeignTable(ctx: CreateForeignTableContext) {
        this.popStmt();
    }

    enterCreatePartitionForeignTable(ctx: CreatePartitionForeignTableContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_TABLE_STMT);
    }

    exitCreatePartitionForeignTable(ctx: CreatePartitionForeignTableContext) {
        this.popStmt();
    }

    enterCreateView(ctx: CreateViewContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_VIEW_STMT);
    }

    exitCreateView(ctx: CreateViewContext) {
        this.popStmt();
    }

    enterCreateMaterializedView(ctx: CreateMaterializedViewContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_VIEW_STMT);
    }

    exitCreateMaterializedView(ctx: CreateMaterializedViewContext) {
        this.popStmt();
    }

    enterSelectStatement(ctx: SelectStatementContext) {
        this.pushStmt(ctx, StmtContextType.SELECT_STMT);
    }

    exitSelectStatement(ctx: SelectStatementContext) {
        this.popStmt();
    }

    enterInsertStatement(ctx: InsertStatementContext) {
        this.pushStmt(ctx, StmtContextType.INSERT_STMT);
    }

    exitInsertStatement(ctx: InsertStatementContext) {
        this.popStmt();
    }

    enterCreatefunctionstmt(ctx: CreatefunctionstmtContext) {
        this.pushStmt(ctx, StmtContextType.CREATE_FUNCTION_STMT);
    }

    exitCreatefunctionstmt(ctx: CreatefunctionstmtContext) {
        this.popStmt();
    }
}
12 src/parser/pgsql/postgreSplitListener.ts Normal file
@ -0,0 +1,12 @@
import { SingleStmtContext } from '../../lib/pgsql/PostgreSQLParser';
import { PostgreSQLParserListener } from '../../lib/pgsql/PostgreSQLParserListener';
import SplitListener from '../common/splitListener';

export default class PostgreSqlSplitListener
    extends SplitListener<SingleStmtContext>
    implements PostgreSQLParserListener
{
    exitSingleStmt = (ctx: SingleStmtContext) => {
        this._statementsContext.push(ctx);
    };
}
@ -21,6 +21,10 @@ export default class PLSQL extends BasicParser<PlSqlLexer, ProgramContext, PlSql
|
|||||||
return null as any;
|
return null as any;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
protected createEntityCollector(input: string, caretTokenIndex?: number) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
protected processCandidates(
|
protected processCandidates(
|
||||||
candidates: CandidatesCollection,
|
candidates: CandidatesCollection,
|
||||||
allTokens: Token[],
|
allTokens: Token[],
|
||||||
|
@ -1,14 +1,14 @@
|
|||||||
import { Token } from 'antlr4ng';
|
import { Token } from 'antlr4ng';
|
||||||
import { CandidatesCollection } from 'antlr4-c3';
|
import { CandidatesCollection } from 'antlr4-c3';
|
||||||
import { SparkSqlLexer } from '../lib/spark/SparkSqlLexer';
|
import { SparkSqlLexer } from '../../lib/spark/SparkSqlLexer';
|
||||||
import {
|
import { SparkSqlParser, ProgramContext } from '../../lib/spark/SparkSqlParser';
|
||||||
SparkSqlParser,
|
import BasicParser from '../common/basicParser';
|
||||||
ProgramContext,
|
import { Suggestions, EntityContextType, SyntaxSuggestion } from '../common/basic-parser-types';
|
||||||
SingleStatementContext,
|
import { StmtContextType } from '../common/entityCollector';
|
||||||
} from '../lib/spark/SparkSqlParser';
|
import SparkSqlSplitListener from './sparkSplitListener';
|
||||||
import BasicParser from './common/basicParser';
|
import SparkEntityCollector from './sparkEntityCollector';
|
||||||
import { Suggestions, SyntaxContextType, SyntaxSuggestion } from './common/basic-parser-types';
|
|
||||||
import { SparkSqlParserListener } from '../lib/spark/SparkSqlParserListener';
|
export { SparkSqlSplitListener, SparkEntityCollector };
|
||||||
|
|
||||||
export default class SparkSQL extends BasicParser<SparkSqlLexer, ProgramContext, SparkSqlParser> {
|
export default class SparkSQL extends BasicParser<SparkSqlLexer, ProgramContext, SparkSqlParser> {
|
||||||
protected createLexerFromCharStream(charStreams) {
|
protected createLexerFromCharStream(charStreams) {
|
||||||
@ -22,8 +22,8 @@ export default class SparkSQL extends BasicParser<SparkSqlLexer, ProgramContext,
|
|||||||
}
|
}
|
||||||
|
|
||||||
protected preferredRules: Set<number> = new Set([
|
protected preferredRules: Set<number> = new Set([
|
||||||
SparkSqlParser.RULE_dbSchemaName,
|
SparkSqlParser.RULE_namespaceName,
|
||||||
SparkSqlParser.RULE_dbSchemaNameCreate,
|
SparkSqlParser.RULE_namespaceNameCreate,
|
||||||
SparkSqlParser.RULE_tableName,
|
SparkSqlParser.RULE_tableName,
|
||||||
SparkSqlParser.RULE_tableNameCreate,
|
SparkSqlParser.RULE_tableNameCreate,
|
||||||
SparkSqlParser.RULE_viewName,
|
SparkSqlParser.RULE_viewName,
|
||||||
@ -38,6 +38,10 @@ export default class SparkSQL extends BasicParser<SparkSqlLexer, ProgramContext,
|
|||||||
return new SparkSqlSplitListener();
|
return new SparkSqlSplitListener();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
protected createEntityCollector(input: string, caretTokenIndex?: number) {
|
||||||
|
return new SparkEntityCollector(input, caretTokenIndex);
|
||||||
|
}
|
||||||
|
|
||||||
protected processCandidates(
|
protected processCandidates(
|
||||||
candidates: CandidatesCollection,
|
candidates: CandidatesCollection,
|
||||||
allTokens: Token[],
|
allTokens: Token[],
|
||||||
@ -55,46 +59,46 @@ export default class SparkSQL extends BasicParser<SparkSqlLexer, ProgramContext,
|
|||||||
caretTokenIndex + tokenIndexOffset + 1
|
caretTokenIndex + tokenIndexOffset + 1
|
||||||
);
|
);
|
||||||
|
|
||||||
let syntaxContextType: SyntaxContextType;
|
let syntaxContextType: EntityContextType | StmtContextType;
|
||||||
switch (ruleType) {
|
switch (ruleType) {
|
||||||
case SparkSqlParser.RULE_dbSchemaName: {
|
case SparkSqlParser.RULE_namespaceName: {
|
||||||
syntaxContextType = SyntaxContextType.DATABASE;
|
syntaxContextType = EntityContextType.DATABASE;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case SparkSqlParser.RULE_dbSchemaNameCreate: {
|
case SparkSqlParser.RULE_namespaceNameCreate: {
|
||||||
syntaxContextType = SyntaxContextType.DATABASE_CREATE;
|
syntaxContextType = EntityContextType.DATABASE_CREATE;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case SparkSqlParser.RULE_tableName: {
|
case SparkSqlParser.RULE_tableName: {
|
||||||
syntaxContextType = SyntaxContextType.TABLE;
|
syntaxContextType = EntityContextType.TABLE;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case SparkSqlParser.RULE_tableNameCreate: {
|
case SparkSqlParser.RULE_tableNameCreate: {
|
||||||
syntaxContextType = SyntaxContextType.TABLE_CREATE;
|
syntaxContextType = EntityContextType.TABLE_CREATE;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case SparkSqlParser.RULE_viewName: {
|
case SparkSqlParser.RULE_viewName: {
|
||||||
syntaxContextType = SyntaxContextType.VIEW;
|
syntaxContextType = EntityContextType.VIEW;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case SparkSqlParser.RULE_viewNameCreate: {
|
case SparkSqlParser.RULE_viewNameCreate: {
|
||||||
syntaxContextType = SyntaxContextType.VIEW_CREATE;
|
syntaxContextType = EntityContextType.VIEW_CREATE;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case SparkSqlParser.RULE_functionName: {
|
case SparkSqlParser.RULE_functionName: {
|
||||||
syntaxContextType = SyntaxContextType.FUNCTION;
|
syntaxContextType = EntityContextType.FUNCTION;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case SparkSqlParser.RULE_functionNameCreate: {
|
case SparkSqlParser.RULE_functionNameCreate: {
|
||||||
syntaxContextType = SyntaxContextType.FUNCTION_CREATE;
|
syntaxContextType = EntityContextType.FUNCTION_CREATE;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case SparkSqlParser.RULE_columnName: {
|
case SparkSqlParser.RULE_columnName: {
|
||||||
syntaxContextType = SyntaxContextType.COLUMN;
|
syntaxContextType = EntityContextType.COLUMN;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case SparkSqlParser.RULE_columnNameCreate: {
|
case SparkSqlParser.RULE_columnNameCreate: {
|
||||||
syntaxContextType = SyntaxContextType.COLUMN_CREATE;
|
syntaxContextType = EntityContextType.COLUMN_CREATE;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
default:
|
default:
|
||||||
@ -127,20 +131,3 @@ export default class SparkSQL extends BasicParser<SparkSqlLexer, ProgramContext,
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export class SparkSqlSplitListener implements SparkSqlParserListener {
|
|
||||||
private _statementsContext: SingleStatementContext[] = [];
|
|
||||||
|
|
||||||
exitSingleStatement = (ctx: SingleStatementContext) => {
|
|
||||||
this._statementsContext.push(ctx);
|
|
||||||
};
|
|
||||||
|
|
||||||
visitTerminal() {}
|
|
||||||
visitErrorNode() {}
|
|
||||||
enterEveryRule() {}
|
|
||||||
exitEveryRule() {}
|
|
||||||
|
|
||||||
get statementsContext() {
|
|
||||||
return this._statementsContext;
|
|
||||||
}
|
|
||||||
}
|
|
151
src/parser/spark/sparkEntityCollector.ts
Normal file
151
src/parser/spark/sparkEntityCollector.ts
Normal file
@ -0,0 +1,151 @@
|
|||||||
|
import type {
|
||||||
|
NamespaceNameContext,
|
||||||
|
NamespaceNameCreateContext,
|
||||||
|
SingleStatementContext,
|
||||||
|
TableNameContext,
|
||||||
|
TableNameCreateContext,
|
||||||
|
ViewNameContext,
|
||||||
|
ViewNameCreateContext,
|
||||||
|
FunctionNameCreateContext,
|
||||||
|
ColumnNameCreateContext,
|
||||||
|
CreateTableContext,
|
||||||
|
CreateTableLikeContext,
|
||||||
|
ReplaceTableContext,
|
||||||
|
QueryStatementContext,
|
||||||
|
InsertFromQueryContext,
|
||||||
|
MultipleInsertContext,
|
||||||
|
CreateViewContext,
|
||||||
|
CreateTempViewUsingContext,
|
||||||
|
CreateNamespaceContext,
|
||||||
|
CreateFunctionContext,
|
||||||
|
} from '../../lib/spark/SparkSqlParser';
|
||||||
|
import type { SparkSqlParserListener } from '../../lib/spark/SparkSqlParserListener';
|
||||||
|
import { EntityContextType } from '../common/basic-parser-types';
|
||||||
|
import EntityCollector, { StmtContextType } from '../common/entityCollector';
|
||||||
|
|
||||||
|
export default class SparkEntityCollector
|
||||||
|
extends EntityCollector
|
||||||
|
implements SparkSqlParserListener
|
||||||
|
{
|
||||||
|
/** ====== Entity Begin */
|
||||||
|
exitNamespaceName(ctx: NamespaceNameContext) {
|
||||||
|
this.pushEntity(ctx, EntityContextType.DATABASE);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitNamespaceNameCreate(ctx: NamespaceNameCreateContext) {
|
||||||
|
this.pushEntity(ctx, EntityContextType.DATABASE_CREATE);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitTableName(ctx: TableNameContext) {
|
||||||
|
this.pushEntity(ctx, EntityContextType.TABLE);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitTableNameCreate(ctx: TableNameCreateContext) {
|
||||||
|
this.pushEntity(ctx, EntityContextType.TABLE_CREATE);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitViewName(ctx: ViewNameContext) {
|
||||||
|
this.pushEntity(ctx, EntityContextType.VIEW);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitViewNameCreate(ctx: ViewNameCreateContext) {
|
||||||
|
this.pushEntity(ctx, EntityContextType.VIEW_CREATE);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitFunctionNameCreate(ctx: FunctionNameCreateContext) {
|
||||||
|
this.pushEntity(ctx, EntityContextType.FUNCTION_CREATE);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitColumnNameCreate(ctx: ColumnNameCreateContext) {
|
||||||
|
this.pushEntity(ctx, EntityContextType.COLUMN_CREATE);
|
||||||
|
}
|
||||||
|
|
||||||
|
/** ===== Statement begin */
|
||||||
|
enterSingleStatement(ctx: SingleStatementContext) {
|
||||||
|
this.pushStmt(ctx, StmtContextType.COMMON_STMT);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitSingleStatement(ctx: SingleStatementContext) {
|
||||||
|
this.popStmt();
|
||||||
|
}
|
||||||
|
|
||||||
|
enterCreateTable(ctx: CreateTableContext) {
|
||||||
|
this.pushStmt(ctx, StmtContextType.CREATE_TABLE_STMT);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitCreateTable(ctx: CreateTableContext) {
|
||||||
|
this.popStmt();
|
||||||
|
}
|
||||||
|
|
||||||
|
enterCreateTableLike(ctx: CreateTableLikeContext) {
|
||||||
|
this.pushStmt(ctx, StmtContextType.CREATE_TABLE_STMT);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitCreateTableLike(ctx: CreateTableLikeContext) {
|
||||||
|
this.popStmt();
|
||||||
|
}
|
||||||
|
|
||||||
|
enterReplaceTable(ctx: ReplaceTableContext) {
|
||||||
|
this.pushStmt(ctx, StmtContextType.CREATE_TABLE_STMT);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitReplaceTable(ctx: ReplaceTableContext) {
|
||||||
|
this.popStmt();
|
||||||
|
}
|
||||||
|
|
||||||
|
enterCreateView(ctx: CreateViewContext) {
|
||||||
|
this.pushStmt(ctx, StmtContextType.CREATE_VIEW_STMT);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitCreateView(ctx: CreateViewContext) {
|
||||||
|
this.popStmt();
|
||||||
|
}
|
||||||
|
|
||||||
|
enterCreateTempViewUsing(ctx: CreateTempViewUsingContext) {
|
||||||
|
this.pushStmt(ctx, StmtContextType.CREATE_VIEW_STMT);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitCreateTempViewUsing(ctx: CreateTempViewUsingContext) {
|
||||||
|
this.popStmt();
|
||||||
|
}
|
||||||
|
|
||||||
|
enterQueryStatement(ctx: QueryStatementContext) {
|
||||||
|
this.pushStmt(ctx, StmtContextType.SELECT_STMT);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitQueryStatement(ctx: QueryStatementContext) {
|
||||||
|
this.popStmt();
|
||||||
|
}
|
||||||
|
|
||||||
|
enterInsertFromQuery(ctx: InsertFromQueryContext) {
|
||||||
|
this.pushStmt(ctx, StmtContextType.INSERT_STMT);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitInsertFromQuery(ctx: InsertFromQueryContext) {
|
||||||
|
this.popStmt();
|
||||||
|
}
|
||||||
|
|
||||||
|
enterMultipleInsert(ctx: MultipleInsertContext) {
|
||||||
|
this.pushStmt(ctx, StmtContextType.INSERT_STMT);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitMultipleInsert(ctx: MultipleInsertContext) {
|
||||||
|
this.popStmt();
|
||||||
|
}
|
||||||
|
|
||||||
|
enterCreateNamespace(ctx: CreateNamespaceContext) {
|
||||||
|
this.pushStmt(ctx, StmtContextType.CREATE_DATABASE_STMT);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitCreateNamespace(ctx: CreateNamespaceContext) {
|
||||||
|
this.popStmt();
|
||||||
|
}
|
||||||
|
|
||||||
|
enterCreateFunction(ctx: CreateFunctionContext) {
|
||||||
|
this.pushStmt(ctx, StmtContextType.CREATE_FUNCTION_STMT);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitCreateFunction(ctx: CreateFunctionContext) {
|
||||||
|
this.popStmt();
|
||||||
|
}
|
||||||
|
}
|
13
src/parser/spark/sparkSplitListener.ts
Normal file
13
src/parser/spark/sparkSplitListener.ts
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
import { SingleStatementContext } from '../../lib/spark/SparkSqlParser';
|
||||||
|
|
||||||
|
import { SparkSqlParserListener } from '../../lib/spark/SparkSqlParserListener';
|
||||||
|
import SplitListener from '../common/splitListener';
|
||||||
|
|
||||||
|
export default class SparkSqlSplitListener
|
||||||
|
extends SplitListener<SingleStatementContext>
|
||||||
|
implements SparkSqlParserListener
|
||||||
|
{
|
||||||
|
exitSingleStatement = (ctx: SingleStatementContext) => {
|
||||||
|
this._statementsContext.push(ctx);
|
||||||
|
};
|
||||||
|
}
|
@ -1,14 +1,14 @@
|
|||||||
import { Token } from 'antlr4ng';
|
import { Token } from 'antlr4ng';
|
||||||
import { CandidatesCollection } from 'antlr4-c3';
|
import { CandidatesCollection } from 'antlr4-c3';
|
||||||
import { TrinoSqlLexer } from '../lib/trinosql/TrinoSqlLexer';
|
import { TrinoSqlLexer } from '../../lib/trinosql/TrinoSqlLexer';
|
||||||
import {
|
import { TrinoSqlParser, ProgramContext } from '../../lib/trinosql/TrinoSqlParser';
|
||||||
TrinoSqlParser,
|
import BasicParser from '../common/basicParser';
|
||||||
ProgramContext,
|
import { Suggestions, EntityContextType, SyntaxSuggestion } from '../common/basic-parser-types';
|
||||||
SingleStatementContext,
|
import { StmtContextType } from '../common/entityCollector';
|
||||||
} from '../lib/trinosql/TrinoSqlParser';
|
import TrinoSqlSplitListener from './trinoSplitListener';
|
||||||
import { TrinoSqlListener } from '../lib/trinosql/TrinoSqlListener';
|
import TrinoEntityCollector from './trinoEntityCollector';
|
||||||
import BasicParser from './common/basicParser';
|
|
||||||
import { Suggestions, SyntaxContextType, SyntaxSuggestion } from './common/basic-parser-types';
|
export { TrinoSqlSplitListener, TrinoEntityCollector };
|
||||||
|
|
||||||
export default class TrinoSQL extends BasicParser<TrinoSqlLexer, ProgramContext, TrinoSqlParser> {
|
export default class TrinoSQL extends BasicParser<TrinoSqlLexer, ProgramContext, TrinoSqlParser> {
|
||||||
protected createLexerFromCharStream(charStreams) {
|
protected createLexerFromCharStream(charStreams) {
|
||||||
@ -25,6 +25,10 @@ export default class TrinoSQL extends BasicParser<TrinoSqlLexer, ProgramContext,
|
|||||||
return new TrinoSqlSplitListener();
|
return new TrinoSqlSplitListener();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
protected createEntityCollector(input: string, caretTokenIndex?: number) {
|
||||||
|
return new TrinoEntityCollector(input, caretTokenIndex);
|
||||||
|
}
|
||||||
|
|
||||||
protected preferredRules: Set<number> = new Set([
|
protected preferredRules: Set<number> = new Set([
|
||||||
TrinoSqlParser.RULE_catalogName,
|
TrinoSqlParser.RULE_catalogName,
|
||||||
TrinoSqlParser.RULE_catalogNameCreate,
|
TrinoSqlParser.RULE_catalogNameCreate,
|
||||||
@ -56,46 +60,46 @@ export default class TrinoSQL extends BasicParser<TrinoSqlLexer, ProgramContext,
|
|||||||
caretTokenIndex + tokenIndexOffset + 1
|
caretTokenIndex + tokenIndexOffset + 1
|
||||||
);
|
);
|
||||||
|
|
||||||
let syntaxContextType: SyntaxContextType;
|
let syntaxContextType: EntityContextType | StmtContextType;
|
||||||
switch (ruleType) {
|
switch (ruleType) {
|
||||||
case TrinoSqlParser.RULE_catalogName: {
|
case TrinoSqlParser.RULE_catalogName: {
|
||||||
syntaxContextType = SyntaxContextType.CATALOG;
|
syntaxContextType = EntityContextType.CATALOG;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case TrinoSqlParser.RULE_schemaName: {
|
case TrinoSqlParser.RULE_schemaName: {
|
||||||
syntaxContextType = SyntaxContextType.DATABASE;
|
syntaxContextType = EntityContextType.DATABASE;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case TrinoSqlParser.RULE_schemaNameCreate: {
|
case TrinoSqlParser.RULE_schemaNameCreate: {
|
||||||
syntaxContextType = SyntaxContextType.DATABASE_CREATE;
|
syntaxContextType = EntityContextType.DATABASE_CREATE;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case TrinoSqlParser.RULE_tableName: {
|
case TrinoSqlParser.RULE_tableName: {
|
||||||
syntaxContextType = SyntaxContextType.TABLE;
|
syntaxContextType = EntityContextType.TABLE;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case TrinoSqlParser.RULE_tableNameCreate: {
|
case TrinoSqlParser.RULE_tableNameCreate: {
|
||||||
syntaxContextType = SyntaxContextType.TABLE_CREATE;
|
syntaxContextType = EntityContextType.TABLE_CREATE;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case TrinoSqlParser.RULE_viewName: {
|
case TrinoSqlParser.RULE_viewName: {
|
||||||
syntaxContextType = SyntaxContextType.VIEW;
|
syntaxContextType = EntityContextType.VIEW;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case TrinoSqlParser.RULE_viewNameCreate: {
|
case TrinoSqlParser.RULE_viewNameCreate: {
|
||||||
syntaxContextType = SyntaxContextType.VIEW_CREATE;
|
syntaxContextType = EntityContextType.VIEW_CREATE;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case TrinoSqlParser.RULE_functionName: {
|
case TrinoSqlParser.RULE_functionName: {
|
||||||
syntaxContextType = SyntaxContextType.FUNCTION;
|
syntaxContextType = EntityContextType.FUNCTION;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case TrinoSqlParser.RULE_columnNameCreate: {
|
case TrinoSqlParser.RULE_columnNameCreate: {
|
||||||
syntaxContextType = SyntaxContextType.COLUMN_CREATE;
|
syntaxContextType = EntityContextType.COLUMN_CREATE;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
case TrinoSqlParser.RULE_columnName: {
|
case TrinoSqlParser.RULE_columnName: {
|
||||||
syntaxContextType = SyntaxContextType.COLUMN;
|
syntaxContextType = EntityContextType.COLUMN;
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
default:
|
default:
|
||||||
@ -127,20 +131,3 @@ export default class TrinoSQL extends BasicParser<TrinoSqlLexer, ProgramContext,
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export class TrinoSqlSplitListener implements TrinoSqlListener {
|
|
||||||
private _statementsContext: SingleStatementContext[] = [];
|
|
||||||
|
|
||||||
exitSingleStatement = (ctx: SingleStatementContext) => {
|
|
||||||
this._statementsContext.push(ctx);
|
|
||||||
};
|
|
||||||
|
|
||||||
visitTerminal() {}
|
|
||||||
visitErrorNode() {}
|
|
||||||
enterEveryRule() {}
|
|
||||||
exitEveryRule() {}
|
|
||||||
|
|
||||||
get statementsContext() {
|
|
||||||
return this._statementsContext;
|
|
||||||
}
|
|
||||||
}
|
|
117
src/parser/trino/trinoEntityCollector.ts
Normal file
117
src/parser/trino/trinoEntityCollector.ts
Normal file
@ -0,0 +1,117 @@
|
|||||||
|
import type {
|
||||||
|
ColumnNameCreateContext,
|
||||||
|
CreateMaterializedViewContext,
|
||||||
|
CreateSchemaContext,
|
||||||
|
CreateTableAsSelectContext,
|
||||||
|
CreateTableContext,
|
||||||
|
CreateViewContext,
|
||||||
|
InsertIntoContext,
|
||||||
|
QueryStatementContext,
|
||||||
|
SchemaNameContext,
|
||||||
|
SchemaNameCreateContext,
|
||||||
|
SingleStatementContext,
|
||||||
|
TableNameContext,
|
||||||
|
TableNameCreateContext,
|
||||||
|
ViewNameContext,
|
||||||
|
ViewNameCreateContext,
|
||||||
|
} from '../../lib/trinosql/TrinoSqlParser';
|
||||||
|
import type { TrinoSqlListener } from '../../lib/trinosql/TrinoSqlListener';
|
||||||
|
import { EntityContextType } from '../common/basic-parser-types';
|
||||||
|
import EntityCollector, { StmtContextType } from '../common/entityCollector';
|
||||||
|
|
||||||
|
export default class TrinoEntityCollector extends EntityCollector implements TrinoSqlListener {
|
||||||
|
/** ====== Entity Begin */
|
||||||
|
exitSchemaName(ctx: SchemaNameContext) {
|
||||||
|
this.pushEntity(ctx, EntityContextType.DATABASE);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitSchemaNameCreate(ctx: SchemaNameCreateContext) {
|
||||||
|
this.pushEntity(ctx, EntityContextType.DATABASE_CREATE);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitTableName(ctx: TableNameContext) {
|
||||||
|
this.pushEntity(ctx, EntityContextType.TABLE);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitTableNameCreate(ctx: TableNameCreateContext) {
|
||||||
|
this.pushEntity(ctx, EntityContextType.TABLE_CREATE);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitViewName(ctx: ViewNameContext) {
|
||||||
|
this.pushEntity(ctx, EntityContextType.VIEW);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitViewNameCreate(ctx: ViewNameCreateContext) {
|
||||||
|
this.pushEntity(ctx, EntityContextType.VIEW_CREATE);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitColumnNameCreate(ctx: ColumnNameCreateContext) {
|
||||||
|
this.pushEntity(ctx, EntityContextType.COLUMN_CREATE);
|
||||||
|
}
|
||||||
|
|
||||||
|
/** ===== Statement begin */
|
||||||
|
|
||||||
|
enterSingleStatement(ctx: SingleStatementContext) {
|
||||||
|
this.pushStmt(ctx, StmtContextType.COMMON_STMT);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitSingleStatement(ctx: SingleStatementContext) {
|
||||||
|
this.popStmt();
|
||||||
|
}
|
||||||
|
|
||||||
|
enterCreateSchema(ctx: CreateSchemaContext) {
|
||||||
|
this.pushStmt(ctx, StmtContextType.CREATE_DATABASE_STMT);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitCreateSchema(ctx: CreateSchemaContext) {
|
||||||
|
this.popStmt();
|
||||||
|
}
|
||||||
|
|
||||||
|
enterCreateTableAsSelect(ctx: CreateTableAsSelectContext) {
|
||||||
|
this.pushStmt(ctx, StmtContextType.CREATE_TABLE_STMT);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitCreateTableAsSelect(ctx: CreateTableAsSelectContext) {
|
||||||
|
this.popStmt();
|
||||||
|
}
|
||||||
|
|
||||||
|
enterCreateTable(ctx: CreateTableContext) {
|
||||||
|
this.pushStmt(ctx, StmtContextType.CREATE_TABLE_STMT);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitCreateTable(ctx: CreateTableContext) {
|
||||||
|
this.popStmt();
|
||||||
|
}
|
||||||
|
|
||||||
|
enterCreateView(ctx: CreateViewContext) {
|
||||||
|
this.pushStmt(ctx, StmtContextType.CREATE_VIEW_STMT);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitCreateView(ctx: CreateViewContext) {
|
||||||
|
this.popStmt();
|
||||||
|
}
|
||||||
|
|
||||||
|
enterCreateMaterializedView(ctx: CreateMaterializedViewContext) {
|
||||||
|
this.pushStmt(ctx, StmtContextType.CREATE_VIEW_STMT);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitCreateMaterializedView(ctx: CreateMaterializedViewContext) {
|
||||||
|
this.popStmt();
|
||||||
|
}
|
||||||
|
|
||||||
|
enterQueryStatement(ctx: QueryStatementContext) {
|
||||||
|
this.pushStmt(ctx, StmtContextType.SELECT_STMT);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitQueryStatement(ctx: QueryStatementContext) {
|
||||||
|
this.popStmt();
|
||||||
|
}
|
||||||
|
|
||||||
|
enterInsertInto(ctx: InsertIntoContext) {
|
||||||
|
this.pushStmt(ctx, StmtContextType.INSERT_STMT);
|
||||||
|
}
|
||||||
|
|
||||||
|
exitInsertInto(ctx: InsertIntoContext) {
|
||||||
|
this.popStmt();
|
||||||
|
}
|
||||||
|
}
|
12
src/parser/trino/trinoSplitListener.ts
Normal file
12
src/parser/trino/trinoSplitListener.ts
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
import { SingleStatementContext } from '../../lib/trinosql/TrinoSqlParser';
|
||||||
|
import { TrinoSqlListener } from '../../lib/trinosql/TrinoSqlListener';
|
||||||
|
import SplitListener from '../common/splitListener';
|
||||||
|
|
||||||
|
export default class TrinoSqlSplitListener
|
||||||
|
extends SplitListener<SingleStatementContext>
|
||||||
|
implements TrinoSqlListener
|
||||||
|
{
|
||||||
|
exitSingleStatement = (ctx: SingleStatementContext) => {
|
||||||
|
this._statementsContext.push(ctx);
|
||||||
|
};
|
||||||
|
}
|
395
test/parser/flinksql/contextCollect/entityCollector.test.ts
Normal file
395
test/parser/flinksql/contextCollect/entityCollector.test.ts
Normal file
@ -0,0 +1,395 @@
|
|||||||
|
import fs from 'fs';
|
||||||
|
import path from 'path';
|
||||||
|
import { ParseTreeListener } from 'antlr4ng';
|
||||||
|
import FlinkSQL, { FlinkEntityCollector, FlinkSqlSplitListener } from 'src/parser/flinksql';
|
||||||
|
import { FlinkSqlParserListener } from 'src/lib/flinksql/FlinkSqlParserListener';
|
||||||
|
import { EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||||
|
import { StmtContextType } from 'src/parser/common/entityCollector';
|
||||||
|
|
||||||
|
const commonSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'common.sql'), 'utf-8');
|
||||||
|
|
||||||
|
describe('Flink entity collector tests', () => {
|
||||||
|
const flink = new FlinkSQL();
|
||||||
|
const parseTree = flink.parse(commonSql);
|
||||||
|
const splitListener = new FlinkSqlSplitListener();
|
||||||
|
flink.listen(splitListener as FlinkSqlParserListener, parseTree);
|
||||||
|
|
||||||
|
test('validate common sql', () => {
|
||||||
|
expect(flink.validate(commonSql).length).toBe(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('split results', () => {
|
||||||
|
expect(splitListener.statementsContext.length).toBe(12);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('create table by columns', () => {
|
||||||
|
const columnCreateTableContext = splitListener.statementsContext[0];
|
||||||
|
|
||||||
|
const collectListener = new FlinkEntityCollector(commonSql);
|
||||||
|
flink.listen(collectListener as ParseTreeListener, columnCreateTableContext);
|
||||||
|
|
||||||
|
const allEntities = collectListener.getEntities();
|
||||||
|
|
||||||
|
expect(allEntities.length).toBe(1);
|
||||||
|
|
||||||
|
const tableCreateEntity = allEntities[0];
|
||||||
|
|
||||||
|
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||||
|
expect(tableCreateEntity.text).toBe('MyTable');
|
||||||
|
expect(tableCreateEntity.position).toEqual({
|
||||||
|
startIndex: 13,
|
||||||
|
endIndex: 19,
|
||||||
|
line: 1,
|
||||||
|
startColumn: 14,
|
||||||
|
endColumn: 21,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||||
|
StmtContextType.CREATE_TABLE_STMT
|
||||||
|
);
|
||||||
|
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||||
|
startIndex: 0,
|
||||||
|
endIndex: 85,
|
||||||
|
startLine: 1,
|
||||||
|
endLine: 1,
|
||||||
|
startColumn: 1,
|
||||||
|
endColumn: 87,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(tableCreateEntity.relatedEntities).toBeNull();
|
||||||
|
|
||||||
|
expect(tableCreateEntity.columns.length).toBe(2);
|
||||||
|
tableCreateEntity.columns.forEach((columEntity) => {
|
||||||
|
expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
|
||||||
|
expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||||
|
expect(columEntity.text).toBe(
|
||||||
|
commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
|
||||||
|
);
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
test('create table as select', () => {
|
||||||
|
const createTableBySelectContext = splitListener.statementsContext[1];
|
||||||
|
|
||||||
|
const collectListener = new FlinkEntityCollector(commonSql);
|
||||||
|
flink.listen(collectListener as ParseTreeListener, createTableBySelectContext);
|
||||||
|
|
||||||
|
const allEntities = collectListener.getEntities();
|
||||||
|
|
||||||
|
expect(allEntities.length).toBe(2);
|
||||||
|
|
||||||
|
const tableCreateEntity = allEntities[0];
|
||||||
|
|
||||||
|
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||||
|
expect(tableCreateEntity.text).toBe('my_ctas_table');
|
||||||
|
expect(tableCreateEntity.position).toEqual({
|
||||||
|
startIndex: 102,
|
||||||
|
endIndex: 114,
|
||||||
|
line: 3,
|
||||||
|
startColumn: 14,
|
||||||
|
endColumn: 27,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||||
|
StmtContextType.CREATE_TABLE_STMT
|
||||||
|
);
|
||||||
|
expect(tableCreateEntity.belongStmt.position).toEqual({
|
||||||
|
startIndex: 89,
|
||||||
|
endIndex: 228,
|
||||||
|
startLine: 3,
|
||||||
|
endLine: 11,
|
||||||
|
startColumn: 1,
|
||||||
|
endColumn: 20,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(tableCreateEntity.columns).toBeNull();
|
||||||
|
|
||||||
|
expect(tableCreateEntity.relatedEntities.length).toBe(1);
|
||||||
|
tableCreateEntity.relatedEntities.forEach((relatedEntity) => {
|
||||||
|
expect(relatedEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(allEntities.some((en) => relatedEntity === en)).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(allEntities[1].text).toBe('source_table');
|
||||||
|
expect(allEntities[1].belongStmt.rootStmt).toBe(allEntities[0].belongStmt);
|
||||||
|
expect(allEntities[1].position).toEqual({
|
||||||
|
startIndex: 191,
|
||||||
|
endIndex: 202,
|
||||||
|
line: 9,
|
||||||
|
startColumn: 5,
|
||||||
|
endColumn: 17,
|
||||||
|
});
|
||||||
|
});
|
||||||
|
|
||||||
|
test('create table like', () => {
|
||||||
|
const createTableLikeContext = splitListener.statementsContext[2];
|
||||||
|
|
||||||
|
const collectListener = new FlinkEntityCollector(commonSql);
|
||||||
|
flink.listen(collectListener as ParseTreeListener, createTableLikeContext);
|
||||||
|
|
||||||
|
const allEntities = collectListener.getEntities();
|
||||||
|
|
||||||
|
expect(allEntities.length).toBe(2);
|
||||||
|
|
||||||
|
const tableCreateEntity = allEntities[0];
|
||||||
|
const originTableEntity = allEntities[1];
|
||||||
|
|
||||||
|
expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||||
|
expect(tableCreateEntity.text).toBe('Orders_with_watermark');
|
||||||
|
expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
|
||||||
|
StmtContextType.CREATE_TABLE_STMT
|
||||||
|
);
|
||||||
|
|
||||||
|
expect(tableCreateEntity.columns.length).toBe(1);
|
||||||
|
expect(tableCreateEntity.columns[0].text).toBe('id');
|
||||||
|
expect(tableCreateEntity.columns[0].entityContextType).toBe(
|
||||||
|
EntityContextType.COLUMN_CREATE
|
||||||
|
);
|
||||||
|
expect(tableCreateEntity.relatedEntities.length).toBe(1);
|
||||||
|
expect(tableCreateEntity.relatedEntities[0]).toBe(originTableEntity);
|
||||||
|
|
||||||
|
expect(originTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(originTableEntity.text).toBe('Orders_in_file');
|
||||||
|
expect(originTableEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('common select from table', () => {
|
||||||
|
const selectTableContext = splitListener.statementsContext[3];
|
||||||
|
const collectListener = new FlinkEntityCollector(commonSql);
|
||||||
|
flink.listen(collectListener as ParseTreeListener, selectTableContext);
|
||||||
|
|
||||||
|
const allEntities = collectListener.getEntities();
|
||||||
|
|
||||||
|
expect(allEntities.length).toBe(1);
|
||||||
|
|
||||||
|
const tableEntity = allEntities[0];
|
||||||
|
|
||||||
|
expect(tableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(tableEntity.text).toBe('Orders');
|
||||||
|
expect(tableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||||
|
|
||||||
|
expect(tableEntity.columns).toBeNull();
|
||||||
|
expect(tableEntity.relatedEntities).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
test('select from table join', () => {
|
||||||
|
const selectTableContext = splitListener.statementsContext[4];
|
||||||
|
const collectListener = new FlinkEntityCollector(commonSql);
|
||||||
|
flink.listen(collectListener as ParseTreeListener, selectTableContext);
|
||||||
|
|
||||||
|
const allEntities = collectListener.getEntities();
|
||||||
|
|
||||||
|
expect(allEntities.length).toBe(2);
|
||||||
|
|
||||||
|
const tableEntity1 = allEntities[0];
|
||||||
|
const tableEntity2 = allEntities[1];
|
||||||
|
|
||||||
|
expect(tableEntity1.entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(tableEntity1.text).toBe('Orders');
|
||||||
|
expect(tableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||||
|
|
||||||
|
expect(tableEntity1.columns).toBeNull();
|
||||||
|
expect(tableEntity1.relatedEntities).toBeNull();
|
||||||
|
|
||||||
|
expect(tableEntity2.entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(tableEntity2.text).toBe('Product');
|
||||||
|
expect(tableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||||
|
|
||||||
|
expect(tableEntity2.columns).toBeNull();
|
||||||
|
expect(tableEntity2.relatedEntities).toBeNull();
|
||||||
|
|
||||||
|
expect(tableEntity1.belongStmt).toBe(tableEntity2.belongStmt);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('union select', () => {
|
||||||
|
const selectTableContext = splitListener.statementsContext[5];
|
||||||
|
const collectListener = new FlinkEntityCollector(commonSql);
|
||||||
|
flink.listen(collectListener as ParseTreeListener, selectTableContext);
|
||||||
|
|
||||||
|
const allEntities = collectListener.getEntities();
|
||||||
|
|
||||||
|
expect(allEntities.length).toBe(2);
|
||||||
|
|
||||||
|
const tableEntity1 = allEntities[0];
|
||||||
|
const tableEntity2 = allEntities[1];
|
||||||
|
|
||||||
|
expect(tableEntity1.entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(tableEntity1.text).toBe('t1');
|
||||||
|
expect(tableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||||
|
|
||||||
|
expect(tableEntity1.columns).toBeNull();
|
||||||
|
expect(tableEntity1.relatedEntities).toBeNull();
|
||||||
|
|
||||||
|
expect(tableEntity2.entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(tableEntity2.text).toBe('t2');
|
||||||
|
expect(tableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||||
|
|
||||||
|
expect(tableEntity2.columns).toBeNull();
|
||||||
|
expect(tableEntity2.relatedEntities).toBeNull();
|
||||||
|
|
||||||
|
expect(tableEntity1.belongStmt.rootStmt).toBe(tableEntity2.belongStmt.rootStmt);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('insert into table values', () => {
|
||||||
|
const insertTableContext = splitListener.statementsContext[6];
|
||||||
|
const collectListener = new FlinkEntityCollector(commonSql);
|
||||||
|
flink.listen(collectListener as ParseTreeListener, insertTableContext);
|
||||||
|
|
||||||
|
const allEntities = collectListener.getEntities();
|
||||||
|
|
||||||
|
expect(allEntities.length).toBe(1);
|
||||||
|
|
||||||
|
const tableEntity = allEntities[0];
|
||||||
|
|
||||||
|
expect(tableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(tableEntity.text).toBe('country_page_view');
|
||||||
|
expect(tableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||||
|
|
||||||
|
expect(tableEntity.columns).toBeNull();
|
||||||
|
expect(tableEntity.relatedEntities).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
test('insert into table select', () => {
|
||||||
|
const insertTableContext = splitListener.statementsContext[7];
|
||||||
|
const collectListener = new FlinkEntityCollector(commonSql);
|
||||||
|
flink.listen(collectListener as ParseTreeListener, insertTableContext);
|
||||||
|
|
||||||
|
const allEntities = collectListener.getEntities();
|
||||||
|
|
||||||
|
expect(allEntities.length).toBe(2);
|
||||||
|
|
||||||
|
const insertTableEntity = allEntities[0];
|
||||||
|
const fromTableEntity1 = allEntities[1];
|
||||||
|
|
||||||
|
expect(insertTableEntity.entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(insertTableEntity.text).toBe('catalog1.db1.country_page_view');
|
||||||
|
expect(insertTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||||
|
|
||||||
|
expect(fromTableEntity1.entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(fromTableEntity1.text).toBe('page_view_source');
|
||||||
|
expect(fromTableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||||
|
|
||||||
|
expect(fromTableEntity1.belongStmt.parentStmt).toBe(insertTableEntity.belongStmt);
|
||||||
|
expect(fromTableEntity1.belongStmt.rootStmt).toBe(insertTableEntity.belongStmt);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('multiple insert', () => {
|
||||||
|
const insertTableContext = splitListener.statementsContext[8];
|
||||||
|
const collectListener = new FlinkEntityCollector(commonSql);
|
||||||
|
flink.listen(collectListener as ParseTreeListener, insertTableContext);
|
||||||
|
|
||||||
|
const allEntities = collectListener.getEntities();
|
||||||
|
|
||||||
|
expect(allEntities.length).toBe(2);
|
||||||
|
|
||||||
|
const insertTableEntity1 = allEntities[0];
|
||||||
|
const insertTableEntity2 = allEntities[1];
|
||||||
|
|
||||||
|
expect(insertTableEntity1.entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(insertTableEntity1.text).toBe('country_page_view1');
|
||||||
|
expect(insertTableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||||
|
|
||||||
|
expect(insertTableEntity2.entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(insertTableEntity2.text).toBe('country_page_view2');
|
||||||
|
expect(insertTableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
|
||||||
|
|
||||||
|
expect(insertTableEntity2.belongStmt.parentStmt).toBe(
|
||||||
|
insertTableEntity1.belongStmt.parentStmt
|
||||||
|
);
|
||||||
|
expect(insertTableEntity2.belongStmt.rootStmt).toBe(
|
||||||
|
insertTableEntity1.belongStmt.parentStmt
|
||||||
|
);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('create view as select table', () => {
|
||||||
|
const insertTableContext = splitListener.statementsContext[9];
|
||||||
|
const collectListener = new FlinkEntityCollector(commonSql);
|
||||||
|
flink.listen(collectListener as ParseTreeListener, insertTableContext);
|
||||||
|
|
||||||
|
const allEntities = collectListener.getEntities();
|
||||||
|
|
||||||
|
expect(allEntities.length).toBe(2);
|
||||||
|
|
||||||
|
expect(allEntities[0].entityContextType).toBe(EntityContextType.VIEW_CREATE);
|
||||||
|
expect(allEntities[0].text).toBe('view1');
|
||||||
|
expect(allEntities[0].belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
|
||||||
|
|
||||||
|
expect(allEntities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(allEntities[1].text).toBe('tbl');
|
||||||
|
expect(allEntities[1].belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('create database', () => {
|
||||||
|
const dbCreateContext = splitListener.statementsContext[10];
|
||||||
|
|
||||||
|
const collectListener = new FlinkEntityCollector(commonSql);
|
||||||
|
flink.listen(collectListener as ParseTreeListener, dbCreateContext);
|
||||||
|
|
||||||
|
const allEntities = collectListener.getEntities();
|
||||||
|
|
||||||
|
expect(allEntities.length).toBe(1);
|
||||||
|
|
||||||
|
const dbEntity = allEntities[0];
|
||||||
|
|
||||||
|
expect(dbEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
|
||||||
|
expect(dbEntity.text).toBe('db1');
|
||||||
|
expect(dbEntity.position).toEqual({
|
||||||
|
endColumn: 34,
|
||||||
|
endIndex: 1160,
|
||||||
|
line: 44,
|
||||||
|
startColumn: 31,
|
||||||
|
startIndex: 1158,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(dbEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_DATABASE_STMT);
|
||||||
|
expect(dbEntity.belongStmt.position).toEqual({
|
||||||
|
endColumn: 82,
|
||||||
|
endIndex: 1208,
|
||||||
|
endLine: 44,
|
||||||
|
startColumn: 1,
|
||||||
|
startIndex: 1128,
|
||||||
|
startLine: 44,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(dbEntity.columns).toBeNull();
|
||||||
|
expect(dbEntity.relatedEntities).toBeNull();
|
||||||
|
});
|
||||||
|
|
||||||
|
test('create function', () => {
|
||||||
|
const functionCreateContext = splitListener.statementsContext[11];
|
||||||
|
|
||||||
|
const collectListener = new FlinkEntityCollector(commonSql);
|
||||||
|
flink.listen(collectListener as ParseTreeListener, functionCreateContext);
|
||||||
|
|
||||||
|
const allEntities = collectListener.getEntities();
|
||||||
|
|
||||||
|
expect(allEntities.length).toBe(1);
|
||||||
|
|
||||||
|
const functionEntity = allEntities[0];
|
||||||
|
|
||||||
|
expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
|
||||||
|
expect(functionEntity.text).toBe('tempFunction');
|
||||||
|
expect(functionEntity.position).toEqual({
|
||||||
|
endColumn: 43,
|
||||||
|
endIndex: 1253,
|
||||||
|
line: 46,
|
||||||
|
startColumn: 31,
|
||||||
|
startIndex: 1242,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(functionEntity.belongStmt.stmtContextType).toBe(
|
||||||
|
StmtContextType.CREATE_FUNCTION_STMT
|
||||||
|
);
|
||||||
|
expect(functionEntity.belongStmt.position).toEqual({
|
||||||
|
endColumn: 58,
|
||||||
|
endIndex: 1268,
|
||||||
|
endLine: 46,
|
||||||
|
startColumn: 1,
|
||||||
|
startIndex: 1212,
|
||||||
|
startLine: 46,
|
||||||
|
});
|
||||||
|
|
||||||
|
expect(functionEntity.columns).toBeNull();
|
||||||
|
expect(functionEntity.relatedEntities).toBeNull();
|
||||||
|
});
|
||||||
|
});
|
46
test/parser/flinksql/contextCollect/fixtures/common.sql
Normal file
46
test/parser/flinksql/contextCollect/fixtures/common.sql
Normal file
@ -0,0 +1,46 @@
|
|||||||
|
CREATE TABLE MyTable ('user_id' BIGINT, 'name' STRING) WITH ('connector' = 'oracle-x');
|
||||||
|
|
||||||
|
CREATE TABLE my_ctas_table WITH ('connector' = 'kafka')
|
||||||
|
AS SELECT
|
||||||
|
id,
|
||||||
|
name,
|
||||||
|
age
|
||||||
|
FROM
|
||||||
|
source_table
|
||||||
|
WHERE
|
||||||
|
mod(id, 10) = 0;
|
||||||
|
|
||||||
|
CREATE TABLE Orders_with_watermark (
|
||||||
|
id INT,
|
||||||
|
WATERMARK FOR order_time AS order_time - INTERVAL '5' SECOND
|
||||||
|
) WITH (
|
||||||
|
'scan.startup.mode' = 'latest-offset'
|
||||||
|
) LIKE Orders_in_file (
|
||||||
|
EXCLUDING ALL
|
||||||
|
INCLUDING GENERATED
|
||||||
|
);
|
||||||
|
|
||||||
|
SELECT order_id, price + tax FROM Orders;
|
||||||
|
|
||||||
|
SELECT * FROM Orders LEFT JOIN Product ON Orders.product_id = Product.id;
|
||||||
|
|
||||||
|
(SELECT s FROM t1) UNION (SELECT s FROM t2);
|
||||||
|
|
||||||
|
INSERT INTO country_page_view VALUES ('Chinese', 'mumiao', 18), ('Amercian', 'georage', 22);
|
||||||
|
|
||||||
|
INSERT INTO catalog1.db1.country_page_view SELECT `user`, cnt FROM page_view_source;
|
||||||
|
|
||||||
|
EXECUTE STATEMENT SET BEGIN
|
||||||
|
INSERT INTO country_page_view1
|
||||||
|
VALUES ('Chinese', 'mumiao', 18),
|
||||||
|
('Amercian', 'georage', 22);
|
||||||
|
INSERT INTO country_page_view2
|
||||||
|
VALUES ('Chinese', 'mumiao', 18),
|
||||||
|
('Amercian', 'georage', 22);
|
||||||
|
END;
|
||||||
|
|
||||||
|
CREATE VIEW view1(col1, col2) AS SELECT col3, col4 FROM tbl;
|
||||||
|
|
||||||
|
CREATE DATABASE IF NOT EXISTS db1 WITH ('key1' = 'value1', 'key2.a' = 'value2.a');
|
||||||
|
|
||||||
|
CREATE FUNCTION IF NOT EXISTS tempFunction AS 'SimpleUdf';
|
@ -0,0 +1,11 @@
|
|||||||
|
SELECT FROM tb1;
|
||||||
|
|
||||||
|
SELECT col1, col2, FROM tb;
|
||||||
|
|
||||||
|
INSERT INTO insert_tb PARTITION (country, state) SELECT col1, col2, country, state FROM ( SELECT FROM inside_tb ) subquery;
|
||||||
|
|
||||||
|
INSERT INTO insert_tb PARTITION (country, state) SELECT col1, col2, country, state FROM ( SELECT id, FROM inside_tb ) subquery;
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS derived_table WITH ('connector' = 'kafka') AS SELECT FROM origin_table;
|
||||||
|
|
||||||
|
CREATE TABLE IF NOT EXISTS derived_table WITH ('connector' = 'kafka') AS SELECT id, FROM origin_table;
|
@ -32,7 +32,7 @@ SELECT col, FROM tb1;
|
|||||||
|
|
||||||
SELECT * FROM tb ORDER BY ;
|
SELECT * FROM tb ORDER BY ;
|
||||||
|
|
||||||
SELECT * FROM tb GROUP BY tb. ;
|
SELECT * FROM tb GROUP BY ;
|
||||||
|
|
||||||
INSERT INTO tb (col, tb.c );
|
INSERT INTO tb (col, tb.c );
|
||||||
|
|
||||||
|
@ -1,7 +1,7 @@
|
|||||||
import fs from 'fs';
|
import fs from 'fs';
|
||||||
import path from 'path';
|
import path from 'path';
|
||||||
import FlinkSQL from 'src/parser/flinksql';
|
import FlinkSQL from 'src/parser/flinksql';
|
||||||
import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
|
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||||
|
|
||||||
const syntaxSql = fs.readFileSync(
|
const syntaxSql = fs.readFileSync(
|
||||||
path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
|
path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
|
||||||
@ -18,7 +18,7 @@ describe('FlinkSQL Multiple Statements Syntax Suggestion', () => {
|
|||||||
};
|
};
|
||||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||||
const suggestion = syntaxes?.find(
|
const suggestion = syntaxes?.find(
|
||||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||||
);
|
);
|
||||||
|
|
||||||
expect(suggestion).not.toBeUndefined();
|
expect(suggestion).not.toBeUndefined();
|
||||||
@ -32,7 +32,7 @@ describe('FlinkSQL Multiple Statements Syntax Suggestion', () => {
|
|||||||
};
|
};
|
||||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||||
const suggestion = syntaxes?.find(
|
const suggestion = syntaxes?.find(
|
||||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
|
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
|
||||||
);
|
);
|
||||||
|
|
||||||
expect(suggestion).not.toBeUndefined();
|
expect(suggestion).not.toBeUndefined();
|
||||||
@ -46,7 +46,7 @@ describe('FlinkSQL Multiple Statements Syntax Suggestion', () => {
|
|||||||
};
|
};
|
||||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||||
const suggestion = syntaxes?.find(
|
const suggestion = syntaxes?.find(
|
||||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||||
);
|
);
|
||||||
|
|
||||||
expect(suggestion).not.toBeUndefined();
|
expect(suggestion).not.toBeUndefined();
|
||||||
@ -60,7 +60,7 @@ describe('FlinkSQL Multiple Statements Syntax Suggestion', () => {
|
|||||||
};
|
};
|
||||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||||
const suggestion = syntaxes?.find(
|
const suggestion = syntaxes?.find(
|
||||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||||
);
|
);
|
||||||
|
|
||||||
expect(suggestion).not.toBeUndefined();
|
expect(suggestion).not.toBeUndefined();
|
||||||
|
162
test/parser/flinksql/suggestion/suggestionWithEntity.test.ts
Normal file
162
test/parser/flinksql/suggestion/suggestionWithEntity.test.ts
Normal file
@ -0,0 +1,162 @@
|
|||||||
|
import fs from 'fs';
|
||||||
|
import path from 'path';
|
||||||
|
import FlinkSQL from 'src/parser/flinksql';
|
||||||
|
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||||
|
import { commentOtherLine } from 'test/helper';
|
||||||
|
|
||||||
|
const syntaxSql = fs.readFileSync(
|
||||||
|
path.join(__dirname, 'fixtures', 'suggestionWithEntity.sql'),
|
||||||
|
'utf-8'
|
||||||
|
);
|
||||||
|
|
||||||
|
describe('Flink SQL Syntax Suggestion with collect entity', () => {
|
||||||
|
const flink = new FlinkSQL();
|
||||||
|
|
||||||
|
test('Validate Syntax SQL', () => {
|
||||||
|
expect(flink.validate(syntaxSql).length).not.toBe(0);
|
||||||
|
});
|
||||||
|
|
||||||
|
test('select with no columns', () => {
|
||||||
|
const pos: CaretPosition = {
|
||||||
|
lineNumber: 1,
|
||||||
|
column: 8,
|
||||||
|
};
|
||||||
|
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||||
|
|
||||||
|
const parseTree = flink.parse(sql);
|
||||||
|
|
||||||
|
const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||||
|
const suggestion = syntaxes?.find(
|
||||||
|
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||||
|
);
|
||||||
|
expect(suggestion).not.toBeUndefined();
|
||||||
|
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||||
|
|
||||||
|
const entities = flink.getAllEntities(sql, pos);
|
||||||
|
expect(entities.length).toBe(1);
|
||||||
|
expect(entities[0].text).toBe('tb1');
|
||||||
|
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
test('select with columns with columns and trailing comma', () => {
|
||||||
|
const pos: CaretPosition = {
|
||||||
|
lineNumber: 3,
|
||||||
|
column: 20,
|
||||||
|
};
|
||||||
|
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||||
|
|
||||||
|
const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||||
|
const suggestion = syntaxes?.find(
|
||||||
|
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||||
|
);
|
||||||
|
expect(suggestion).not.toBeUndefined();
|
||||||
|
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||||
|
|
||||||
|
const entities = flink.getAllEntities(sql, pos);
|
||||||
|
expect(entities.length).toBe(1);
|
||||||
|
expect(entities[0].text).toBe('tb');
|
||||||
|
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
test('insert into from nested query with no column', () => {
|
||||||
|
const pos: CaretPosition = {
|
||||||
|
lineNumber: 5,
|
||||||
|
column: 98,
|
||||||
|
};
|
||||||
|
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||||
|
|
||||||
|
const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||||
|
const suggestion = syntaxes?.find(
|
||||||
|
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||||
|
);
|
||||||
|
expect(suggestion).not.toBeUndefined();
|
||||||
|
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||||
|
|
||||||
|
const entities = flink.getAllEntities(sql, pos);
|
||||||
|
expect(entities.length).toBe(2);
|
||||||
|
expect(entities[0].text).toBe('insert_tb');
|
||||||
|
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||||
|
|
||||||
|
expect(entities[1].text).toBe('inside_tb');
|
||||||
|
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
test('insert into from nested query with columns and trailing comma', () => {
|
||||||
|
const pos: CaretPosition = {
|
||||||
|
lineNumber: 7,
|
||||||
|
column: 102,
|
||||||
|
};
|
||||||
|
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||||
|
|
||||||
|
const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||||
|
const suggestion = syntaxes?.find(
|
||||||
|
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||||
|
);
|
||||||
|
expect(suggestion).not.toBeUndefined();
|
||||||
|
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||||
|
|
||||||
|
const entities = flink.getAllEntities(sql, pos);
|
||||||
|
expect(entities.length).toBe(2);
|
||||||
|
expect(entities[0].text).toBe('insert_tb');
|
||||||
|
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||||
|
|
||||||
|
expect(entities[1].text).toBe('inside_tb');
|
||||||
|
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
test('create table as select with no column', () => {
|
||||||
|
const pos: CaretPosition = {
|
||||||
|
lineNumber: 9,
|
||||||
|
column: 82,
|
||||||
|
};
|
||||||
|
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||||
|
|
||||||
|
const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||||
|
const suggestion = syntaxes?.find(
|
||||||
|
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||||
|
);
|
||||||
|
expect(suggestion).not.toBeUndefined();
|
||||||
|
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||||
|
|
||||||
|
const entities = flink.getAllEntities(sql, pos);
|
||||||
|
expect(entities.length).toBe(2);
|
||||||
|
expect(entities[0].text).toBe('derived_table');
|
||||||
|
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||||
|
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||||
|
|
||||||
|
expect(entities[1].text).toBe('origin_table');
|
||||||
|
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||||
|
});
|
||||||
|
|
||||||
|
test('create table as select with columns and trailing comma', () => {
|
||||||
|
const pos: CaretPosition = {
|
||||||
|
lineNumber: 11,
|
||||||
|
column: 86,
|
||||||
|
};
|
||||||
|
const sql = commentOtherLine(syntaxSql, pos.lineNumber);
|
||||||
|
|
||||||
|
const syntaxes = flink.getSuggestionAtCaretPosition(sql, pos)?.syntax;
|
||||||
|
const suggestion = syntaxes?.find(
|
||||||
|
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||||
|
);
|
||||||
|
expect(suggestion).not.toBeUndefined();
|
||||||
|
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||||
|
|
||||||
|
const entities = flink.getAllEntities(sql, pos);
|
||||||
|
expect(entities.length).toBe(2);
|
||||||
|
expect(entities[0].text).toBe('derived_table');
|
||||||
|
expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
|
||||||
|
expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||||
|
|
||||||
|
expect(entities[1].text).toBe('origin_table');
|
||||||
|
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||||
|
});
|
||||||
|
});
|
@ -1,7 +1,7 @@
 import fs from 'fs';
 import path from 'path';
 import FlinkSQL from 'src/parser/flinksql';
-import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
+import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
 
 const syntaxSql = fs.readFileSync(
@ -28,7 +28,7 @@ describe('Flink SQL Syntax Suggestion', () => {
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.CATALOG
+            (syn) => syn.syntaxContextType === EntityContextType.CATALOG
         );
 
         expect(suggestion).not.toBeUndefined();
@ -45,7 +45,7 @@ describe('Flink SQL Syntax Suggestion', () => {
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
         );
 
         expect(suggestion).not.toBeUndefined();
@ -62,7 +62,7 @@ describe('Flink SQL Syntax Suggestion', () => {
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
         );
 
         expect(suggestion).not.toBeUndefined();
@ -79,7 +79,7 @@ describe('Flink SQL Syntax Suggestion', () => {
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
+            (syn) => syn.syntaxContextType === EntityContextType.DATABASE
         );
 
         expect(suggestion).not.toBeUndefined();
@ -96,7 +96,7 @@ describe('Flink SQL Syntax Suggestion', () => {
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
+            (syn) => syn.syntaxContextType === EntityContextType.DATABASE
         );
 
         expect(suggestion).not.toBeUndefined();
@ -113,7 +113,7 @@ describe('Flink SQL Syntax Suggestion', () => {
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
+            (syn) => syn.syntaxContextType === EntityContextType.VIEW
         );
 
         expect(suggestion).not.toBeUndefined();
@ -130,7 +130,7 @@ describe('Flink SQL Syntax Suggestion', () => {
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
+            (syn) => syn.syntaxContextType === EntityContextType.VIEW
         );
 
         expect(suggestion).not.toBeUndefined();
@ -147,7 +147,7 @@ describe('Flink SQL Syntax Suggestion', () => {
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.VIEW_CREATE
         );
 
         expect(suggestion).not.toBeUndefined();
@ -164,7 +164,7 @@ describe('Flink SQL Syntax Suggestion', () => {
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
+            (syn) => syn.syntaxContextType === EntityContextType.FUNCTION
         );
 
         expect(suggestion).not.toBeUndefined();
@ -181,7 +181,7 @@ describe('Flink SQL Syntax Suggestion', () => {
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.FUNCTION_CREATE
         );
 
         expect(suggestion).not.toBeUndefined();
@ -198,7 +198,7 @@ describe('Flink SQL Syntax Suggestion', () => {
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
+            (syn) => syn.syntaxContextType === EntityContextType.VIEW
         );
 
         expect(suggestion).not.toBeUndefined();
@ -215,7 +215,7 @@ describe('Flink SQL Syntax Suggestion', () => {
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
         );
 
         expect(suggestion).not.toBeUndefined();
@ -232,7 +232,7 @@ describe('Flink SQL Syntax Suggestion', () => {
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
+            (syn) => syn.syntaxContextType === EntityContextType.VIEW
         );
 
         expect(suggestion).not.toBeUndefined();
@ -249,7 +249,7 @@ describe('Flink SQL Syntax Suggestion', () => {
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
         );
 
         expect(suggestion).not.toBeUndefined();
@ -266,7 +266,7 @@ describe('Flink SQL Syntax Suggestion', () => {
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
         );
 
         expect(suggestion).not.toBeUndefined();
@ -283,7 +283,7 @@ describe('Flink SQL Syntax Suggestion', () => {
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
         );
 
         expect(suggestion).not.toBeUndefined();
@ -300,7 +300,7 @@ describe('Flink SQL Syntax Suggestion', () => {
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
         );
 
         expect(suggestion).not.toBeUndefined();
@ -310,18 +310,18 @@ describe('Flink SQL Syntax Suggestion', () => {
     test('Select group by column', () => {
         const pos: CaretPosition = {
             lineNumber: 35,
-            column: 30,
+            column: 27,
         };
         const syntaxes = parser.getSuggestionAtCaretPosition(
             commentOtherLine(syntaxSql, pos.lineNumber),
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
         );
 
         expect(suggestion).not.toBeUndefined();
-        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['tb', '.']);
+        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
     });
 
     test('Insert into spec columns', () => {
@ -334,7 +334,7 @@ describe('Flink SQL Syntax Suggestion', () => {
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
         );
 
         expect(suggestion).not.toBeUndefined();
@ -351,7 +351,7 @@ describe('Flink SQL Syntax Suggestion', () => {
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
         );
 
         expect(suggestion).not.toBeUndefined();
@ -368,7 +368,7 @@ describe('Flink SQL Syntax Suggestion', () => {
             pos
         )?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
        );
 
         expect(suggestion).not.toBeUndefined();
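
These suggestion-test hunks only swap the enum name, but they also document the lookup pattern the parsers expose. The following is an illustrative sketch assembled from the calls visible in this diff (FlinkSQL, getSuggestionAtCaretPosition, EntityContextType); the SQL snippet and caret position are invented placeholders, not part of the commit.

import FlinkSQL from 'src/parser/flinksql';
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';

// Hypothetical input: ask what belongs right after "SELECT " in a tiny query.
const parser = new FlinkSQL();
const pos: CaretPosition = { lineNumber: 1, column: 8 };
const syntaxes = parser.getSuggestionAtCaretPosition('SELECT  FROM tb1;', pos)?.syntax;
const columnSuggestion = syntaxes?.find(
    (syn) => syn.syntaxContextType === EntityContextType.COLUMN
);
// columnSuggestion?.wordRanges lists the tokens already typed at the caret (empty here).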
test/parser/hive/contextCollect/entityCollector.test.ts (new file, 752 lines)
@ -0,0 +1,752 @@
import { ParseTreeListener } from 'antlr4ng';
import fs from 'fs';
import path from 'path';
import { HiveSqlParserListener } from 'src/lib/hive/HiveSqlParserListener';
import { EntityContextType } from 'src/parser/common/basic-parser-types';
import HiveSQL, { HiveEntityCollector } from 'src/parser/hive';
import { HiveSqlSplitListener } from 'src/parser/hive/hiveSplitListener';
import { StmtContextType } from 'src/parser/common/entityCollector';

const commonSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'common.sql'), 'utf-8');

describe('Hive entity collector tests', () => {
    const hiveSql = new HiveSQL();
    const parseTree = hiveSql.parse(commonSql);
    const splitListener = new HiveSqlSplitListener();
    hiveSql.listen(splitListener as HiveSqlParserListener, parseTree);

    test('validate common sql', () => {
        expect(hiveSql.validate(commonSql).length).toBe(0);
    });

    test('split results', () => {
        expect(splitListener.statementsContext.length).toBe(18);
    });

    test('create table by like', () => {
        const columnCreateTableContext = splitListener.statementsContext[0];

        const collectListener = new HiveEntityCollector(commonSql);
        hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const tableCreateEntity = allEntities[0];
        const tableLikeEntity = allEntities[1];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('copy_table');
        expect(tableCreateEntity.position).toEqual({
            endColumn: 48,
            endIndex: 46,
            line: 1,
            startColumn: 38,
            startIndex: 37,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            endColumn: 66,
            endIndex: 64,
            startLine: 1,
            endLine: 1,
            startIndex: 0,
            startColumn: 1,
        });

        expect(tableCreateEntity.relatedEntities).not.toBeNull();
        expect(tableCreateEntity.relatedEntities[0]).toEqual(tableLikeEntity);
        expect(tableCreateEntity.columns).toBeNull();

        expect(tableLikeEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableLikeEntity.text).toBe('origin_table');
        expect(tableLikeEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
    });

    test('create table by columns', () => {
        const columnCreateTableContext = splitListener.statementsContext[1];

        const collectListener = new HiveEntityCollector(commonSql);
        hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('list_bucket_multiple');
        expect(tableCreateEntity.position).toEqual({
            endColumn: 67,
            endIndex: 133,
            line: 3,
            startColumn: 47,
            startIndex: 114,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            endColumn: 132,
            endIndex: 198,
            endLine: 3,
            startColumn: 1,
            startIndex: 68,
            startLine: 3,
        });

        expect(tableCreateEntity.relatedEntities).toBeNull();
        expect(tableCreateEntity.columns).not.toBeNull();
        expect(tableCreateEntity.columns.length).toBe(3);
        tableCreateEntity.columns.forEach((columEntity) => {
            expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
            expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
            expect(columEntity.text).toBe(
                commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
            );
        });
    });

    test('create table by select', () => {
        const columnCreateTableContext = splitListener.statementsContext[2];

        const collectListener = new HiveEntityCollector(commonSql);
        hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const tableCreateEntity = allEntities[0];
        const tableFromEntity = allEntities[1];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('derived_table');
        expect(tableCreateEntity.position).toEqual({
            endColumn: 41,
            endIndex: 241,
            line: 5,
            startColumn: 28,
            startIndex: 229,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            endColumn: 17,
            endIndex: 279,
            endLine: 9,
            startColumn: 1,
            startIndex: 202,
            startLine: 5,
        });

        expect(tableCreateEntity.relatedEntities).not.toBeNull();
        expect(tableCreateEntity.relatedEntities[0]).toBe(tableFromEntity);
        expect(tableCreateEntity.columns).toBeNull();

        expect(tableFromEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableFromEntity.text).toBe('origin_table');
        expect(tableFromEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
    });

    test('create view by select', () => {
        const columnCreateTableContext = splitListener.statementsContext[3];

        const collectListener = new HiveEntityCollector(commonSql);
        hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const viewCreateEntity = allEntities[0];
        const viewSelectEntity = allEntities[1];

        expect(viewCreateEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
        expect(viewCreateEntity.text).toBe('mydb.bro_view');
        expect(viewCreateEntity.position).toEqual({
            endColumn: 26,
            endIndex: 307,
            line: 11,
            startColumn: 13,
            startIndex: 295,
        });

        expect(viewCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
        expect(viewCreateEntity.belongStmt.position).toEqual({
            endColumn: 31,
            endIndex: 338,
            endLine: 12,
            startColumn: 1,
            startIndex: 283,
            startLine: 11,
        });

        expect(viewCreateEntity.relatedEntities).not.toBeNull();
        expect(viewCreateEntity.relatedEntities[0]).toBe(viewSelectEntity);
        expect(viewCreateEntity.columns).toBeNull();

        expect(viewSelectEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(viewSelectEntity.text).toBe('mydb.sale_tbl');
        expect(viewSelectEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
    });

    test('create view columns by select', () => {
        const columnCreateTableContext = splitListener.statementsContext[4];

        const collectListener = new HiveEntityCollector(commonSql);
        hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const viewCreateEntity = allEntities[0];
        const viewSelectEntity = allEntities[1];

        expect(viewCreateEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
        expect(viewCreateEntity.text).toBe('mydb.task_view');
        expect(viewCreateEntity.position).toEqual({
            endColumn: 27,
            endIndex: 367,
            line: 14,
            startColumn: 13,
            startIndex: 354,
        });

        expect(viewCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
        expect(viewCreateEntity.belongStmt.position).toEqual({
            endColumn: 21,
            endIndex: 596,
            endLine: 25,
            startColumn: 1,
            startIndex: 342,
            startLine: 14,
        });

        expect(viewCreateEntity.relatedEntities).not.toBeNull();
        expect(viewCreateEntity.relatedEntities[0]).toBe(viewSelectEntity);
        expect(viewCreateEntity.columns).not.toBeNull();
        expect(viewCreateEntity.columns.length).toBe(3);
        viewCreateEntity.columns.forEach((columEntity) => {
            expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
            expect(columEntity.belongStmt).toBe(viewCreateEntity.belongStmt);
            expect(columEntity.text).toBe(
                commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
            );
        });

        expect(viewSelectEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(viewSelectEntity.text).toBe('task_tbl');
        expect(viewSelectEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
    });

    test('create materialized view by select', () => {
        const columnCreateTableContext = splitListener.statementsContext[5];

        const collectListener = new HiveEntityCollector(commonSql);
        hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const viewCreateEntity = allEntities[0];
        const viewSelectEntity = allEntities[1];

        expect(viewCreateEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
        expect(viewCreateEntity.text).toBe('mydb.bro_view');
        expect(viewCreateEntity.position).toEqual({
            endColumn: 53,
            endIndex: 651,
            line: 27,
            startColumn: 40,
            startIndex: 639,
        });

        expect(viewCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
        expect(viewCreateEntity.belongStmt.position).toEqual({
            endColumn: 31,
            endIndex: 715,
            endLine: 30,
            startColumn: 1,
            startIndex: 600,
            startLine: 27,
        });

        expect(viewCreateEntity.relatedEntities).not.toBeNull();
        expect(viewCreateEntity.relatedEntities[0]).toBe(viewSelectEntity);
        expect(viewCreateEntity.columns).toBeNull();

        expect(viewSelectEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(viewSelectEntity.text).toBe('mydb.sale_tbl');
        expect(viewSelectEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
    });

    test('select table default', () => {
        const columnCreateTableContext = splitListener.statementsContext[6];

        const collectListener = new HiveEntityCollector(commonSql);
        hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const selectTableEntity = allEntities[0];

        expect(selectTableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(selectTableEntity.text).toBe('table_name_1');
        expect(selectTableEntity.position).toEqual({
            endColumn: 36,
            endIndex: 753,
            line: 32,
            startColumn: 24,
            startIndex: 742,
        });

        expect(selectTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(selectTableEntity.belongStmt.position).toEqual({
            endColumn: 36,
            endIndex: 753,
            endLine: 32,
            startColumn: 1,
            startIndex: 719,
            startLine: 32,
        });

        expect(selectTableEntity.columns).toBeNull();
        expect(selectTableEntity.relatedEntities).toBeNull();
    });

    test('select table with join', () => {
        const columnCreateTableContext = splitListener.statementsContext[7];

        const collectListener = new HiveEntityCollector(commonSql);
        hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const selectTableEntity = allEntities[0];
        const joinTableEntity = allEntities[1];

        expect(selectTableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(selectTableEntity.text).toBe('a');
        expect(selectTableEntity.position).toEqual({
            endColumn: 18,
            endIndex: 773,
            line: 34,
            startColumn: 17,
            startIndex: 773,
        });

        expect(selectTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(selectTableEntity.belongStmt.position).toEqual({
            endColumn: 74,
            endIndex: 829,
            endLine: 34,
            startColumn: 1,
            startIndex: 757,
            startLine: 34,
        });

        expect(selectTableEntity.columns).toBeNull();
        expect(selectTableEntity.relatedEntities).toBeNull();

        expect(selectTableEntity.belongStmt).toEqual(joinTableEntity.belongStmt);
        expect(joinTableEntity.text).toBe('b');
        expect(joinTableEntity.columns).toBeNull();
        expect(joinTableEntity.relatedEntities).toBeNull();
    });

    test('from select table', () => {
        const columnCreateTableContext = splitListener.statementsContext[8];

        const collectListener = new HiveEntityCollector(commonSql);
        hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const selectTableEntity = allEntities[0];

        expect(selectTableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(selectTableEntity.text).toBe('table_name_1');
        expect(selectTableEntity.position).toEqual({
            endColumn: 18,
            endIndex: 849,
            line: 36,
            startColumn: 6,
            startIndex: 838,
        });

        expect(selectTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(selectTableEntity.belongStmt.position).toEqual({
            endColumn: 36,
            endIndex: 867,
            endLine: 36,
            startColumn: 1,
            startIndex: 833,
            startLine: 36,
        });

        expect(selectTableEntity.columns).toBeNull();
        expect(selectTableEntity.relatedEntities).toBeNull();
    });

    test('from select table with join', () => {
        const columnCreateTableContext = splitListener.statementsContext[9];

        const collectListener = new HiveEntityCollector(commonSql);
        hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const selectTableEntity = allEntities[0];
        const joinTableEntity = allEntities[1];

        expect(selectTableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(selectTableEntity.text).toBe('a');
        expect(selectTableEntity.position).toEqual({
            endColumn: 7,
            endIndex: 876,
            line: 38,
            startColumn: 6,
            startIndex: 876,
        });

        expect(selectTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(selectTableEntity.belongStmt.position).toEqual({
            endColumn: 74,
            endIndex: 943,
            endLine: 38,
            startColumn: 1,
            startIndex: 871,
            startLine: 38,
        });

        expect(selectTableEntity.columns).toBeNull();
        expect(selectTableEntity.relatedEntities).toBeNull();

        expect(selectTableEntity.belongStmt).toEqual(joinTableEntity.belongStmt);
        expect(joinTableEntity.text).toBe('b');
        expect(joinTableEntity.columns).toBeNull();
        expect(joinTableEntity.relatedEntities).toBeNull();
    });

    test('insert table with values', () => {
        const columnCreateTableContext = splitListener.statementsContext[10];

        const collectListener = new HiveEntityCollector(commonSql);
        hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const insertTableEntity = allEntities[0];

        expect(insertTableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(insertTableEntity.text).toBe('students');
        expect(insertTableEntity.position).toEqual({
            endColumn: 27,
            endIndex: 972,
            line: 40,
            startColumn: 19,
            startIndex: 965,
        });

        expect(insertTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
        expect(insertTableEntity.belongStmt.position).toEqual({
            endColumn: 66,
            endIndex: 1045,
            endLine: 41,
            startColumn: 1,
            startIndex: 947,
            startLine: 40,
        });

        expect(insertTableEntity.columns).toBeNull();
        expect(insertTableEntity.relatedEntities).toBeNull();
    });

    test('insert table use select', () => {
        const columnCreateTableContext = splitListener.statementsContext[11];

        const collectListener = new HiveEntityCollector(commonSql);
        hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const insertTableEntity = allEntities[0];
        const fromTableEntity = allEntities[1];

        expect(insertTableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(insertTableEntity.text).toBe('table_name');
        expect(insertTableEntity.position).toEqual({
            endColumn: 23,
            endIndex: 1070,
            line: 43,
            startColumn: 13,
            startIndex: 1061,
        });

        expect(insertTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
        expect(insertTableEntity.belongStmt.position).toEqual({
            endColumn: 18,
            endIndex: 1183,
            endLine: 46,
            startColumn: 1,
            startIndex: 1049,
            startLine: 43,
        });

        expect(insertTableEntity.columns).toBeNull();
        expect(insertTableEntity.relatedEntities).toBeNull();

        expect(fromTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(fromTableEntity.text).toBe('source_table');
        expect(fromTableEntity.belongStmt.parentStmt).toEqual(insertTableEntity.belongStmt);
        expect(fromTableEntity.belongStmt.rootStmt).toBe(insertTableEntity.belongStmt);
    });

    test('from insert table use select', () => {
        const columnCreateTableContext = splitListener.statementsContext[12];

        const collectListener = new HiveEntityCollector(commonSql);
        hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const fromTableEntity = allEntities[0];
        const insertTableEntity = allEntities[1];

        expect(insertTableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(insertTableEntity.text).toBe('page_view');
        expect(insertTableEntity.position).toEqual({
            endColumn: 33,
            endIndex: 1241,
            line: 49,
            startColumn: 24,
            startIndex: 1233,
        });

        expect(insertTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
        expect(insertTableEntity.belongStmt.position).toEqual({
            endColumn: 93,
            endIndex: 1370,
            endLine: 50,
            startColumn: 1,
            startIndex: 1187,
            startLine: 48,
        });

        expect(insertTableEntity.columns).toBeNull();
        expect(insertTableEntity.relatedEntities).toBeNull();

        expect(fromTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
        expect(fromTableEntity.text).toBe('page_view_stg');
        expect(fromTableEntity.belongStmt).toEqual(insertTableEntity.belongStmt);
    });

    test('create db', () => {
        const columnCreateTableContext = splitListener.statementsContext[13];

        const collectListener = new HiveEntityCollector(commonSql);
        hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const dbEntity = allEntities[0];

        expect(dbEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
        expect(dbEntity.text).toBe('mydb');
        expect(dbEntity.position).toEqual({
            endColumn: 21,
            endIndex: 1393,
            line: 52,
            startColumn: 17,
            startIndex: 1390,
        });

        expect(dbEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_DATABASE_STMT);
        expect(dbEntity.belongStmt.position).toEqual({
            endColumn: 21,
            endIndex: 1393,
            endLine: 52,
            startColumn: 1,
            startIndex: 1374,
            startLine: 52,
        });

        expect(dbEntity.columns).toBeNull();
        expect(dbEntity.relatedEntities).toBeNull();
    });

    test('create remote db', () => {
        const columnCreateTableContext = splitListener.statementsContext[14];

        const collectListener = new HiveEntityCollector(commonSql);
        hiveSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const dbEntity = allEntities[0];

        expect(dbEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
        expect(dbEntity.text).toBe('mydb');
        expect(dbEntity.position).toEqual({
            endColumn: 28,
            endIndex: 1423,
            line: 54,
            startColumn: 24,
            startIndex: 1420,
        });

        expect(dbEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_DATABASE_STMT);
        expect(dbEntity.belongStmt.position).toEqual({
            endColumn: 28,
            endIndex: 1423,
            endLine: 54,
            startColumn: 1,
            startIndex: 1397,
            startLine: 54,
        });

        expect(dbEntity.columns).toBeNull();
        expect(dbEntity.relatedEntities).toBeNull();
    });

    test('show locks db', () => {
        const dbContext = splitListener.statementsContext[15];

        const collectListener = new HiveEntityCollector(commonSql);
        hiveSql.listen(collectListener as ParseTreeListener, dbContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const dbEntity = allEntities[0];

        expect(dbEntity.entityContextType).toBe(EntityContextType.DATABASE);
        expect(dbEntity.text).toBe('db1');
        expect(dbEntity.position).toEqual({
            endColumn: 24,
            endIndex: 1449,
            line: 56,
            startColumn: 21,
            startIndex: 1447,
        });

        expect(dbEntity.belongStmt.stmtContextType).toBe(StmtContextType.COMMON_STMT);
        expect(dbEntity.belongStmt.position).toEqual({
            endColumn: 25,
            endIndex: 1450,
            endLine: 56,
            startColumn: 1,
            startIndex: 1427,
            startLine: 56,
        });

        expect(dbEntity.columns).toBeNull();
        expect(dbEntity.relatedEntities).toBeNull();
    });

    test('create function', () => {
        const functionCreateContext = splitListener.statementsContext[16];

        const collectListener = new HiveEntityCollector(commonSql);
        hiveSql.listen(collectListener as ParseTreeListener, functionCreateContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const functionEntity = allEntities[0];

        expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
        expect(functionEntity.text).toBe('base_analizer');
        expect(functionEntity.position).toEqual({
            endColumn: 30,
            endIndex: 1481,
            line: 58,
            startColumn: 17,
            startIndex: 1469,
        });

        expect(functionEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_FUNCTION_STMT
        );
        expect(functionEntity.belongStmt.position).toEqual({
            endColumn: 30,
            endIndex: 1481,
            endLine: 58,
            startColumn: 17,
            startIndex: 1469,
            startLine: 58,
        });

        expect(functionEntity.columns).toBeNull();
        expect(functionEntity.relatedEntities).toBeNull();
    });

    test('create temporary function', () => {
        const functionCreateContext = splitListener.statementsContext[17];

        const collectListener = new HiveEntityCollector(commonSql);
        hiveSql.listen(collectListener as ParseTreeListener, functionCreateContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const functionEntity = allEntities[0];

        expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
        expect(functionEntity.text).toBe('flat_analizer');
        expect(functionEntity.position).toEqual({
            endColumn: 40,
            endIndex: 1549,
            line: 60,
            startColumn: 27,
            startIndex: 1537,
        });

        expect(functionEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_FUNCTION_STMT
        );
        expect(functionEntity.belongStmt.position).toEqual({
            endColumn: 40,
            endIndex: 1549,
            endLine: 60,
            startColumn: 27,
            startIndex: 1537,
            startLine: 60,
        });

        expect(functionEntity.columns).toBeNull();
        expect(functionEntity.relatedEntities).toBeNull();
    });
});
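
Read as a whole, the file above always drives the collector the same way: parse the script, split it into statement contexts, walk one statement with a HiveEntityCollector, then read the entities back. A condensed sketch of that flow, using only the API exercised by these tests (the SQL string below is a placeholder, not part of the commit):

import { ParseTreeListener } from 'antlr4ng';
import HiveSQL, { HiveEntityCollector } from 'src/parser/hive';
import { HiveSqlSplitListener } from 'src/parser/hive/hiveSplitListener';
import { HiveSqlParserListener } from 'src/lib/hive/HiveSqlParserListener';

const sql = 'CREATE TABLE copy_table LIKE origin_table;'; // placeholder script
const hive = new HiveSQL();
const parseTree = hive.parse(sql);

// 1. Split the script into one context per statement.
const splitter = new HiveSqlSplitListener();
hive.listen(splitter as HiveSqlParserListener, parseTree);

// 2. Collect entities from a single statement context.
const collector = new HiveEntityCollector(sql);
hive.listen(collector as ParseTreeListener, splitter.statementsContext[0]);

// 3. Inspect what was collected, e.g. copy_table (TABLE_CREATE) and origin_table (TABLE).
const entities = collector.getEntities();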
test/parser/hive/contextCollect/fixtures/common.sql (new file, 60 lines)
@ -0,0 +1,60 @@
CREATE TEMPORARY TABLE IF NOT EXISTS copy_table LIKE origin_table;

CREATE TEMPORARY EXTERNAL TABLE IF NOT EXISTS list_bucket_multiple (col1 STRING, col2 INT, col3 STRING) COMMENT 'this is a comment';

CREATE TABLE IF NOT EXISTS derived_table AS
SELECT
*
FROM
origin_table;

CREATE VIEW mydb.bro_view
AS SELECT * FROM mydb.sale_tbl;

CREATE VIEW mydb.task_view (
taskId COMMENT '任务id',
taskName COMMENT '任务名称',
taskRunTime COMMENT '任务运行时长'
)
COMMENT '一个任务信息视图'
TBLPROPERTIES(
'author'='hayden'
)
AS SELECT DISTINCT id, `name`, runtime
FROM task_tbl
WHERE type='day';

CREATE MATERIALIZED VIEW IF NOT EXISTS mydb.bro_view
DISABLE REWRITE
COMMENT '一个测试视图'
AS SELECT * FROM mydb.sale_tbl;

SELECT col1, col2 FROM table_name_1;

SELECT a.* FROM a JOIN b ON (a.id = b.id AND a.department = b.department);

FROM table_name_1 SELECT col1, col2;

FROM a JOIN b ON (a.id = b.id AND a.department = b.department) SELECT a.*;

INSERT INTO TABLE students(a,b,c)
VALUES ('fred flintstone', 35, 1.28), ('barney rubble', 32, 2.32);

INSERT INTO table_name PARTITION (country, state)
SELECT col1, col2,
CONCAT(country, '_', state) AS country_state
FROM source_table;

FROM page_view_stg pvs
INSERT OVERWRITE TABLE page_view PARTITION(dt='2008-06-08', country)
SELECT pvs.viewTime, pvs.userid, pvs.page_url, pvs.referrer_url, null, null, pvs.ip, pvs.cnt;

CREATE DATABASE mydb;

CREATE REMOTE DATABASE mydb;

SHOW LOCKS DATABASE db1;

CREATE FUNCTION base_analizer AS 'com.udf.BaseFieldUDF';

CREATE TEMPORARY FUNCTION flat_analizer AS 'com.udtf.EventJsonUDTF';
@ -0,0 +1,23 @@
SELECT FROM tb1

SELECT col1, col2, FROM tb

FROM table_name_1 SELECT ; -- TODO: request semicolon

FROM table_name_1 SELECT col1, col2, ; -- TODO: request semicolon

FROM a JOIN b ON (a.id = b.id AND a.department = b.department) SELECT ; -- TODO: request semicolon

FROM a JOIN b ON (a.id = b.id AND a.department = b.department) SELECT a.*, ; -- TODO: request semicolon

FROM page_view_stg pvs INSERT OVERWRITE TABLE page_view PARTITION(dt='2008-06-08', country) SELECT ; -- TODO: request semicolon

FROM page_view_stg pvs INSERT OVERWRITE TABLE page_view PARTITION(dt='2008-06-08', country) SELECT id, ; -- TODO: request semicolon

INSERT INTO insert_tb PARTITION (country, state) SELECT col1, col2, country, state FROM ( SELECT FROM inside_tb ) subquery

INSERT INTO insert_tb PARTITION (country, state) SELECT col1, col2, country, state FROM ( SELECT id, FROM inside_tb ) subquery

CREATE TABLE IF NOT EXISTS derived_table AS SELECT FROM origin_table

CREATE TABLE IF NOT EXISTS derived_table AS SELECT id, FROM origin_table
@ -32,4 +32,10 @@ MERGE INTO tablename USING tablename2 ON (tablename.id = tablename2.id) WHEN MAT
 ALTER TABLE tbl CHANGE COLUMN ;
 
 ALTER TABLE tbl CHANGE COLUMN tbl.oldcol new ;
 
+FROM table_name_1 SELECT col1, col2;
+
+FROM a JOIN b ON (a.id = b.id AND a.department = b.department) SELECT a.*;
+
+FROM page_view_stg INSERT;
@ -1,7 +1,7 @@
 import fs from 'fs';
 import path from 'path';
 import HiveSQL from 'src/parser/hive';
-import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
+import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
 
 const syntaxSql = fs.readFileSync(
     path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
@ -18,7 +18,7 @@ describe('HiveSQL Multiple Statements Syntax Suggestion', () => {
         };
         const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
         );
 
         expect(suggestion).not.toBeUndefined();
@ -32,7 +32,7 @@ describe('HiveSQL Multiple Statements Syntax Suggestion', () => {
         };
         const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
         );
 
         expect(suggestion).not.toBeUndefined();
@ -46,7 +46,7 @@ describe('HiveSQL Multiple Statements Syntax Suggestion', () => {
         };
         const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
         );
 
         expect(suggestion).not.toBeUndefined();
@ -60,7 +60,7 @@ describe('HiveSQL Multiple Statements Syntax Suggestion', () => {
         };
         const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
         const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
         );
 
         expect(suggestion).not.toBeUndefined();
test/parser/hive/suggestion/suggestionWithEntity.test.ts (new file, 310 lines)
@ -0,0 +1,310 @@
import fs from 'fs';
import path from 'path';
import HiveSQL from 'src/parser/hive';
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
import { commentOtherLine } from 'test/helper';

const syntaxSql = fs.readFileSync(
    path.join(__dirname, 'fixtures', 'suggestionWithEntity.sql'),
    'utf-8'
);

describe('Hive SQL Syntax Suggestion with collect entity', () => {
    const hive = new HiveSQL();

    test('Validate Syntax SQL', () => {
        expect(hive.validate(syntaxSql).length).not.toBe(0);
    });

    test('select with no columns', () => {
        const pos: CaretPosition = {
            lineNumber: 1,
            column: 8,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = hive.getAllEntities(sql, pos);
        expect(entities.length).toBe(1);
        expect(entities[0].text).toBe('tb1');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
    });

    test('select with columns with columns and trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 3,
            column: 20,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = hive.getAllEntities(sql, pos);
        expect(entities.length).toBe(1);
        expect(entities[0].text).toBe('tb');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
    });

    test('from table select with no column', () => {
        const pos: CaretPosition = {
            lineNumber: 5,
            column: 26,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = hive.getAllEntities(sql, pos);
        expect(entities.length).toBe(1);
        expect(entities[0].text).toBe('table_name_1');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.rootStmt.isContainCaret).toBeTruthy();
    });

    test('from table select with with columns and trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 7,
            column: 38,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = hive.getAllEntities(sql, pos);
        expect(entities.length).toBe(1);
        expect(entities[0].text).toBe('table_name_1');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.rootStmt.isContainCaret).toBeTruthy();
    });

    test('from joined table select with no column', () => {
        const pos: CaretPosition = {
            lineNumber: 9,
            column: 71,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = hive.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('a');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeFalsy();
        expect(entities[0].belongStmt.rootStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('b');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeFalsy();
        expect(entities[1].belongStmt.rootStmt.isContainCaret).toBeTruthy();
    });

    test('from joined table select with columns and trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 11,
            column: 76,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = hive.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('a');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeFalsy();
        expect(entities[0].belongStmt.rootStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('b');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeFalsy();
        expect(entities[1].belongStmt.rootStmt.isContainCaret).toBeTruthy();
    });

    test('from table insert into table select no columns', () => {
        const pos: CaretPosition = {
            lineNumber: 13,
            column: 100,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = hive.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('page_view_stg');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeFalsy();
        expect(entities[0].belongStmt.rootStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('page_view');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeFalsy();
        expect(entities[1].belongStmt.rootStmt.isContainCaret).toBeTruthy();
    });

    test('from table insert into table select with column and trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 15,
            column: 104,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = hive.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('page_view_stg');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeFalsy();
        expect(entities[0].belongStmt.rootStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('page_view');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeFalsy();
        expect(entities[1].belongStmt.rootStmt.isContainCaret).toBeTruthy();
    });

    test('insert into from nested query with no column', () => {
        const pos: CaretPosition = {
            lineNumber: 17,
            column: 98,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = hive.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('insert_tb');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('inside_tb');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('insert into from nested query with columns and trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 19,
            column: 102,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = hive.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('insert_tb');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('inside_tb');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('create table as select with no column', () => {
        const pos: CaretPosition = {
            lineNumber: 21,
            column: 52,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = hive.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('derived_table');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('origin_table');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('create table as select with columns and trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 23,
            column: 56,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = hive.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = hive.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('derived_table');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
|
||||||
|
|
||||||
|
expect(entities[1].text).toBe('origin_table');
|
||||||
|
expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
|
||||||
|
expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
|
||||||
|
});
|
||||||
|
});
|
@ -1,7 +1,7 @@
import fs from 'fs';
import path from 'path';
import HiveSQL from 'src/parser/hive';
-import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
+import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
import { commentOtherLine } from 'test/helper';

const syntaxSql = fs.readFileSync(
@ -28,7 +28,7 @@ describe('Hive SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@ -45,7 +45,7 @@ describe('Hive SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@ -62,7 +62,7 @@ describe('Hive SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@ -79,7 +79,7 @@ describe('Hive SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@ -96,7 +96,7 @@ describe('Hive SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.VIEW_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@ -113,7 +113,7 @@ describe('Hive SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
+            (syn) => syn.syntaxContextType === EntityContextType.VIEW
        );

        expect(suggestion).not.toBeUndefined();
@ -130,7 +130,7 @@ describe('Hive SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.FUNCTION_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@ -147,7 +147,7 @@ describe('Hive SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
+            (syn) => syn.syntaxContextType === EntityContextType.FUNCTION
        );

        expect(suggestion).not.toBeUndefined();
@ -164,7 +164,7 @@ describe('Hive SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.DATABASE_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@ -181,7 +181,7 @@ describe('Hive SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
+            (syn) => syn.syntaxContextType === EntityContextType.DATABASE
        );

        expect(suggestion).not.toBeUndefined();
@ -198,7 +198,7 @@ describe('Hive SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@ -215,7 +215,7 @@ describe('Hive SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@ -232,7 +232,7 @@ describe('Hive SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@ -249,7 +249,7 @@ describe('Hive SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@ -266,7 +266,7 @@ describe('Hive SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@ -287,7 +287,7 @@ describe('Hive SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@ -304,10 +304,61 @@ describe('Hive SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
        );

        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['new']);
    });

    test('From Table Select', () => {
        const pos: CaretPosition = {
            lineNumber: 37,
            column: 18,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(
            commentOtherLine(syntaxSql, pos.lineNumber),
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['table_name_1']);
    });

    test('From Table Select join', () => {
        const pos: CaretPosition = {
            lineNumber: 39,
            column: 14,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(
            commentOtherLine(syntaxSql, pos.lineNumber),
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['b']);
    });

    test('From Table Insert', () => {
        const pos: CaretPosition = {
            lineNumber: 41,
            column: 19,
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(
            commentOtherLine(syntaxSql, pos.lineNumber),
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['page_view_stg']);
    });
});
@ -239,3 +239,8 @@ EXPLAIN LOCKS UPDATE target SET b = 1 WHERE p IN (SELECT t.q1 FROM source t WHER

-- LanguageManual Explain -- User-level Explain Output
EXPLAIN select sum(hash(key)), sum(hash(value)) from src_orc_merge_test_part where ds='2012-01-03' and ts='2012-01-03+14:46:31';

-- FROM xx SELECT
FROM table_name_1 SELECT col1, col2;

FROM a JOIN b ON (a.id = b.id AND a.department = b.department) SELECT a.*;
600 test/parser/impala/contextCollect/entityCollector.test.ts Normal file
@ -0,0 +1,600 @@
import fs from 'fs';
import path from 'path';
import { ImpalaSqlSplitListener } from 'src/parser/impala';
import { EntityContextType } from 'src/parser/common/basic-parser-types';
import { StmtContextType } from 'src/parser/common/entityCollector';
import ImpalaSQL from 'src/parser/impala';
import { ImpalaSqlParserListener } from 'src/lib/impala/ImpalaSqlParserListener';
import ImpalaEntityCollector from 'src/parser/impala/impalaEntityCollector';
import { ParseTreeListener } from 'antlr4ng';

const commonSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'common.sql'), 'utf-8');

describe('ImpalaSQL entity collector tests', () => {
    const impalaSql = new ImpalaSQL();
    const parseTree = impalaSql.parse(commonSql);
    const splitListener = new ImpalaSqlSplitListener();
    impalaSql.listen(splitListener as ImpalaSqlParserListener, parseTree);

    test('validate common sql', () => {
        expect(impalaSql.validate(commonSql).length).toBe(0);
    });

    test('split results', () => {
        expect(splitListener.statementsContext.length).toBe(14);
    });

    test('create table by like', () => {
        const columnCreateTableContext = splitListener.statementsContext[0];

        const collectListener = new ImpalaEntityCollector(commonSql);
        impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const tableCreateEntity = allEntities[0];
        const likeTableEntity = allEntities[1];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('new_Table');
        expect(tableCreateEntity.position).toEqual({
            endColumn: 23,
            endIndex: 21,
            line: 1,
            startColumn: 14,
            startIndex: 13,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            endColumn: 38,
            endIndex: 36,
            endLine: 1,
            startColumn: 1,
            startIndex: 0,
            startLine: 1,
        });

        expect(tableCreateEntity.relatedEntities.length).toBe(1);
        expect(tableCreateEntity.columns).toBeNull();
        expect(tableCreateEntity.relatedEntities[0]).toBe(likeTableEntity);

        expect(likeTableEntity.text).toBe('old_table');
        expect(likeTableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(likeTableEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
    });

    test('create table by columns', () => {
        const columnCreateTableContext = splitListener.statementsContext[1];

        const collectListener = new ImpalaEntityCollector(commonSql);
        impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('census');
        expect(tableCreateEntity.position).toEqual({
            endColumn: 20,
            endIndex: 58,
            line: 3,
            startColumn: 14,
            startIndex: 53,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            endColumn: 77,
            endIndex: 115,
            endLine: 3,
            startColumn: 1,
            startIndex: 40,
            startLine: 3,
        });

        expect(tableCreateEntity.relatedEntities).toBeNull();
        expect(tableCreateEntity.columns.length).toBe(2);
        tableCreateEntity.columns.forEach((columEntity) => {
            expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
            expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
            expect(columEntity.text).toBe(
                commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
            );
        });
    });

    test('create table by select', () => {
        const columnCreateTableContext = splitListener.statementsContext[2];

        const collectListener = new ImpalaEntityCollector(commonSql);
        impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const tableCreateEntity = allEntities[0];
        const fromCreateEntity = allEntities[1];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('sorted_census_data');
        expect(tableCreateEntity.position).toEqual({
            endColumn: 32,
            endIndex: 149,
            line: 5,
            startColumn: 14,
            startIndex: 132,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            endColumn: 30,
            endIndex: 278,
            endLine: 9,
            startColumn: 1,
            startIndex: 119,
            startLine: 5,
        });

        expect(tableCreateEntity.relatedEntities.length).toBe(1);
        expect(tableCreateEntity.columns).toBeNull();

        expect(fromCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(fromCreateEntity.text).toBe('unsorted_census_data');
        expect(tableCreateEntity.relatedEntities[0]).toBe(fromCreateEntity);
    });

    test('create kudu table by select', () => {
        const columnCreateTableContext = splitListener.statementsContext[3];

        const collectListener = new ImpalaEntityCollector(commonSql);
        impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const tableCreateEntity = allEntities[0];
        const fromCreateEntity = allEntities[1];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('ctas_t1');
        expect(tableCreateEntity.position).toEqual({
            endColumn: 21,
            endIndex: 301,
            line: 11,
            startColumn: 14,
            startIndex: 295,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            endColumn: 31,
            endIndex: 405,
            endLine: 14,
            startColumn: 1,
            startIndex: 282,
            startLine: 11,
        });

        expect(tableCreateEntity.relatedEntities.length).toBe(1);
        expect(tableCreateEntity.columns).toBeNull();

        expect(fromCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(fromCreateEntity.text).toBe('kudu_t1');
        expect(tableCreateEntity.relatedEntities[0]).toBe(fromCreateEntity);
    });

    test('create kudu table by columns', () => {
        const columnCreateTableContext = splitListener.statementsContext[4];

        const collectListener = new ImpalaEntityCollector(commonSql);
        impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('kudu_t3');
        expect(tableCreateEntity.position).toEqual({
            endColumn: 21,
            endIndex: 428,
            line: 16,
            startColumn: 14,
            startIndex: 422,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            endColumn: 17,
            endIndex: 705,
            endLine: 23,
            startColumn: 1,
            startIndex: 409,
            startLine: 16,
        });

        expect(tableCreateEntity.relatedEntities).toBeNull();
        expect(tableCreateEntity.columns.length).toBe(4);
        tableCreateEntity.columns.forEach((columEntity) => {
            expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
            expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
            expect(columEntity.text).toBe(
                commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
            );
        });
    });

    test('create view', () => {
        const columnCreateTableContext = splitListener.statementsContext[5];

        const collectListener = new ImpalaEntityCollector(commonSql);
        impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const viewCreateEntity = allEntities[0];
        const fromCreateEntity = allEntities[1];

        expect(viewCreateEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
        expect(viewCreateEntity.text).toBe('my_view');
        expect(viewCreateEntity.position).toEqual({
            endColumn: 20,
            endIndex: 727,
            line: 25,
            startColumn: 13,
            startIndex: 721,
        });

        expect(viewCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
        expect(viewCreateEntity.belongStmt.position).toEqual({
            endColumn: 146,
            endIndex: 853,
            endLine: 25,
            startColumn: 1,
            startIndex: 709,
            startLine: 25,
        });

        expect(viewCreateEntity.relatedEntities[0]).toBe(fromCreateEntity);
        expect(viewCreateEntity.columns.length).toBe(2);
        viewCreateEntity.columns.forEach((columEntity) => {
            expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
            expect(columEntity.belongStmt).toBe(viewCreateEntity.belongStmt);
            expect(columEntity.text).toBe(
                commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
            );
        });

        expect(fromCreateEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(fromCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(fromCreateEntity.text).toBe('my_table');
    });

    test('insert table select', () => {
        const columnCreateTableContext = splitListener.statementsContext[6];

        const collectListener = new ImpalaEntityCollector(commonSql);
        impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const tableInsertEntity = allEntities[0];
        const fromTableEntity = allEntities[1];

        expect(tableInsertEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableInsertEntity.text).toBe('t2');
        expect(tableInsertEntity.position).toEqual({
            endColumn: 15,
            endIndex: 870,
            line: 27,
            startColumn: 13,
            startIndex: 869,
        });

        expect(tableInsertEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
        expect(tableInsertEntity.belongStmt.position).toEqual({
            endColumn: 40,
            endIndex: 895,
            endLine: 27,
            startColumn: 1,
            startIndex: 857,
            startLine: 27,
        });

        expect(tableInsertEntity.columns).toBeNull();

        expect(fromTableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(fromTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(fromTableEntity.text).toBe('t1');
        expect(fromTableEntity.belongStmt.parentStmt).toBe(tableInsertEntity.belongStmt);
    });

    test('select table', () => {
        const columnCreateTableContext = splitListener.statementsContext[7];

        const collectListener = new ImpalaEntityCollector(commonSql);
        impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const tableEntity1 = allEntities[0];

        expect(tableEntity1.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity1.text).toBe('a');
        expect(tableEntity1.position).toEqual({
            endColumn: 16,
            endIndex: 913,
            line: 29,
            startColumn: 15,
            startIndex: 913,
        });

        expect(tableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(tableEntity1.belongStmt.position).toEqual({
            endColumn: 16,
            endIndex: 913,
            endLine: 29,
            startColumn: 1,
            startIndex: 899,
            startLine: 29,
        });

        expect(tableEntity1.columns).toBeNull();
        expect(tableEntity1.relatedEntities).toBeNull();
    });

    test('select table join', () => {
        const columnCreateTableContext = splitListener.statementsContext[8];

        const collectListener = new ImpalaEntityCollector(commonSql);
        impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const tableEntity1 = allEntities[0];
        const tableEntity2 = allEntities[1];

        expect(tableEntity1.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity1.text).toBe('t1');
        expect(tableEntity1.position).toEqual({
            endColumn: 28,
            endIndex: 943,
            line: 31,
            startColumn: 26,
            startIndex: 942,
        });

        expect(tableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(tableEntity1.belongStmt.position).toEqual({
            endColumn: 20,
            endIndex: 1022,
            endLine: 33,
            startColumn: 1,
            startIndex: 917,
            startLine: 31,
        });

        expect(tableEntity1.columns).toBeNull();
        expect(tableEntity1.relatedEntities).toBeNull();

        expect(tableEntity2.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(tableEntity2.text).toBe('t2');
        expect(tableEntity2.columns).toBeNull();
        expect(tableEntity2.relatedEntities).toBeNull();
        expect(tableEntity2.belongStmt).toBe(tableEntity1.belongStmt);
    });

    test('create db', () => {
        const columnCreateTableContext = splitListener.statementsContext[9];

        const collectListener = new ImpalaEntityCollector(commonSql);
        impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const dbEntity = allEntities[0];

        expect(dbEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
        expect(dbEntity.text).toBe('my_db');
        expect(dbEntity.position).toEqual({
            endColumn: 22,
            endIndex: 1046,
            line: 35,
            startColumn: 17,
            startIndex: 1042,
        });

        expect(dbEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_DATABASE_STMT);
        expect(dbEntity.belongStmt.position).toEqual({
            endColumn: 52,
            endIndex: 1076,
            endLine: 35,
            startColumn: 1,
            startIndex: 1026,
            startLine: 35,
        });

        expect(dbEntity.columns).toBeNull();
        expect(dbEntity.relatedEntities).toBeNull();
    });

    test('create schema', () => {
        const columnCreateTableContext = splitListener.statementsContext[10];

        const collectListener = new ImpalaEntityCollector(commonSql);
        impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const schemaEntity = allEntities[0];

        expect(schemaEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
        expect(schemaEntity.text).toBe('my_schema');
        expect(schemaEntity.position).toEqual({
            endColumn: 38,
            endIndex: 1116,
            line: 37,
            startColumn: 29,
            startIndex: 1108,
        });

        expect(schemaEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_DATABASE_STMT);
        expect(schemaEntity.belongStmt.position).toEqual({
            endColumn: 94,
            endIndex: 1172,
            endLine: 37,
            startColumn: 1,
            startIndex: 1080,
            startLine: 37,
        });

        expect(schemaEntity.columns).toBeNull();
        expect(schemaEntity.relatedEntities).toBeNull();
    });

    test('comment dbName', () => {
        const columnCreateTableContext = splitListener.statementsContext[11];

        const collectListener = new ImpalaEntityCollector(commonSql);
        impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const dbEntity = allEntities[0];

        expect(dbEntity.entityContextType).toBe(EntityContextType.DATABASE);
        expect(dbEntity.text).toBe('my_database');
        expect(dbEntity.position).toEqual({
            endColumn: 32,
            endIndex: 1206,
            line: 39,
            startColumn: 21,
            startIndex: 1196,
        });

        // COMMENT statements are not handled yet, so this currently falls under COMMON_STMT
        expect(dbEntity.belongStmt.stmtContextType).toBe(StmtContextType.COMMON_STMT);
        expect(dbEntity.belongStmt.position).toEqual({
            endColumn: 59,
            endIndex: 1233,
            endLine: 39,
            startColumn: 1,
            startIndex: 1176,
            startLine: 39,
        });

        expect(dbEntity.columns).toBeNull();
        expect(dbEntity.relatedEntities).toBeNull();
    });

    test('create aggregate function', () => {
        const columnCreateTableContext = splitListener.statementsContext[12];

        const collectListener = new ImpalaEntityCollector(commonSql);
        impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const functionEntity = allEntities[0];

        expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
        expect(functionEntity.text).toBe('function_name');
        expect(functionEntity.position).toEqual({
            endColumn: 40,
            endIndex: 1274,
            line: 41,
            startColumn: 27,
            startIndex: 1262,
        });

        expect(functionEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_FUNCTION_STMT
        );
        expect(functionEntity.belongStmt.position).toEqual({
            endColumn: 26,
            endIndex: 1391,
            endLine: 45,
            startColumn: 1,
            startIndex: 1236,
            startLine: 41,
        });

        expect(functionEntity.columns).toBeNull();
        expect(functionEntity.relatedEntities).toBeNull();
    });

    test('create function', () => {
        const columnCreateTableContext = splitListener.statementsContext[13];

        const collectListener = new ImpalaEntityCollector(commonSql);
        impalaSql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const functionEntity = allEntities[0];

        expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
        expect(functionEntity.text).toBe('function_name');
        expect(functionEntity.position).toEqual({
            endColumn: 30,
            endIndex: 1423,
            line: 47,
            startColumn: 17,
            startIndex: 1411,
        });

        expect(functionEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_FUNCTION_STMT
        );
        expect(functionEntity.belongStmt.position).toEqual({
            endColumn: 21,
            endIndex: 1517,
            endLine: 50,
            startColumn: 1,
            startIndex: 1395,
            startLine: 47,
        });

        expect(functionEntity.columns).toBeNull();
        expect(functionEntity.relatedEntities).toBeNull();
    });
});
50 test/parser/impala/contextCollect/fixtures/common.sql Normal file
@ -0,0 +1,50 @@
CREATE TABLE new_Table like old_table;

create table census (name string, census_year int) partitioned by (year int);

CREATE TABLE sorted_census_data
SORT BY (last_name, state)
STORED AS PARQUET
AS SELECT last_name, first_name, state, address
FROM unsorted_census_data;

CREATE TABLE ctas_t1
PRIMARY KEY (id) PARTITION BY HASH (id) PARTITIONS 10
STORED AS KUDU
AS SELECT id, s FROM kudu_t1;

CREATE TABLE kudu_t3 (id BIGINT, year INT, s STRING,
b BOOLEAN, PRIMARY KEY (id,year))
PARTITION BY HASH (id) PARTITIONS 20,
RANGE (year) (PARTITION 1980 <= VALUES < 1990,
PARTITION 1990 <= VALUES < 2000,
PARTITION VALUE = 2001,
PARTITION 2001 < VALUES < 2003)
STORED AS KUDU;

CREATE VIEW my_view (age COMMENT 'this is number col', age1 COMMENT 'this is number col') TBLPROPERTIES ('tblp1' = '1') AS SELECT * FROM my_table;

insert into t2 (y, x) select c1 from t1;

SELECT * from a;

SELECT t1.c1, t2.c2 FROM t1 JOIN t2
ON t1.id = t2.id and t1.type_flag = t2.type_flag
WHERE t1.c1 > 100;

CREATE DATABASE my_db LOCATION '/path/to/partition';

CREATE SCHEMA IF NOT EXISTS my_schema COMMENT 'my first schema' LOCATION '/path/to/partition';

COMMENT ON DATABASE my_database IS 'This is my database.';

CREATE AGGREGATE FUNCTION function_name(arg_type1, arg_type2)
RETURNS return_type
LOCATION 'hdfs_path'
UPDATE_FN='update_function'
MERGE_FN='merge_function';

CREATE FUNCTION function_name(arg_type1, arg_type2)
RETURNS return_type
LOCATION 'hdfs_path_to_dot_so'
SYMBOL='symbol_name';
@ -0,0 +1,11 @@
SELECT FROM tab;

SELECT name, calculate_age(birthdate) AS age, FROM students;

INSERT INTO insert_tb SELECT FROM from_tb;

INSERT INTO insert_tb SELECT id, FROM from_tb;

CREATE TABLE sorted_census_data AS SELECT FROM unsorted_census_data;

CREATE TABLE sorted_census_data AS SELECT id, FROM unsorted_census_data;
@ -1,7 +1,7 @@
import fs from 'fs';
import path from 'path';
import ImpalaSQL from 'src/parser/impala';
-import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
+import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';

const syntaxSql = fs.readFileSync(
    path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
@ -18,7 +18,7 @@ describe('ImpalaSQL Multiple Statements Syntax Suggestion', () => {
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@ -32,7 +32,7 @@ describe('ImpalaSQL Multiple Statements Syntax Suggestion', () => {
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@ -46,7 +46,7 @@ describe('ImpalaSQL Multiple Statements Syntax Suggestion', () => {
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@ -60,7 +60,7 @@ describe('ImpalaSQL Multiple Statements Syntax Suggestion', () => {
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
158 test/parser/impala/suggestion/suggestionWithEntity.test.ts Normal file
@ -0,0 +1,158 @@
import fs from 'fs';
import path from 'path';
import ImpalaSQL from 'src/parser/impala';
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
import { commentOtherLine } from 'test/helper';

const syntaxSql = fs.readFileSync(
    path.join(__dirname, 'fixtures', 'suggestionWithEntity.sql'),
    'utf-8'
);

describe('Impala SQL Syntax Suggestion with collect entity', () => {
    const impala = new ImpalaSQL();

    test('select with no column', () => {
        const pos: CaretPosition = {
            lineNumber: 1,
            column: 8,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = impala.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        // TODO:
        // const entities = impala.getAllEntities(sql, pos);
        // expect(entities.length).toBe(1);
        // expect(entities[0].text).toBe('my_db.tb');
        // expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        // expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
    });

    test('select with columns with trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 3,
            column: 47,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = impala.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = impala.getAllEntities(sql, pos);
        expect(entities.length).toBe(1);
        expect(entities[0].text).toBe('students');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
    });

    test('insert into table as select with no column', () => {
        const pos: CaretPosition = {
            lineNumber: 5,
            column: 30,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = impala.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = impala.getAllEntities(sql, pos);
        expect(entities.length).toBe(1);
        expect(entities[0].text).toBe('insert_tb');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        // TODO:
        // expect(entities[1].text).toBe('from_tb');
        // expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        // expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('insert into table as select with trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 7,
            column: 34,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = impala.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = impala.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('insert_tb');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('from_tb');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('create table as select with no column', () => {
        const pos: CaretPosition = {
            lineNumber: 9,
            column: 43,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = impala.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = impala.getAllEntities(sql, pos);
        expect(entities.length).toBe(1);
        expect(entities[0].text).toBe('sorted_census_data');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
        // TODO:
        // expect(entities[1].text).toBe('unsorted_census_data');
        // expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        // expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('create table as select with trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 11,
            column: 47,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = impala.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = impala.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('sorted_census_data');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('unsorted_census_data');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });
});
@ -1,7 +1,7 @@
import fs from 'fs';
import path from 'path';
import ImpalaSQL from 'src/parser/impala';
-import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
+import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
import { commentOtherLine } from 'test/helper';

const syntaxSql = fs.readFileSync(
@ -22,7 +22,7 @@ describe('Impala SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@ -39,7 +39,7 @@ describe('Impala SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
+            (syn) => syn.syntaxContextType === EntityContextType.FUNCTION
        );

        expect(suggestion).not.toBeUndefined();
@ -56,7 +56,7 @@ describe('Impala SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
+            (syn) => syn.syntaxContextType === EntityContextType.DATABASE
        );

        expect(suggestion).not.toBeUndefined();
@ -73,7 +73,7 @@ describe('Impala SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@ -90,7 +90,7 @@ describe('Impala SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@ -107,7 +107,7 @@ describe('Impala SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
+            (syn) => syn.syntaxContextType === EntityContextType.VIEW
        );

        expect(suggestion).not.toBeUndefined();
@ -124,7 +124,7 @@ describe('Impala SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
+            (syn) => syn.syntaxContextType === EntityContextType.VIEW
        );

        expect(suggestion).not.toBeUndefined();
@ -141,7 +141,7 @@ describe('Impala SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
+            (syn) => syn.syntaxContextType === EntityContextType.DATABASE
        );

        expect(suggestion).not.toBeUndefined();
@ -158,7 +158,7 @@ describe('Impala SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@ -175,7 +175,7 @@ describe('Impala SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
+            (syn) => syn.syntaxContextType === EntityContextType.FUNCTION
        );

        expect(suggestion).not.toBeUndefined();
@ -192,7 +192,7 @@ describe('Impala SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@ -209,7 +209,7 @@ describe('Impala SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.VIEW_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@ -226,7 +226,7 @@ describe('Impala SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@ -243,7 +243,7 @@ describe('Impala SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.FUNCTION_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@ -260,7 +260,7 @@ describe('Impala SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.DATABASE_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@ -277,7 +277,7 @@ describe('Impala SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@ -294,7 +294,7 @@ describe('Impala SQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||||
);
|
);
|
||||||
|
|
||||||
expect(suggestion).not.toBeUndefined();
|
expect(suggestion).not.toBeUndefined();
|
||||||
@ -311,7 +311,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
|||||||
pos
|
pos
|
||||||
)?.syntax;
|
)?.syntax;
|
||||||
const suggestion = syntaxes?.find(
|
const suggestion = syntaxes?.find(
|
||||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||||
);
|
);
|
||||||
|
|
||||||
expect(suggestion).not.toBeUndefined();
|
expect(suggestion).not.toBeUndefined();
|
||||||
@ -328,7 +328,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
|||||||
pos
|
pos
|
||||||
)?.syntax;
|
)?.syntax;
|
||||||
const suggestion = syntaxes?.find(
|
const suggestion = syntaxes?.find(
|
||||||
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
|
(syn) => syn.syntaxContextType === EntityContextType.VIEW
|
||||||
);
|
);
|
||||||
|
|
||||||
expect(suggestion).not.toBeUndefined();
|
expect(suggestion).not.toBeUndefined();
|
||||||
@ -345,7 +345,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
|||||||
pos
|
pos
|
||||||
)?.syntax;
|
)?.syntax;
|
||||||
const suggestion = syntaxes?.find(
|
const suggestion = syntaxes?.find(
|
||||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||||
);
|
);
|
||||||
|
|
||||||
expect(suggestion).not.toBeUndefined();
|
expect(suggestion).not.toBeUndefined();
|
||||||
@ -362,7 +362,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
|||||||
pos
|
pos
|
||||||
)?.syntax;
|
)?.syntax;
|
||||||
const suggestion = syntaxes?.find(
|
const suggestion = syntaxes?.find(
|
||||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
|
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||||
);
|
);
|
||||||
|
|
||||||
expect(suggestion).not.toBeUndefined();
|
expect(suggestion).not.toBeUndefined();
|
||||||
@ -379,7 +379,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
|||||||
pos
|
pos
|
||||||
)?.syntax;
|
)?.syntax;
|
||||||
const suggestion = syntaxes?.find(
|
const suggestion = syntaxes?.find(
|
||||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
|
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||||
);
|
);
|
||||||
|
|
||||||
expect(suggestion).not.toBeUndefined();
|
expect(suggestion).not.toBeUndefined();
|
||||||
@ -396,7 +396,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
|||||||
pos
|
pos
|
||||||
)?.syntax;
|
)?.syntax;
|
||||||
const suggestion = syntaxes?.find(
|
const suggestion = syntaxes?.find(
|
||||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
|
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||||
);
|
);
|
||||||
|
|
||||||
expect(suggestion).not.toBeUndefined();
|
expect(suggestion).not.toBeUndefined();
|
||||||
@ -413,7 +413,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
|||||||
pos
|
pos
|
||||||
)?.syntax;
|
)?.syntax;
|
||||||
const suggestion = syntaxes?.find(
|
const suggestion = syntaxes?.find(
|
||||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
|
(syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
|
||||||
);
|
);
|
||||||
|
|
||||||
expect(suggestion).not.toBeUndefined();
|
expect(suggestion).not.toBeUndefined();
|
||||||
@ -430,7 +430,7 @@ describe('Impala SQL Syntax Suggestion', () => {
|
|||||||
pos
|
pos
|
||||||
)?.syntax;
|
)?.syntax;
|
||||||
const suggestion = syntaxes?.find(
|
const suggestion = syntaxes?.find(
|
||||||
(syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
|
(syn) => syn.syntaxContextType === EntityContextType.COLUMN
|
||||||
);
|
);
|
||||||
|
|
||||||
expect(suggestion).not.toBeUndefined();
|
expect(suggestion).not.toBeUndefined();
|
||||||
@@ -178,4 +178,8 @@ CREATE TABLE fk(id INT, col1 INT, col2 STRING, PRIMARY KEY(id),
CREATE TABLE pk(id INT, PRIMARY KEY(id) DISABLE, NOVALIDATE, RELY);

CREATE TABLE fk(id INT, col1 INT, col2 STRING, PRIMARY KEY(id),
FOREIGN KEY(col1, col2) REFERENCES pk(col1, col2));
+
+CREATE TABLE new_Table like old_table;
+
+CREATE TABLE new_Table like old_table partitioned by (year int) SORT BY (last_name, state);
test/parser/mysql/contextCollect/entityCollector.test.ts (new file, 495 lines)
@@ -0,0 +1,495 @@
import fs from 'fs';
import path from 'path';
import MySQL from 'src/parser/mysql';
import { MySqlEntityCollector, MysqlSplitListener } from 'src/parser/mysql';
import { ParseTreeListener } from 'antlr4ng';
import { MySqlParserListener } from 'src/lib/mysql/MySqlParserListener';
import { EntityContextType } from 'src/parser/common/basic-parser-types';
import { StmtContextType } from 'src/parser/common/entityCollector';

const commonSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'common.sql'), 'utf-8');

describe('MySQL entity collector tests', () => {
    const mysql = new MySQL();
    const parseTree = mysql.parse(commonSql);
    const splitListener = new MysqlSplitListener();
    mysql.listen(splitListener as MySqlParserListener, parseTree);

    test('validate common sql', () => {
        expect(mysql.validate(commonSql).length).toBe(0);
    });

    test('split results', () => {
        expect(splitListener.statementsContext.length).toBe(15);
    });

    test('create table by columns', () => {
        const columnCreateTableContext = splitListener.statementsContext[0];

        const collectListener = new MySqlEntityCollector(commonSql);
        mysql.listen(collectListener as ParseTreeListener, columnCreateTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('new_tb_with_col');
        expect(tableCreateEntity.position).toEqual({
            startIndex: 13,
            endIndex: 27,
            line: 1,
            startColumn: 14,
            endColumn: 29,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            startIndex: 0,
            endIndex: 45,
            startLine: 1,
            endLine: 1,
            startColumn: 1,
            endColumn: 47,
        });

        expect(tableCreateEntity.relatedEntities).toBeNull();

        expect(tableCreateEntity.columns.length).toBe(2);
        tableCreateEntity.columns.forEach((columEntity) => {
            expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
            expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
            expect(columEntity.text).toBe(
                commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
            );
        });
    });

    test('create table by select', () => {
        const createTableBySelectContext = splitListener.statementsContext[1];

        const collectListener = new MySqlEntityCollector(commonSql);
        mysql.listen(collectListener as ParseTreeListener, createTableBySelectContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(3);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('new_tb_from_old');
        expect(tableCreateEntity.position).toEqual({
            startIndex: 62,
            endIndex: 76,
            line: 3,
            startColumn: 14,
            endColumn: 29,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            startIndex: 49,
            endIndex: 265,
            startLine: 3,
            endLine: 12,
            startColumn: 1,
            endColumn: 34,
        });

        expect(tableCreateEntity.columns).toBeNull();

        expect(tableCreateEntity.relatedEntities.length).toBe(2);
        tableCreateEntity.relatedEntities.forEach((relatedEntity) => {
            expect(relatedEntity.entityContextType).toBe(EntityContextType.TABLE);
            expect(allEntities.some((en) => relatedEntity === en)).toBeTruthy();
        });

        expect(allEntities[1].text).toBe('old_tb1');
        expect(allEntities[1].belongStmt.rootStmt).toBe(allEntities[0].belongStmt);
        expect(allEntities[1].position).toEqual({
            startIndex: 161,
            endIndex: 167,
            line: 8,
            startColumn: 9,
            endColumn: 16,
        });

        expect(allEntities[2].text).toBe('old_tb2');
        expect(allEntities[2].belongStmt.rootStmt).toBe(allEntities[0].belongStmt);
        expect(allEntities[2].position).toEqual({
            startIndex: 187,
            endIndex: 193,
            line: 10,
            startColumn: 9,
            endColumn: 16,
        });
    });

    test('create table like', () => {
        const createTableLikeContext = splitListener.statementsContext[2];

        const collectListener = new MySqlEntityCollector(commonSql);
        mysql.listen(collectListener as ParseTreeListener, createTableLikeContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const tableCreateEntity = allEntities[0];
        const originTableEntity = allEntities[1];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('new_tb_like_old');
        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );

        expect(tableCreateEntity.columns).toBeNull();
        expect(tableCreateEntity.relatedEntities.length).toBe(1);
        expect(tableCreateEntity.relatedEntities[0]).toBe(originTableEntity);

        expect(originTableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(originTableEntity.text).toBe('old_tb');
        expect(originTableEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
    });

    test('common select from table', () => {
        const selectTableContext = splitListener.statementsContext[3];
        const collectListener = new MySqlEntityCollector(commonSql);
        mysql.listen(collectListener as ParseTreeListener, selectTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const tableEntity = allEntities[0];

        expect(tableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity.text).toBe('select_tb');
        expect(tableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);

        expect(tableEntity.columns).toBeNull();
        expect(tableEntity.relatedEntities).toBeNull();
    });

    test('select into from table', () => {
        const selectTableContext = splitListener.statementsContext[4];
        const collectListener = new MySqlEntityCollector(commonSql);
        mysql.listen(collectListener as ParseTreeListener, selectTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const tableEntity = allEntities[0];

        expect(tableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity.text).toBe('into_select_tb');
        expect(tableEntity.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);

        expect(tableEntity.columns).toBeNull();
        expect(tableEntity.relatedEntities).toBeNull();
    });

    test('select from table join', () => {
        const selectTableContext = splitListener.statementsContext[5];
        const collectListener = new MySqlEntityCollector(commonSql);
        mysql.listen(collectListener as ParseTreeListener, selectTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        const tableEntity1 = allEntities[0];
        const tableEntity2 = allEntities[1];

        expect(tableEntity1.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity1.text).toBe('from_tb');
        expect(tableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);

        expect(tableEntity1.columns).toBeNull();
        expect(tableEntity1.relatedEntities).toBeNull();

        expect(tableEntity2.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity2.text).toBe('join_tb');
        expect(tableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);

        expect(tableEntity2.columns).toBeNull();
        expect(tableEntity2.relatedEntities).toBeNull();

        expect(tableEntity1.belongStmt).toBe(tableEntity2.belongStmt);
    });

    test('insert into table values', () => {
        const insertTableContext = splitListener.statementsContext[6];
        const collectListener = new MySqlEntityCollector(commonSql);
        mysql.listen(collectListener as ParseTreeListener, insertTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const tableEntity = allEntities[0];

        expect(tableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity.text).toBe('insert_tb');
        expect(tableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);

        expect(tableEntity.columns).toBeNull();
        expect(tableEntity.relatedEntities).toBeNull();
    });

    test('insert into table select', () => {
        const insertTableContext = splitListener.statementsContext[7];
        const collectListener = new MySqlEntityCollector(commonSql);
        mysql.listen(collectListener as ParseTreeListener, insertTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(3);

        const insertTableEntity = allEntities[0];
        const fromTableEntity1 = allEntities[1];
        const fromTableEntity2 = allEntities[2];

        expect(insertTableEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(insertTableEntity.text).toBe('insert_from_tb');
        expect(insertTableEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);

        expect(fromTableEntity1.entityContextType).toBe(EntityContextType.TABLE);
        expect(fromTableEntity1.text).toBe('from_tb1');
        expect(fromTableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);

        expect(fromTableEntity2.entityContextType).toBe(EntityContextType.TABLE);
        expect(fromTableEntity2.text).toBe('from_tb2');
        expect(fromTableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);

        expect(fromTableEntity1.belongStmt.parentStmt).toBe(insertTableEntity.belongStmt);
        expect(fromTableEntity2.belongStmt.parentStmt).toBe(insertTableEntity.belongStmt);
        expect(fromTableEntity1.belongStmt.rootStmt).toBe(insertTableEntity.belongStmt);
        expect(fromTableEntity2.belongStmt.rootStmt).toBe(insertTableEntity.belongStmt);
    });

    test('create view with col', () => {
        const insertTableContext = splitListener.statementsContext[8];
        const collectListener = new MySqlEntityCollector(commonSql);
        mysql.listen(collectListener as ParseTreeListener, insertTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        expect(allEntities[0].entityContextType).toBe(EntityContextType.VIEW_CREATE);
        expect(allEntities[0].text).toBe('new_view');
        expect(allEntities[0].belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);

        expect(allEntities[0].columns.length).toBe(2);
        expect(allEntities[0].columns[0].text).toBe('col1');
        expect(allEntities[0].columns[1].text).toBe('col2');
        expect(allEntities[0].columns[0].entityContextType).toBe(EntityContextType.COLUMN_CREATE);
        expect(allEntities[0].columns[0].belongStmt).toBe(allEntities[0].belongStmt);
    });

    test('create view as select table', () => {
        const insertTableContext = splitListener.statementsContext[9];
        const collectListener = new MySqlEntityCollector(commonSql);
        mysql.listen(collectListener as ParseTreeListener, insertTableContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(2);

        expect(allEntities[0].entityContextType).toBe(EntityContextType.VIEW_CREATE);
        expect(allEntities[0].text).toBe('db.new_view');
        expect(allEntities[0].belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
    });

    test('create database', () => {
        const dbCreateContext = splitListener.statementsContext[10];

        const collectListener = new MySqlEntityCollector(commonSql);
        mysql.listen(collectListener as ParseTreeListener, dbCreateContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const dbEntity = allEntities[0];

        expect(dbEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
        expect(dbEntity.text).toBe('db_name');
        expect(dbEntity.position).toEqual({
            endColumn: 24,
            endIndex: 778,
            line: 31,
            startColumn: 17,
            startIndex: 772,
        });

        expect(dbEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_DATABASE_STMT);
        expect(dbEntity.belongStmt.position).toEqual({
            endColumn: 47,
            endIndex: 801,
            endLine: 31,
            startColumn: 1,
            startIndex: 756,
            startLine: 31,
        });

        expect(dbEntity.columns).toBeNull();
        expect(dbEntity.relatedEntities).toBeNull();
    });

    test('create schema', () => {
        const schemaCreateContext = splitListener.statementsContext[11];

        const collectListener = new MySqlEntityCollector(commonSql);
        mysql.listen(collectListener as ParseTreeListener, schemaCreateContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const schemaEntity = allEntities[0];

        expect(schemaEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
        expect(schemaEntity.text).toBe('db_name');
        expect(schemaEntity.position).toEqual({
            endColumn: 36,
            endIndex: 839,
            line: 33,
            startColumn: 29,
            startIndex: 833,
        });

        expect(schemaEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_DATABASE_STMT);
        expect(schemaEntity.belongStmt.position).toEqual({
            endColumn: 59,
            endIndex: 862,
            endLine: 33,
            startColumn: 1,
            startIndex: 805,
            startLine: 33,
        });

        expect(schemaEntity.columns).toBeNull();
        expect(schemaEntity.relatedEntities).toBeNull();
    });

    test('show create database', () => {
        const dbCreateContext = splitListener.statementsContext[12];

        const collectListener = new MySqlEntityCollector(commonSql);
        mysql.listen(collectListener as ParseTreeListener, dbCreateContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const dbEntity = allEntities[0];

        expect(dbEntity.entityContextType).toBe(EntityContextType.DATABASE);
        expect(dbEntity.text).toBe('db_name');
        expect(dbEntity.position).toEqual({
            endColumn: 41,
            endIndex: 905,
            line: 35,
            startColumn: 34,
            startIndex: 899,
        });

        expect(dbEntity.belongStmt.stmtContextType).toBe(StmtContextType.COMMON_STMT);
        expect(dbEntity.belongStmt.position).toEqual({
            endColumn: 42,
            endIndex: 906,
            endLine: 35,
            startColumn: 1,
            startIndex: 866,
            startLine: 35,
        });

        expect(dbEntity.columns).toBeNull();
        expect(dbEntity.relatedEntities).toBeNull();
    });

    test('drop database', () => {
        const dbDropContext = splitListener.statementsContext[13];

        const collectListener = new MySqlEntityCollector(commonSql);
        mysql.listen(collectListener as ParseTreeListener, dbDropContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const dbEntity = allEntities[0];

        expect(dbEntity.entityContextType).toBe(EntityContextType.DATABASE);
        expect(dbEntity.text).toBe('db_name');
        expect(dbEntity.position).toEqual({
            endColumn: 30,
            endIndex: 937,
            line: 37,
            startColumn: 23,
            startIndex: 931,
        });

        expect(dbEntity.belongStmt.stmtContextType).toBe(StmtContextType.COMMON_STMT);
        expect(dbEntity.belongStmt.position).toEqual({
            endColumn: 31,
            endIndex: 938,
            endLine: 37,
            startColumn: 1,
            startIndex: 909,
            startLine: 37,
        });

        expect(dbEntity.columns).toBeNull();
        expect(dbEntity.relatedEntities).toBeNull();
    });

    test('create function', () => {
        const functionCreateContext = splitListener.statementsContext[14];

        const collectListener = new MySqlEntityCollector(commonSql);
        mysql.listen(collectListener as ParseTreeListener, functionCreateContext);

        const allEntities = collectListener.getEntities();

        expect(allEntities.length).toBe(1);

        const functionEntity = allEntities[0];

        expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
        expect(functionEntity.text).toBe('my_concat_ws');
        expect(functionEntity.position).toEqual({
            endColumn: 43,
            endIndex: 982,
            line: 39,
            startColumn: 31,
            startIndex: 971,
        });

        expect(functionEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_FUNCTION_STMT
        );
        expect(functionEntity.belongStmt.position).toEqual({
            endColumn: 87,
            endIndex: 1026,
            endLine: 39,
            startColumn: 1,
            startIndex: 941,
            startLine: 39,
        });

        expect(functionEntity.columns).toBeNull();
        expect(functionEntity.relatedEntities).toBeNull();
    });
});
test/parser/mysql/contextCollect/fixtures/common.sql (new file, 39 lines)
@@ -0,0 +1,39 @@
CREATE TABLE new_tb_with_col (id int, age int);

CREATE TABLE new_tb_from_old AS
SELECT
    old_tb1.column1,
    old_tb2.column2
FROM
    old_tb1
JOIN
    old_tb2 ON old_tb1.id = old_tb2.id
WHERE
    old_tb1.column1 = 'value';


CREATE TABLE new_tb_like_old LIKE old_tb;

SELECT * FROM select_tb LIMIT 5,10;

SELECT * INTO @my_var FROM into_select_tb;

SELECT * FROM from_tb LEFT JOIN join_tb ON (t1.a = t2.a);

INSERT INTO insert_tb (a,b,c) VALUES(1,2,3), (4,5,6), (7,8,9);

INSERT INTO insert_from_tb (a, b) SELECT c, d FROM from_tb1 UNION SELECT e, f FROM from_tb2 ON DUPLICATE KEY UPDATE b = b + c;

CREATE VIEW new_view (col1, col2) AS SELECT CURRENT_DATE;

CREATE VIEW db.new_view AS SELECT * FROM from_tb;

CREATE DATABASE db_name DEFAULT ENCRYPTION 'N';

CREATE SCHEMA IF NOT EXISTS db_name DEFAULT ENCRYPTION 'Y';

SHOW CREATE SCHEMA IF NOT EXISTS db_name;

DROP SCHEMA IF EXISTS db_name;

CREATE FUNCTION IF NOT EXISTS my_concat_ws RETURNS STRING SONAME 'udf_my_concat_ws.so';
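
The entity collector tests above walk this fixture one statement at a time. As a minimal sketch (not part of this commit, stitched together only from the APIs those tests exercise; the variable names are illustrative), collecting entities for every statement in the fixture would look roughly like this:

import fs from 'fs';
import path from 'path';
import { ParseTreeListener } from 'antlr4ng';
import MySQL from 'src/parser/mysql';
import { MySqlEntityCollector, MysqlSplitListener } from 'src/parser/mysql';
import { MySqlParserListener } from 'src/lib/mysql/MySqlParserListener';

const sql = fs.readFileSync(path.join(__dirname, 'fixtures', 'common.sql'), 'utf-8');
const mysql = new MySQL();
const parseTree = mysql.parse(sql);

// Split the script into statements, as the tests do once up front.
const splitListener = new MysqlSplitListener();
mysql.listen(splitListener as MySqlParserListener, parseTree);

// Run one collector per statement, mirroring what each test does for a
// single statementsContext entry, and keep the entities per statement.
const entitiesPerStatement = splitListener.statementsContext.map((stmtContext) => {
    const collector = new MySqlEntityCollector(sql);
    mysql.listen(collector as ParseTreeListener, stmtContext);
    return collector.getEntities();
});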
@@ -0,0 +1,11 @@
SELECT FROM my_db.tb;

SELECT name, calculate_age(birthdate) AS age, FROM students;

INSERT INTO insert_tb SELECT FROM from_tb;

INSERT INTO insert_tb SELECT id, age, FROM from_tb;

CREATE TABLE sorted_census_data AS SELECT FROM unsorted_census_data;

CREATE TABLE sorted_census_data AS SELECT id, age, FROM unsorted_census_data;
@@ -47,3 +47,11 @@ SELECT id, n FROM tbl GROUP BY ;
SELECT id, n FROM tbl ORDER BY name, i ;

SELECT id FROM tb1 GROUP BY ROLLUP( );
+
+SHOW CREATE FUNCTION func_name;
+
+SHOW CREATE TABLE tbl_name;
+
+SHOW CREATE DATABASE IF NOT EXISTS db_name;
+
+SHOW CREATE VIEW test.v;
@@ -1,7 +1,7 @@
import fs from 'fs';
import path from 'path';
import MySQL from 'src/parser/mysql';
-import { CaretPosition, SyntaxContextType } from 'src/parser/common/basic-parser-types';
+import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';

const syntaxSql = fs.readFileSync(
    path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
@@ -18,7 +18,7 @@ describe('MySQL Multiple Statements Syntax Suggestion', () => {
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@@ -32,7 +32,7 @@ describe('MySQL Multiple Statements Syntax Suggestion', () => {
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@@ -46,7 +46,7 @@ describe('MySQL Multiple Statements Syntax Suggestion', () => {
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@@ -60,7 +60,7 @@ describe('MySQL Multiple Statements Syntax Suggestion', () => {
        };
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
test/parser/mysql/suggestion/suggestionWithEntity.test.ts (new file, 156 lines)
@@ -0,0 +1,156 @@
import fs from 'fs';
import path from 'path';
import MySQL from 'src/parser/mysql';
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
import { commentOtherLine } from 'test/helper';

const syntaxSql = fs.readFileSync(
    path.join(__dirname, 'fixtures', 'suggestionWithEntity.sql'),
    'utf-8'
);

describe('MySQL Syntax Suggestion with collect entity', () => {
    const mysql = new MySQL();

    test('select with no columns', () => {
        const pos: CaretPosition = {
            lineNumber: 1,
            column: 8,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = mysql.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = mysql.getAllEntities(sql, pos);
        expect(entities.length).toBe(1);
        expect(entities[0].text).toBe('my_db.tb');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
    });

    test('select with columns with trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 3,
            column: 47,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = mysql.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = mysql.getAllEntities(sql, pos);
        expect(entities.length).toBe(1);
        expect(entities[0].text).toBe('students');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();
    });

    test('insert into table as select with no column', () => {
        const pos: CaretPosition = {
            lineNumber: 5,
            column: 30,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = mysql.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = mysql.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('insert_tb');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('from_tb');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('insert into table as select with trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 7,
            column: 39,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = mysql.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = mysql.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('insert_tb');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('from_tb');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('create table as select with no column', () => {
        const pos: CaretPosition = {
            lineNumber: 9,
            column: 43,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = mysql.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = mysql.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('sorted_census_data');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('unsorted_census_data');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });

    test('create table as select with trailing comma', () => {
        const pos: CaretPosition = {
            lineNumber: 11,
            column: 52,
        };
        const sql = commentOtherLine(syntaxSql, pos.lineNumber);

        const syntaxes = mysql.getSuggestionAtCaretPosition(sql, pos)?.syntax;
        const suggestion = syntaxes?.find(
            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );
        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);

        const entities = mysql.getAllEntities(sql, pos);
        expect(entities.length).toBe(2);
        expect(entities[0].text).toBe('sorted_census_data');
        expect(entities[0].entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(entities[0].belongStmt.isContainCaret).toBeTruthy();

        expect(entities[1].text).toBe('unsorted_census_data');
        expect(entities[1].entityContextType).toBe(EntityContextType.TABLE);
        expect(entities[1].belongStmt.isContainCaret).toBeTruthy();
    });
});
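
The tests above pair the caret-aware suggestion with entity collection. A minimal sketch of how a caller might combine the two calls (the helper name is illustrative and not part of the library; it also assumes getAllEntities returns a plain array, as the assertions above suggest):

import MySQL from 'src/parser/mysql';
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';

// Return the names of tables already referenced in the statement containing
// the caret, but only when the caret position expects a column.
function tablesVisibleAtCaret(sql: string, pos: CaretPosition): string[] {
    const mysql = new MySQL();
    const syntaxes = mysql.getSuggestionAtCaretPosition(sql, pos)?.syntax;
    const expectsColumn = syntaxes?.some(
        (syn) => syn.syntaxContextType === EntityContextType.COLUMN
    );
    if (!expectsColumn) {
        return [];
    }
    return mysql
        .getAllEntities(sql, pos)
        .filter(
            (entity) =>
                entity.entityContextType === EntityContextType.TABLE &&
                entity.belongStmt.isContainCaret
        )
        .map((entity) => entity.text);
}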
@@ -1,7 +1,7 @@
import fs from 'fs';
import path from 'path';
import MySQL from 'src/parser/mysql';
-import { SyntaxContextType, CaretPosition } from 'src/parser/common/basic-parser-types';
+import { EntityContextType, CaretPosition } from 'src/parser/common/basic-parser-types';
import { commentOtherLine } from 'test/helper';

const syntaxSql = fs.readFileSync(
@@ -26,7 +26,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@@ -43,7 +43,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@@ -60,7 +60,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@@ -77,7 +77,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.TABLE
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
        );

        expect(suggestion).not.toBeUndefined();
@@ -94,7 +94,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.VIEW_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@@ -111,7 +111,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.VIEW
+            (syn) => syn.syntaxContextType === EntityContextType.VIEW
        );

        expect(suggestion).not.toBeUndefined();
@@ -128,7 +128,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.FUNCTION_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@@ -145,7 +145,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
+            (syn) => syn.syntaxContextType === EntityContextType.FUNCTION
        );

        expect(suggestion).not.toBeUndefined();
@@ -162,7 +162,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.DATABASE_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@@ -179,7 +179,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
+            (syn) => syn.syntaxContextType === EntityContextType.DATABASE
        );

        expect(suggestion).not.toBeUndefined();
@@ -196,7 +196,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -213,7 +213,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -230,7 +230,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -247,7 +247,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN_CREATE
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN_CREATE
        );

        expect(suggestion).not.toBeUndefined();
@@ -264,7 +264,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -281,7 +281,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -298,7 +298,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -315,7 +315,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -332,7 +332,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -349,7 +349,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -366,7 +366,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -383,7 +383,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -400,7 +400,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -417,7 +417,7 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
@@ -434,10 +434,78 @@ describe('MySQL Syntax Suggestion', () => {
            pos
        )?.syntax;
        const suggestion = syntaxes?.find(
-            (syn) => syn.syntaxContextType === SyntaxContextType.COLUMN
+            (syn) => syn.syntaxContextType === EntityContextType.COLUMN
        );

        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
    });
+
+    test('show create function', () => {
+        const pos: CaretPosition = {
+            lineNumber: 51,
+            column: 31,
+        };
+        const syntaxes = parser.getSuggestionAtCaretPosition(
+            commentOtherLine(syntaxSql, pos.lineNumber),
+            pos
+        )?.syntax;
+        const suggestion = syntaxes?.find(
+            (syn) => syn.syntaxContextType === EntityContextType.FUNCTION
+        );
+
+        expect(suggestion).not.toBeUndefined();
+        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['func_name']);
+    });
+
+    test('show create table', () => {
+        const pos: CaretPosition = {
+            lineNumber: 53,
+            column: 27,
+        };
+        const syntaxes = parser.getSuggestionAtCaretPosition(
+            commentOtherLine(syntaxSql, pos.lineNumber),
+            pos
+        )?.syntax;
+        const suggestion = syntaxes?.find(
+            (syn) => syn.syntaxContextType === EntityContextType.TABLE
+        );
+
+        expect(suggestion).not.toBeUndefined();
+        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['tbl_name']);
+    });
+
+    test('show create dbName', () => {
+        const pos: CaretPosition = {
+            lineNumber: 55,
+            column: 43,
+        };
+        const syntaxes = parser.getSuggestionAtCaretPosition(
+            commentOtherLine(syntaxSql, pos.lineNumber),
+            pos
+        )?.syntax;
+        const suggestion = syntaxes?.find(
+            (syn) => syn.syntaxContextType === EntityContextType.DATABASE
+        );
+
+        expect(suggestion).not.toBeUndefined();
+        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db_name']);
+    });
+
+    test('show create view', () => {
+        const pos: CaretPosition = {
+            lineNumber: 57,
+            column: 24,
+        };
+        const syntaxes = parser.getSuggestionAtCaretPosition(
+            commentOtherLine(syntaxSql, pos.lineNumber),
+            pos
+        )?.syntax;
+        const suggestion = syntaxes?.find(
+            (syn) => syn.syntaxContextType === EntityContextType.VIEW
+        );
+
+        expect(suggestion).not.toBeUndefined();
+        expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['test', '.', 'v']);
+    });
});
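
For the new "show create xxx" cases above, wordRanges carries the tokens of the name already typed before the caret; joining their text reconstructs the partially typed identifier. A small sketch, reusing a suggestion found exactly as in the tests above:

// e.g. ['test', '.', 'v'] from "SHOW CREATE VIEW test.v;" becomes 'test.v'
const typedName = suggestion?.wordRanges.map((token) => token.text).join('') ?? '';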
|
444
test/parser/pgsql/contextCollect/entityCollector.test.ts
Normal file
444
test/parser/pgsql/contextCollect/entityCollector.test.ts
Normal file
@ -0,0 +1,444 @@
import { ParseTreeListener } from 'antlr4ng';
import fs from 'fs';
import path from 'path';
import { PostgreSQLParserListener } from 'src/lib/pgsql/PostgreSQLParserListener';
import { EntityContextType } from 'src/parser/common/basic-parser-types';
import { StmtContextType } from 'src/parser/common/entityCollector';
import PostgreSQL, { PostgreSQLEntityCollector, PostgreSqlSplitListener } from 'src/parser/pgsql';

const commonSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'common.sql'), 'utf-8');

describe('PostgreSQL entity collector tests', () => {
    const postgreSql = new PostgreSQL();
    const parseTree = postgreSql.parse(commonSql);
    const splitListener = new PostgreSqlSplitListener();
    postgreSql.listen(splitListener as PostgreSQLParserListener, parseTree);

    test('validate common sql', () => {
        expect(postgreSql.validate(commonSql).length).toBe(0);
    });

    test('split results', () => {
        expect(splitListener.statementsContext.length).toBe(10);
    });

    test('create database', () => {
        const testingContext = splitListener.statementsContext[0];

        const collectListener = new PostgreSQLEntityCollector(commonSql);
        postgreSql.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(1);

        const sourceTableEntity = allEntities[0];

        expect(sourceTableEntity.entityContextType).toBe(EntityContextType.DATABASE_CREATE);
        expect(sourceTableEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_DATABASE_STMT
        );
        expect(sourceTableEntity.text).toBe('music2');
    });

    test('create table by select', () => {
        const testingContext = splitListener.statementsContext[1];

        const collectListener = new PostgreSQLEntityCollector(commonSql);
        postgreSql.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(3);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('films_recent');
        expect(tableCreateEntity.position).toEqual({
            startIndex: 110,
            endIndex: 121,
            line: 6,
            startColumn: 14,
            endColumn: 26,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            startIndex: 97,
            endIndex: 246,
            startLine: 6,
            endLine: 7,
            startColumn: 1,
            endColumn: 122,
        });

        expect(tableCreateEntity.columns).toBeNull();

        expect(tableCreateEntity.relatedEntities.length).toBe(2);
        tableCreateEntity.relatedEntities.forEach((relatedEntity) => {
            expect(relatedEntity.entityContextType).toBe(EntityContextType.TABLE);
            expect(allEntities.some((en) => relatedEntity === en)).toBeTruthy();
        });

        expect(allEntities[1].text).toBe('films');
        expect(allEntities[1].belongStmt.rootStmt).toBe(allEntities[0].belongStmt);
        expect(allEntities[1].position).toEqual({
            startIndex: 168,
            endIndex: 172,
            line: 7,
            startColumn: 43,
            endColumn: 48,
        });

        expect(allEntities[2].text).toBe('films2');
        expect(allEntities[2].belongStmt.rootStmt).toBe(allEntities[0].belongStmt);
        expect(allEntities[2].position).toEqual({
            startIndex: 179,
            endIndex: 184,
            line: 7,
            startColumn: 54,
            endColumn: 60,
        });
    });

    test('create table of columns', () => {
        const testingContext = splitListener.statementsContext[2];

        const collectListener = new PostgreSQLEntityCollector(commonSql);
        postgreSql.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(1);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('distributors');
        expect(tableCreateEntity.position).toEqual({
            startIndex: 263,
            endIndex: 274,
            line: 9,
            startColumn: 14,
            endColumn: 26,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            startIndex: 250,
            endIndex: 377,
            startLine: 9,
            endLine: 13,
            startColumn: 1,
            endColumn: 2,
        });

        expect(tableCreateEntity.columns.length).toBe(3);
        tableCreateEntity.columns.forEach((columEntity) => {
            expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
            expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
            expect(columEntity.text).toBe(
                commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
            );
        });
    });

    test('create foreign table by columns', () => {
        const testingContext = splitListener.statementsContext[3];

        const collectListener = new PostgreSQLEntityCollector(commonSql);
        postgreSql.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(1);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('films');
        expect(tableCreateEntity.position).toEqual({
            startIndex: 402,
            endIndex: 406,
            line: 15,
            startColumn: 22,
            endColumn: 27,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            startIndex: 381,
            endIndex: 626,
            startLine: 15,
            endLine: 23,
            startColumn: 1,
            endColumn: 19,
        });

        expect(tableCreateEntity.columns.length).toBe(6);
        tableCreateEntity.columns.forEach((columEntity) => {
            expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
            expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
            expect(columEntity.text).toBe(
                commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
            );
        });
    });

    test('create foreign table of partition', () => {
        const testingContext = splitListener.statementsContext[4];

        const collectListener = new PostgreSQLEntityCollector(commonSql);
        postgreSql.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(2);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.TABLE_CREATE);
        expect(tableCreateEntity.text).toBe('measurement_y2016m07');
        expect(tableCreateEntity.position).toEqual({
            startIndex: 651,
            endIndex: 670,
            line: 25,
            startColumn: 22,
            endColumn: 42,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_TABLE_STMT
        );
        expect(tableCreateEntity.belongStmt.position).toEqual({
            startIndex: 630,
            endIndex: 769,
            startLine: 25,
            endLine: 27,
            startColumn: 1,
            endColumn: 21,
        });

        expect(tableCreateEntity.columns).toBeNull();

        expect(tableCreateEntity.relatedEntities.length).toBe(1);

        const relatedEntity = tableCreateEntity.relatedEntities[0];
        expect(relatedEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(allEntities.some((en) => relatedEntity === en)).toBeTruthy();
        expect(relatedEntity.text).toBe('measurement');
    });

    test('create view by select', () => {
        const testingContext = splitListener.statementsContext[5];

        const collectListener = new PostgreSQLEntityCollector(commonSql);
        postgreSql.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(2);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
        expect(tableCreateEntity.text).toBe('comedies');
        expect(tableCreateEntity.position).toEqual({
            startIndex: 785,
            endIndex: 792,
            line: 29,
            startColumn: 13,
            endColumn: 21,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
        expect(tableCreateEntity.belongStmt.position).toEqual({
            startIndex: 773,
            endIndex: 849,
            startLine: 29,
            endLine: 32,
            startColumn: 1,
            endColumn: 26,
        });

        expect(tableCreateEntity.columns).toBeNull();

        expect(tableCreateEntity.relatedEntities.length).toBe(1);

        const relatedEntity = tableCreateEntity.relatedEntities[0];
        expect(relatedEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(allEntities.some((en) => relatedEntity === en)).toBeTruthy();
        expect(relatedEntity.text).toBe('films');
    });

    test('create materialized view by columns', () => {
        const testingContext = splitListener.statementsContext[6];

        const collectListener = new PostgreSQLEntityCollector(commonSql);
        postgreSql.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(2);

        const tableCreateEntity = allEntities[0];

        expect(tableCreateEntity.entityContextType).toBe(EntityContextType.VIEW_CREATE);
        expect(tableCreateEntity.text).toBe('comedies_mate');
        expect(tableCreateEntity.position).toEqual({
            startIndex: 878,
            endIndex: 890,
            line: 34,
            startColumn: 26,
            endColumn: 39,
        });

        expect(tableCreateEntity.belongStmt.stmtContextType).toBe(StmtContextType.CREATE_VIEW_STMT);
        expect(tableCreateEntity.belongStmt.position).toEqual({
            startIndex: 853,
            endIndex: 1055,
            startLine: 34,
            endLine: 39,
            startColumn: 1,
            endColumn: 17,
        });

        expect(tableCreateEntity.columns.length).toBe(2);

        tableCreateEntity.columns.forEach((columEntity) => {
            expect(columEntity.entityContextType).toBe(EntityContextType.COLUMN_CREATE);
            expect(columEntity.belongStmt).toBe(tableCreateEntity.belongStmt);
            expect(columEntity.text).toBe(
                commonSql.slice(columEntity.position.startIndex, columEntity.position.endIndex + 1)
            );
        });
    });

    test('select with clause', () => {
        const testingContext = splitListener.statementsContext[7];

        const collectListener = new PostgreSQLEntityCollector(commonSql);
        postgreSql.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(2);

        const tableEntity1 = allEntities[0];
        const tableEntity2 = allEntities[1];
        expect(tableEntity1.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity1.text).toBe('table_expression');
        expect(tableEntity1.position).toEqual({
            startIndex: 1109,
            endIndex: 1124,
            line: 41,
            startColumn: 51,
            endColumn: 67,
        });

        expect(tableEntity1.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(tableEntity1.belongStmt.position).toEqual({
            startIndex: 1094,
            endIndex: 1124,
            startLine: 41,
            endLine: 41,
            startColumn: 36,
            endColumn: 67,
        });
        expect(tableEntity1.columns).toBeNull();
        expect(tableEntity1.relatedEntities).toBeNull();

        expect(tableEntity2.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableEntity2.text).toBe('table_expression1');
        expect(tableEntity2.position).toEqual({
            startIndex: 1182,
            endIndex: 1198,
            line: 42,
            startColumn: 55,
            endColumn: 72,
        });

        expect(tableEntity2.belongStmt.stmtContextType).toBe(StmtContextType.SELECT_STMT);
        expect(tableEntity2.belongStmt.position).toEqual({
            startIndex: 1059,
            endIndex: 1237,
            startLine: 41,
            endLine: 42,
            startColumn: 1,
            endColumn: 111,
        });
        expect(tableEntity2.columns).toBeNull();
        expect(tableEntity2.relatedEntities).toBeNull();
    });

    test('insert into table', () => {
        const testingContext = splitListener.statementsContext[8];

        const collectListener = new PostgreSQLEntityCollector(commonSql);
        postgreSql.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(1);

        const tableInsertEntity = allEntities[0];

        expect(tableInsertEntity.entityContextType).toBe(EntityContextType.TABLE);
        expect(tableInsertEntity.text).toBe('insert_films');
        expect(tableInsertEntity.position).toEqual({
            startIndex: 1253,
            endIndex: 1264,
            line: 44,
            startColumn: 13,
            endColumn: 25,
        });

        expect(tableInsertEntity.belongStmt.stmtContextType).toBe(StmtContextType.INSERT_STMT);
        expect(tableInsertEntity.belongStmt.position).toEqual({
            startIndex: 1241,
            endIndex: 1355,
            startLine: 44,
            endLine: 45,
            startColumn: 1,
            endColumn: 55,
        });

        expect(tableInsertEntity.columns).toBeNull();
        expect(tableInsertEntity.relatedEntities).toBeNull();
    });

    test('create function', () => {
        const testingContext = splitListener.statementsContext[9];

        const collectListener = new PostgreSQLEntityCollector(commonSql);
        postgreSql.listen(collectListener as ParseTreeListener, testingContext);

        const allEntities = collectListener.getEntities();
        expect(allEntities.length).toBe(1);

        const functionEntity = allEntities[0];

        expect(functionEntity.entityContextType).toBe(EntityContextType.FUNCTION_CREATE);
        expect(functionEntity.text).toBe('get_color_note');
        expect(functionEntity.position).toEqual({
            endColumn: 31,
            endIndex: 1388,
            line: 47,
            startColumn: 17,
            startIndex: 1375,
        });

        expect(functionEntity.belongStmt.stmtContextType).toBe(
            StmtContextType.CREATE_FUNCTION_STMT
        );
        expect(functionEntity.belongStmt.position).toEqual({
            endColumn: 15,
            endIndex: 1477,
            endLine: 49,
            startColumn: 1,
            startIndex: 1359,
            startLine: 47,
        });

        expect(functionEntity.columns).toBeNull();
        expect(functionEntity.relatedEntities).toBeNull();
    });
});
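The position assertions above rely on a simple convention that is easy to verify by hand: startIndex and endIndex are zero-based character offsets into the fixture text, inclusive at both ends, so slicing with endIndex + 1 reproduces the entity name exactly (for example, 'films_recent' at offsets 110 to 121 spans 121 - 110 + 1 = 12 characters). A tiny standalone sketch of that invariant follows; the inline SQL is an illustrative assumption, not the actual fixtures/common.sql.

// Illustrative check of the inclusive-offset convention used by entity.position.
const source = 'CREATE TABLE films_recent AS SELECT * FROM films;';
const position = { startIndex: 13, endIndex: 24 }; // offsets of "films_recent" in this string

const recovered = source.slice(position.startIndex, position.endIndex + 1);
console.log(recovered === 'films_recent'); // true: 24 - 13 + 1 === 12 characters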
Some files were not shown because too many files have changed in this diff.