Feat/auto complete (#175)

* feat: update hive grammar to adapt to antlr4-c3

* feat: support viewName, dbName, fnName autoComplete for hive

* test: add hive suggestion unit test

* test: optimize flink suggestion unit tests
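
The suggestion API that these changes feed is exercised by the new Hive unit tests further down in this diff. A minimal consumer sketch follows (the relative import paths are the ones used by the test files, so an external package would import from its own entry point instead):

import { CaretPosition, SyntaxContextType } from '../../../../src/parser/common/basic-parser-types';
import HiveSQL from '../../../../src/parser/hive';

const parser = new HiveSQL();
const sql = 'INSERT INTO db.tb ;';
// Caret right after `db.tb`: line 1, column 18.
const pos: CaretPosition = { lineNumber: 1, column: 18 };

const suggestions = parser.getSuggestionAtCaretPosition(sql, pos);

// Entity suggestions: each entry carries a SyntaxContextType plus the tokens of
// the partially typed name under the caret.
const tableSuggestion = suggestions?.syntax.find(
    (item) => item.syntaxContextType === SyntaxContextType.TABLE
);
console.log(tableSuggestion?.wordRanges.map((token) => token.text)); // [ 'db', '.', 'tb' ]

// Keyword suggestions are plain strings that are valid at the caret.
console.log(suggestions?.keywords);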
Hayden 2023-10-10 16:37:49 +08:00 committed by GitHub
parent d0ad381833
commit c4030929b2
14 changed files with 9376 additions and 8469 deletions

View File

@ -119,13 +119,13 @@ replDumpStatement
; ;
replDbPolicy replDbPolicy
: dbName=id_ (DOT tablePolicy=replTableLevelPolicy)? : dbName=dbSchemaName (DOT tablePolicy=replTableLevelPolicy)?
; ;
replLoadStatement replLoadStatement
: KW_REPL KW_LOAD : KW_REPL KW_LOAD
sourceDbPolicy=replDbPolicy sourceDbPolicy=replDbPolicy
(KW_INTO dbName=id_)? (KW_INTO dbName=dbSchemaName)?
(KW_WITH replConf=replConfigs)? (KW_WITH replConf=replConfigs)?
; ;
@ -143,7 +143,7 @@ replTableLevelPolicy
replStatusStatement replStatusStatement
: KW_REPL KW_STATUS : KW_REPL KW_STATUS
dbName=id_ dbName=dbSchemaName
(KW_WITH replConf=replConfigs)? (KW_WITH replConf=replConfigs)?
; ;
@ -233,14 +233,14 @@ orReplace
createDatabaseStatement createDatabaseStatement
: KW_CREATE KW_REMOTE? db_schema : KW_CREATE KW_REMOTE? db_schema
ifNotExists? ifNotExists?
name=id_ name=dbSchemaNameCreate
databaseComment? databaseComment?
dbLocation? dbLocation?
dbManagedLocation? dbManagedLocation?
(KW_WITH KW_DBPROPERTIES dbprops=dbProperties)? (KW_WITH KW_DBPROPERTIES dbprops=dbProperties)?
| KW_CREATE KW_REMOTE db_schema | KW_CREATE KW_REMOTE db_schema
ifNotExists? ifNotExists?
name=id_ name=dbSchemaNameCreate
databaseComment? databaseComment?
dbConnectorName dbConnectorName
(KW_WITH KW_DBPROPERTIES dbprops=dbProperties)? (KW_WITH KW_DBPROPERTIES dbprops=dbProperties)?
@ -263,15 +263,15 @@ dbPropertiesList
; ;
dbConnectorName dbConnectorName
: KW_USING dcName=id_ : KW_USING dcName=dbSchemaName
; ;
switchDatabaseStatement switchDatabaseStatement
: KW_USE id_ : KW_USE dbSchemaName
; ;
dropDatabaseStatement dropDatabaseStatement
: KW_DROP db_schema ifExists? id_ restrictOrCascade? : KW_DROP db_schema ifExists? dbSchemaName restrictOrCascade?
; ;
databaseComment databaseComment
@ -300,15 +300,15 @@ partTypeExpr
; ;
tabPartColTypeExpr tabPartColTypeExpr
: tableName partitionSpec? extColumnName? : tableOrView partitionSpec? extColumnName?
; ;
descStatement descStatement
: (KW_DESCRIBE | KW_DESC) : (KW_DESCRIBE | KW_DESC)
( (
db_schema KW_EXTENDED? dbName=id_ db_schema KW_EXTENDED? dbName=dbSchemaName
| KW_DATACONNECTOR KW_EXTENDED? dcName=id_ | KW_DATACONNECTOR KW_EXTENDED? dcName=dbSchemaName
| KW_FUNCTION KW_EXTENDED? name=descFuncNames | KW_FUNCTION KW_EXTENDED? name=functionNameForDDL
| (descOptions=KW_FORMATTED | descOptions=KW_EXTENDED) parttype=tabPartColTypeExpr | (descOptions=KW_FORMATTED | descOptions=KW_EXTENDED) parttype=tabPartColTypeExpr
| parttype=tabPartColTypeExpr | parttype=tabPartColTypeExpr
) )
@ -333,26 +333,26 @@ db_schema
showStatement showStatement
: KW_SHOW (KW_DATABASES | KW_SCHEMAS) (KW_LIKE showStmtIdentifier)? : KW_SHOW (KW_DATABASES | KW_SCHEMAS) (KW_LIKE showStmtIdentifier)?
| KW_SHOW isExtended=KW_EXTENDED? KW_TABLES (from_in db_name=id_)? filter=showTablesFilterExpr? | KW_SHOW isExtended=KW_EXTENDED? KW_TABLES (from_in db_name=dbSchemaName)? filter=showTablesFilterExpr?
| KW_SHOW KW_VIEWS (from_in db_name=id_)? (KW_LIKE showStmtIdentifier | showStmtIdentifier)? | KW_SHOW KW_VIEWS (from_in db_name=dbSchemaName)? (KW_LIKE showStmtIdentifier | showStmtIdentifier)?
| KW_SHOW KW_MATERIALIZED KW_VIEWS (from_in db_name=id_)? (KW_LIKE showStmtIdentifier|showStmtIdentifier)? | KW_SHOW KW_MATERIALIZED KW_VIEWS (from_in db_name=dbSchemaName)? (KW_LIKE showStmtIdentifier|showStmtIdentifier)?
| KW_SHOW KW_SORTED? KW_COLUMNS from_in tableName (from_in db_name=id_)? (KW_LIKE showStmtIdentifier|showStmtIdentifier)? | KW_SHOW KW_SORTED? KW_COLUMNS from_in tableOrView (from_in db_name=dbSchemaName)? (KW_LIKE showStmtIdentifier|showStmtIdentifier)?
| KW_SHOW KW_FUNCTIONS (KW_LIKE showFunctionIdentifier)? | KW_SHOW KW_FUNCTIONS (KW_LIKE functionNameForDDL)?
| KW_SHOW KW_PARTITIONS tabName=tableName partitionSpec? whereClause? orderByClause? limitClause? | KW_SHOW KW_PARTITIONS tabOrViewName=tableOrView partitionSpec? whereClause? orderByClause? limitClause?
| KW_SHOW KW_CREATE (db_schema db_name=id_ | KW_TABLE tabName=tableName) | KW_SHOW KW_CREATE (db_schema db_name=dbSchemaName | KW_TABLE tabName=tableName)
| KW_SHOW KW_TABLE KW_EXTENDED (from_in db_name=id_)? KW_LIKE showStmtIdentifier partitionSpec? | KW_SHOW KW_TABLE KW_EXTENDED (from_in db_name=dbSchemaName)? KW_LIKE showStmtIdentifier partitionSpec?
| KW_SHOW KW_TBLPROPERTIES tableName (LPAREN prptyName=StringLiteral RPAREN)? | KW_SHOW KW_TBLPROPERTIES tableName (LPAREN prptyName=StringLiteral RPAREN)?
| KW_SHOW KW_LOCKS (db_schema dbName=id_ isExtended=KW_EXTENDED? | parttype=partTypeExpr? isExtended=KW_EXTENDED?) | KW_SHOW KW_LOCKS (db_schema dbName=dbSchemaName isExtended=KW_EXTENDED? | parttype=partTypeExpr? isExtended=KW_EXTENDED?)
| KW_SHOW KW_COMPACTIONS | KW_SHOW KW_COMPACTIONS
( compactionId ( compactionId
| db_schema dbName=id_ compactionPool? compactionType? compactionStatus? orderByClause? limitClause? | db_schema dbName=dbSchemaName compactionPool? compactionType? compactionStatus? orderByClause? limitClause?
| parttype=partTypeExpr? compactionPool? compactionType? compactionStatus? orderByClause? limitClause? | parttype=partTypeExpr? compactionPool? compactionType? compactionStatus? orderByClause? limitClause?
) )
| KW_SHOW KW_TRANSACTIONS | KW_SHOW KW_TRANSACTIONS
| KW_SHOW KW_CONF StringLiteral | KW_SHOW KW_CONF StringLiteral
| KW_SHOW KW_RESOURCE (KW_PLAN rp_name=id_ | KW_PLANS) | KW_SHOW KW_RESOURCE (KW_PLAN rp_name=id_ | KW_PLANS)
| KW_SHOW KW_DATACONNECTORS | KW_SHOW KW_DATACONNECTORS
| KW_SHOW KW_FORMATTED? ( KW_INDEX | KW_INDEXES ) KW_ON tableName (from_in id_)? | KW_SHOW KW_FORMATTED? ( KW_INDEX | KW_INDEXES ) KW_ON tableName (from_in dbSchemaName)?
; ;
showTablesFilterExpr showTablesFilterExpr
@ -366,7 +366,7 @@ lockStatement
; ;
lockDatabase lockDatabase
: KW_LOCK db_schema dbName=id_ lockMode : KW_LOCK db_schema dbName=dbSchemaName lockMode
; ;
lockMode lockMode
@ -379,7 +379,7 @@ unlockStatement
; ;
unlockDatabase unlockDatabase
: KW_UNLOCK db_schema dbName=id_ : KW_UNLOCK db_schema dbName=dbSchemaName
; ;
createRoleStatement createRoleStatement
@ -450,14 +450,14 @@ privilegeObject
database or table type. Type is optional, default type is table database or table type. Type is optional, default type is table
*/ */
privObject privObject
: db_schema id_ : db_schema dbSchemaName
| KW_TABLE? tableName partitionSpec? | KW_TABLE? tableName partitionSpec?
| KW_URI path=StringLiteral | KW_URI path=StringLiteral
| KW_SERVER id_ | KW_SERVER id_
; ;
privObjectCols privObjectCols
: db_schema id_ : db_schema dbSchemaName
| KW_TABLE? tableName (LPAREN cols=columnNameList RPAREN)? partitionSpec? | KW_TABLE? tableName (LPAREN cols=columnNameList RPAREN)? partitionSpec?
| KW_URI path=StringLiteral | KW_URI path=StringLiteral
| KW_SERVER id_ | KW_SERVER id_
@ -539,12 +539,12 @@ resourceType
; ;
createFunctionStatement createFunctionStatement
: KW_CREATE temp=KW_TEMPORARY? KW_FUNCTION functionIdentifier KW_AS StringLiteral : KW_CREATE temp=KW_TEMPORARY? KW_FUNCTION functionNameCreate KW_AS StringLiteral
(KW_USING rList=resourceList)? (KW_USING rList=resourceList)?
; ;
dropFunctionStatement dropFunctionStatement
: KW_DROP temp=KW_TEMPORARY? KW_FUNCTION ifExists? functionIdentifier : KW_DROP temp=KW_TEMPORARY? KW_FUNCTION ifExists? functionNameForDDL
; ;
reloadFunctionsStatement reloadFunctionsStatement
@ -575,7 +575,7 @@ dropIndexStatement
: KW_DROP KW_INDEX ifExists? id_ KW_ON tableName; : KW_DROP KW_INDEX ifExists? id_ KW_ON tableName;
createViewStatement createViewStatement
: KW_CREATE orReplace? KW_VIEW ifNotExists? name=tableName : KW_CREATE orReplace? KW_VIEW ifNotExists? name=viewNameCreate
(LPAREN columnNameCommentList RPAREN)? tableComment? viewPartition? (LPAREN columnNameCommentList RPAREN)? tableComment? viewPartition?
tablePropertiesPrefixed? tablePropertiesPrefixed?
KW_AS KW_AS
@ -612,7 +612,7 @@ dropViewStatement
; ;
createMaterializedViewStatement createMaterializedViewStatement
: KW_CREATE KW_MATERIALIZED KW_VIEW ifNotExists? name=tableName : KW_CREATE KW_MATERIALIZED KW_VIEW ifNotExists? name=viewNameCreate
rewriteDisabled? tableComment? viewPartition? viewOrganization? rewriteDisabled? tableComment? viewPartition? viewOrganization?
tableRowFormat? tableFileFormat? tableLocation? tableRowFormat? tableFileFormat? tableLocation?
tablePropertiesPrefixed? KW_AS selectStatementWithCTE tablePropertiesPrefixed? KW_AS selectStatementWithCTE
@ -660,11 +660,6 @@ definedAsSpec
: KW_DEFINED? KW_AS statement : KW_DEFINED? KW_AS statement
; ;
showFunctionIdentifier
: functionIdentifier
| StringLiteral
;
showStmtIdentifier showStmtIdentifier
: id_ : id_
| StringLiteral | StringLiteral
@ -1345,8 +1340,8 @@ END SHOW COMPACTIONS statement
alterStatement alterStatement
: KW_ALTER ( KW_TABLE tableName alterTableStatementSuffix : KW_ALTER ( KW_TABLE tableName alterTableStatementSuffix
| KW_VIEW tableName KW_AS? alterViewStatementSuffix | KW_VIEW viewName KW_AS? alterViewStatementSuffix
| KW_MATERIALIZED KW_VIEW tableNameTree=tableName alterMaterializedViewStatementSuffix | KW_MATERIALIZED KW_VIEW tableNameTree=viewName alterMaterializedViewStatementSuffix
| db_schema alterDatabaseStatementSuffix | db_schema alterDatabaseStatementSuffix
| KW_DATACONNECTOR alterDataConnectorStatementSuffix | KW_DATACONNECTOR alterDataConnectorStatementSuffix
| KW_INDEX alterIndexStatementSuffix | KW_INDEX alterIndexStatementSuffix
@ -1425,23 +1420,23 @@ alterDatabaseStatementSuffix
; ;
alterDatabaseSuffixProperties alterDatabaseSuffixProperties
: name=id_ KW_SET KW_DBPROPERTIES dbProperties : name=dbSchemaName KW_SET KW_DBPROPERTIES dbProperties
; ;
alterDatabaseSuffixSetOwner alterDatabaseSuffixSetOwner
: dbName=id_ KW_SET KW_OWNER principalAlterName : dbName=dbSchemaName KW_SET KW_OWNER principalAlterName
; ;
alterDatabaseSuffixSetLocation alterDatabaseSuffixSetLocation
: dbName=id_ KW_SET (KW_LOCATION | KW_MANAGEDLOCATION) newLocation=StringLiteral : dbName=dbSchemaName KW_SET (KW_LOCATION | KW_MANAGEDLOCATION) newLocation=StringLiteral
; ;
alterDatabaseSuffixSetManagedLocation alterDatabaseSuffixSetManagedLocation
: dbName=id_ KW_SET KW_MANAGEDLOCATION newLocation=StringLiteral : dbName=dbSchemaName KW_SET KW_MANAGEDLOCATION newLocation=StringLiteral
; ;
alterStatementSuffixRename alterStatementSuffixRename
: KW_RENAME KW_TO tableName : KW_RENAME KW_TO tableNameCreate
; ;
alterStatementSuffixAddCol alterStatementSuffixAddCol
@ -1635,15 +1630,15 @@ alterDataConnectorStatementSuffix
; ;
alterDataConnectorSuffixProperties alterDataConnectorSuffixProperties
: name=id_ KW_SET KW_DCPROPERTIES dcProperties : name=dbSchemaName KW_SET KW_DCPROPERTIES dcProperties
; ;
alterDataConnectorSuffixSetOwner alterDataConnectorSuffixSetOwner
: dcName=id_ KW_SET KW_OWNER principalAlterName : dcName=dbSchemaName KW_SET KW_OWNER principalAlterName
; ;
alterDataConnectorSuffixSetUrl alterDataConnectorSuffixSetUrl
: dcName=id_ KW_SET KW_URL newUri=StringLiteral : dcName=dbSchemaName KW_SET KW_URL newUri=StringLiteral
; ;
likeTableOrFile likeTableOrFile
@ -1656,7 +1651,7 @@ likeTableOrFile
Rules for parsing createtable Rules for parsing createtable
*/ */
createTableStatement createTableStatement
: KW_CREATE temp=KW_TEMPORARY? trans=KW_TRANSACTIONAL? ext=KW_EXTERNAL? KW_TABLE ifNotExists? name=tableName : KW_CREATE temp=KW_TEMPORARY? trans=KW_TRANSACTIONAL? ext=KW_EXTERNAL? KW_TABLE ifNotExists? name=tableNameCreate
( likeTableOrFile ( likeTableOrFile
createTablePartitionSpec? createTablePartitionSpec?
tableRowFormat? tableRowFormat?
@ -1674,7 +1669,7 @@ createTableStatement
tablePropertiesPrefixed? tablePropertiesPrefixed?
(KW_AS selectStatementWithCTE)? (KW_AS selectStatementWithCTE)?
) )
| KW_CREATE mgd=KW_MANAGED KW_TABLE ifNotExists? name=tableName | KW_CREATE mgd=KW_MANAGED KW_TABLE ifNotExists? name=tableNameCreate
( likeTableOrFile ( likeTableOrFile
tableRowFormat? tableRowFormat?
tableFileFormat? tableFileFormat?
@ -1720,7 +1715,7 @@ dropDataConnectorStatement
tableAllColumns tableAllColumns
: STAR : STAR
| tableName DOT STAR | tableOrView DOT STAR
; ;
// (table|column) // (table|column)
@ -1816,7 +1811,7 @@ tableSample
; ;
tableSource tableSource
: tabname=tableName props=tableProperties? ts=tableSample? asOf=asOfClause? (KW_AS? alias=id_)? : tabname=tableOrView props=tableProperties? ts=tableSample? asOf=asOfClause? (KW_AS? alias=id_)?
; ;
asOfClause asOfClause
@ -1826,7 +1821,20 @@ asOfClause
; ;
uniqueJoinTableSource uniqueJoinTableSource
: tabname=tableName ts=tableSample? (KW_AS? alias=id_)? : tabname=tableOrView ts=tableSample? (KW_AS? alias=id_)?
;
dbSchemaName
: id_
;
dbSchemaNameCreate
: id_
;
tableOrView
: tableName
| viewName
; ;
tableName tableName
@ -1834,10 +1842,19 @@ tableName
| tab=id_ | tab=id_
; ;
tableNameCreate
: db=id_ DOT tab=id_ (DOT meta=id_)?
| tab=id_
;
viewName viewName
: (db=id_ DOT)? view=id_ : (db=id_ DOT)? view=id_
; ;
viewNameCreate
: (db=id_ DOT)? view=id_
;
subQuerySource subQuerySource
: LPAREN queryStatementExpression RPAREN KW_AS? id_ : LPAREN queryStatementExpression RPAREN KW_AS? id_
; ;
@ -2140,7 +2157,7 @@ trimFunction
// fun(par1, par2, par3) // fun(par1, par2, par3)
function_ function_
: trimFunction : trimFunction
| functionName | functionNameForInvoke
LPAREN LPAREN
(star=STAR | dist=all_distinct? (selectExpression (COMMA selectExpression)*)?) (star=STAR | dist=all_distinct? (selectExpression (COMMA selectExpression)*)?)
( (
@ -2160,9 +2177,23 @@ null_treatment
| KW_IGNORE KW_NULLS | KW_IGNORE KW_NULLS
; ;
functionName functionNameForDDL
: functionIdentifier // Keyword IF is also a function name : functionNameForInvoke
| StringLiteral
;
functionNameForInvoke
: userDefinedFuncName
| sql11ReservedKeywordsUsedAsFunctionName | sql11ReservedKeywordsUsedAsFunctionName
| sysFuncNames
;
userDefinedFuncName
: functionIdentifier
;
functionNameCreate
: functionIdentifier
; ;
castExpression castExpression
@ -2584,12 +2615,6 @@ sysFuncNames
| KW_BETWEEN | KW_BETWEEN
; ;
descFuncNames
: sysFuncNames
| StringLiteral
| functionIdentifier
;
id_ id_
: Identifier : Identifier
| nonReserved | nonReserved
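
One visible effect of the tableOrView / *NameCreate split above: the same dangling db. prefix resolves to different suggestion contexts depending on the statement, which is what lets a client complete existing names in one case and only the database prefix in the other. A sketch using the fixture statements and caret positions from the tests below (import paths again as in the tests):

import { SyntaxContextType } from '../../../../src/parser/common/basic-parser-types';
import HiveSQL from '../../../../src/parser/hive';

const parser = new HiveSQL();

// Referencing an existing table resolves to TABLE.
const tableCtx = parser
    .getSuggestionAtCaretPosition('SELECT * FROM db.;', { lineNumber: 1, column: 18 })
    ?.syntax.find((s) => s.syntaxContextType === SyntaxContextType.TABLE);

// Naming a table that is being created resolves to TABLE_CREATE.
const tableCreateCtx = parser
    .getSuggestionAtCaretPosition('CREATE TABLE db. VALUES;', { lineNumber: 1, column: 17 })
    ?.syntax.find((s) => s.syntaxContextType === SyntaxContextType.TABLE_CREATE);

console.log(tableCtx?.wordRanges.map((t) => t.text));       // [ 'db', '.' ]
console.log(tableCreateCtx?.wordRanges.map((t) => t.text)); // [ 'db', '.' ]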

View File

@ -1,4 +1,4 @@
// Generated from /Users/xuxiaoqi/Documents/work/daishu-code/dt-sql-parser/src/grammar/hive/HiveSqlLexer.g4 by ANTLR 4.9.0-SNAPSHOT // Generated from /Users/hayden/Desktop/dt-works/dt-sql-parser/src/grammar/hive/HiveSqlLexer.g4 by ANTLR 4.9.0-SNAPSHOT
import { ATN } from "antlr4ts/atn/ATN"; import { ATN } from "antlr4ts/atn/ATN";

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,4 +1,4 @@
// Generated from /Users/xuxiaoqi/Documents/work/daishu-code/dt-sql-parser/src/grammar/hive/HiveSqlParser.g4 by ANTLR 4.9.0-SNAPSHOT // Generated from /Users/hayden/Desktop/dt-works/dt-sql-parser/src/grammar/hive/HiveSqlParser.g4 by ANTLR 4.9.0-SNAPSHOT
import { ParseTreeListener } from "antlr4ts/tree/ParseTreeListener"; import { ParseTreeListener } from "antlr4ts/tree/ParseTreeListener";
@ -111,7 +111,6 @@ import { AlterScheduledQueryChangeContext } from "./HiveSqlParser";
import { ScheduleSpecContext } from "./HiveSqlParser"; import { ScheduleSpecContext } from "./HiveSqlParser";
import { ExecutedAsSpecContext } from "./HiveSqlParser"; import { ExecutedAsSpecContext } from "./HiveSqlParser";
import { DefinedAsSpecContext } from "./HiveSqlParser"; import { DefinedAsSpecContext } from "./HiveSqlParser";
import { ShowFunctionIdentifierContext } from "./HiveSqlParser";
import { ShowStmtIdentifierContext } from "./HiveSqlParser"; import { ShowStmtIdentifierContext } from "./HiveSqlParser";
import { TableCommentContext } from "./HiveSqlParser"; import { TableCommentContext } from "./HiveSqlParser";
import { CreateTablePartitionSpecContext } from "./HiveSqlParser"; import { CreateTablePartitionSpecContext } from "./HiveSqlParser";
@ -331,8 +330,13 @@ import { TableSampleContext } from "./HiveSqlParser";
import { TableSourceContext } from "./HiveSqlParser"; import { TableSourceContext } from "./HiveSqlParser";
import { AsOfClauseContext } from "./HiveSqlParser"; import { AsOfClauseContext } from "./HiveSqlParser";
import { UniqueJoinTableSourceContext } from "./HiveSqlParser"; import { UniqueJoinTableSourceContext } from "./HiveSqlParser";
import { DbSchemaNameContext } from "./HiveSqlParser";
import { DbSchemaNameCreateContext } from "./HiveSqlParser";
import { TableOrViewContext } from "./HiveSqlParser";
import { TableNameContext } from "./HiveSqlParser"; import { TableNameContext } from "./HiveSqlParser";
import { TableNameCreateContext } from "./HiveSqlParser";
import { ViewNameContext } from "./HiveSqlParser"; import { ViewNameContext } from "./HiveSqlParser";
import { ViewNameCreateContext } from "./HiveSqlParser";
import { SubQuerySourceContext } from "./HiveSqlParser"; import { SubQuerySourceContext } from "./HiveSqlParser";
import { PartitioningSpecContext } from "./HiveSqlParser"; import { PartitioningSpecContext } from "./HiveSqlParser";
import { PartitionTableFunctionSourceContext } from "./HiveSqlParser"; import { PartitionTableFunctionSourceContext } from "./HiveSqlParser";
@ -389,7 +393,10 @@ import { SortByClauseContext } from "./HiveSqlParser";
import { TrimFunctionContext } from "./HiveSqlParser"; import { TrimFunctionContext } from "./HiveSqlParser";
import { Function_Context } from "./HiveSqlParser"; import { Function_Context } from "./HiveSqlParser";
import { Null_treatmentContext } from "./HiveSqlParser"; import { Null_treatmentContext } from "./HiveSqlParser";
import { FunctionNameContext } from "./HiveSqlParser"; import { FunctionNameForDDLContext } from "./HiveSqlParser";
import { FunctionNameForInvokeContext } from "./HiveSqlParser";
import { UserDefinedFuncNameContext } from "./HiveSqlParser";
import { FunctionNameCreateContext } from "./HiveSqlParser";
import { CastExpressionContext } from "./HiveSqlParser"; import { CastExpressionContext } from "./HiveSqlParser";
import { CaseExpressionContext } from "./HiveSqlParser"; import { CaseExpressionContext } from "./HiveSqlParser";
import { WhenExpressionContext } from "./HiveSqlParser"; import { WhenExpressionContext } from "./HiveSqlParser";
@ -458,7 +465,6 @@ import { PartitionSelectorValContext } from "./HiveSqlParser";
import { PartitionSelectorOperatorContext } from "./HiveSqlParser"; import { PartitionSelectorOperatorContext } from "./HiveSqlParser";
import { SubQuerySelectorOperatorContext } from "./HiveSqlParser"; import { SubQuerySelectorOperatorContext } from "./HiveSqlParser";
import { SysFuncNamesContext } from "./HiveSqlParser"; import { SysFuncNamesContext } from "./HiveSqlParser";
import { DescFuncNamesContext } from "./HiveSqlParser";
import { Id_Context } from "./HiveSqlParser"; import { Id_Context } from "./HiveSqlParser";
import { FunctionIdentifierContext } from "./HiveSqlParser"; import { FunctionIdentifierContext } from "./HiveSqlParser";
import { PrincipalIdentifierContext } from "./HiveSqlParser"; import { PrincipalIdentifierContext } from "./HiveSqlParser";
@ -1712,17 +1718,6 @@ export interface HiveSqlParserListener extends ParseTreeListener {
*/ */
exitDefinedAsSpec?: (ctx: DefinedAsSpecContext) => void; exitDefinedAsSpec?: (ctx: DefinedAsSpecContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.showFunctionIdentifier`.
* @param ctx the parse tree
*/
enterShowFunctionIdentifier?: (ctx: ShowFunctionIdentifierContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.showFunctionIdentifier`.
* @param ctx the parse tree
*/
exitShowFunctionIdentifier?: (ctx: ShowFunctionIdentifierContext) => void;
/** /**
* Enter a parse tree produced by `HiveSqlParser.showStmtIdentifier`. * Enter a parse tree produced by `HiveSqlParser.showStmtIdentifier`.
* @param ctx the parse tree * @param ctx the parse tree
@ -4132,6 +4127,39 @@ export interface HiveSqlParserListener extends ParseTreeListener {
*/ */
exitUniqueJoinTableSource?: (ctx: UniqueJoinTableSourceContext) => void; exitUniqueJoinTableSource?: (ctx: UniqueJoinTableSourceContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.dbSchemaName`.
* @param ctx the parse tree
*/
enterDbSchemaName?: (ctx: DbSchemaNameContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.dbSchemaName`.
* @param ctx the parse tree
*/
exitDbSchemaName?: (ctx: DbSchemaNameContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.dbSchemaNameCreate`.
* @param ctx the parse tree
*/
enterDbSchemaNameCreate?: (ctx: DbSchemaNameCreateContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.dbSchemaNameCreate`.
* @param ctx the parse tree
*/
exitDbSchemaNameCreate?: (ctx: DbSchemaNameCreateContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.tableOrView`.
* @param ctx the parse tree
*/
enterTableOrView?: (ctx: TableOrViewContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.tableOrView`.
* @param ctx the parse tree
*/
exitTableOrView?: (ctx: TableOrViewContext) => void;
/** /**
* Enter a parse tree produced by `HiveSqlParser.tableName`. * Enter a parse tree produced by `HiveSqlParser.tableName`.
* @param ctx the parse tree * @param ctx the parse tree
@ -4143,6 +4171,17 @@ export interface HiveSqlParserListener extends ParseTreeListener {
*/ */
exitTableName?: (ctx: TableNameContext) => void; exitTableName?: (ctx: TableNameContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.tableNameCreate`.
* @param ctx the parse tree
*/
enterTableNameCreate?: (ctx: TableNameCreateContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.tableNameCreate`.
* @param ctx the parse tree
*/
exitTableNameCreate?: (ctx: TableNameCreateContext) => void;
/** /**
* Enter a parse tree produced by `HiveSqlParser.viewName`. * Enter a parse tree produced by `HiveSqlParser.viewName`.
* @param ctx the parse tree * @param ctx the parse tree
@ -4154,6 +4193,17 @@ export interface HiveSqlParserListener extends ParseTreeListener {
*/ */
exitViewName?: (ctx: ViewNameContext) => void; exitViewName?: (ctx: ViewNameContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.viewNameCreate`.
* @param ctx the parse tree
*/
enterViewNameCreate?: (ctx: ViewNameCreateContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.viewNameCreate`.
* @param ctx the parse tree
*/
exitViewNameCreate?: (ctx: ViewNameCreateContext) => void;
/** /**
* Enter a parse tree produced by `HiveSqlParser.subQuerySource`. * Enter a parse tree produced by `HiveSqlParser.subQuerySource`.
* @param ctx the parse tree * @param ctx the parse tree
@ -4771,15 +4821,48 @@ export interface HiveSqlParserListener extends ParseTreeListener {
exitNull_treatment?: (ctx: Null_treatmentContext) => void; exitNull_treatment?: (ctx: Null_treatmentContext) => void;
/** /**
* Enter a parse tree produced by `HiveSqlParser.functionName`. * Enter a parse tree produced by `HiveSqlParser.functionNameForDDL`.
* @param ctx the parse tree * @param ctx the parse tree
*/ */
enterFunctionName?: (ctx: FunctionNameContext) => void; enterFunctionNameForDDL?: (ctx: FunctionNameForDDLContext) => void;
/** /**
* Exit a parse tree produced by `HiveSqlParser.functionName`. * Exit a parse tree produced by `HiveSqlParser.functionNameForDDL`.
* @param ctx the parse tree * @param ctx the parse tree
*/ */
exitFunctionName?: (ctx: FunctionNameContext) => void; exitFunctionNameForDDL?: (ctx: FunctionNameForDDLContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.functionNameForInvoke`.
* @param ctx the parse tree
*/
enterFunctionNameForInvoke?: (ctx: FunctionNameForInvokeContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.functionNameForInvoke`.
* @param ctx the parse tree
*/
exitFunctionNameForInvoke?: (ctx: FunctionNameForInvokeContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.userDefinedFuncName`.
* @param ctx the parse tree
*/
enterUserDefinedFuncName?: (ctx: UserDefinedFuncNameContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.userDefinedFuncName`.
* @param ctx the parse tree
*/
exitUserDefinedFuncName?: (ctx: UserDefinedFuncNameContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.functionNameCreate`.
* @param ctx the parse tree
*/
enterFunctionNameCreate?: (ctx: FunctionNameCreateContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.functionNameCreate`.
* @param ctx the parse tree
*/
exitFunctionNameCreate?: (ctx: FunctionNameCreateContext) => void;
/** /**
* Enter a parse tree produced by `HiveSqlParser.castExpression`. * Enter a parse tree produced by `HiveSqlParser.castExpression`.
@ -5529,17 +5612,6 @@ export interface HiveSqlParserListener extends ParseTreeListener {
*/ */
exitSysFuncNames?: (ctx: SysFuncNamesContext) => void; exitSysFuncNames?: (ctx: SysFuncNamesContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.descFuncNames`.
* @param ctx the parse tree
*/
enterDescFuncNames?: (ctx: DescFuncNamesContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.descFuncNames`.
* @param ctx the parse tree
*/
exitDescFuncNames?: (ctx: DescFuncNamesContext) => void;
/** /**
* Enter a parse tree produced by `HiveSqlParser.id_`. * Enter a parse tree produced by `HiveSqlParser.id_`.
* @param ctx the parse tree * @param ctx the parse tree

View File

@ -1,4 +1,4 @@
// Generated from /Users/xuxiaoqi/Documents/work/daishu-code/dt-sql-parser/src/grammar/hive/HiveSqlParser.g4 by ANTLR 4.9.0-SNAPSHOT // Generated from /Users/hayden/Desktop/dt-works/dt-sql-parser/src/grammar/hive/HiveSqlParser.g4 by ANTLR 4.9.0-SNAPSHOT
import { ParseTreeVisitor } from "antlr4ts/tree/ParseTreeVisitor"; import { ParseTreeVisitor } from "antlr4ts/tree/ParseTreeVisitor";
@ -111,7 +111,6 @@ import { AlterScheduledQueryChangeContext } from "./HiveSqlParser";
import { ScheduleSpecContext } from "./HiveSqlParser"; import { ScheduleSpecContext } from "./HiveSqlParser";
import { ExecutedAsSpecContext } from "./HiveSqlParser"; import { ExecutedAsSpecContext } from "./HiveSqlParser";
import { DefinedAsSpecContext } from "./HiveSqlParser"; import { DefinedAsSpecContext } from "./HiveSqlParser";
import { ShowFunctionIdentifierContext } from "./HiveSqlParser";
import { ShowStmtIdentifierContext } from "./HiveSqlParser"; import { ShowStmtIdentifierContext } from "./HiveSqlParser";
import { TableCommentContext } from "./HiveSqlParser"; import { TableCommentContext } from "./HiveSqlParser";
import { CreateTablePartitionSpecContext } from "./HiveSqlParser"; import { CreateTablePartitionSpecContext } from "./HiveSqlParser";
@ -331,8 +330,13 @@ import { TableSampleContext } from "./HiveSqlParser";
import { TableSourceContext } from "./HiveSqlParser"; import { TableSourceContext } from "./HiveSqlParser";
import { AsOfClauseContext } from "./HiveSqlParser"; import { AsOfClauseContext } from "./HiveSqlParser";
import { UniqueJoinTableSourceContext } from "./HiveSqlParser"; import { UniqueJoinTableSourceContext } from "./HiveSqlParser";
import { DbSchemaNameContext } from "./HiveSqlParser";
import { DbSchemaNameCreateContext } from "./HiveSqlParser";
import { TableOrViewContext } from "./HiveSqlParser";
import { TableNameContext } from "./HiveSqlParser"; import { TableNameContext } from "./HiveSqlParser";
import { TableNameCreateContext } from "./HiveSqlParser";
import { ViewNameContext } from "./HiveSqlParser"; import { ViewNameContext } from "./HiveSqlParser";
import { ViewNameCreateContext } from "./HiveSqlParser";
import { SubQuerySourceContext } from "./HiveSqlParser"; import { SubQuerySourceContext } from "./HiveSqlParser";
import { PartitioningSpecContext } from "./HiveSqlParser"; import { PartitioningSpecContext } from "./HiveSqlParser";
import { PartitionTableFunctionSourceContext } from "./HiveSqlParser"; import { PartitionTableFunctionSourceContext } from "./HiveSqlParser";
@ -389,7 +393,10 @@ import { SortByClauseContext } from "./HiveSqlParser";
import { TrimFunctionContext } from "./HiveSqlParser"; import { TrimFunctionContext } from "./HiveSqlParser";
import { Function_Context } from "./HiveSqlParser"; import { Function_Context } from "./HiveSqlParser";
import { Null_treatmentContext } from "./HiveSqlParser"; import { Null_treatmentContext } from "./HiveSqlParser";
import { FunctionNameContext } from "./HiveSqlParser"; import { FunctionNameForDDLContext } from "./HiveSqlParser";
import { FunctionNameForInvokeContext } from "./HiveSqlParser";
import { UserDefinedFuncNameContext } from "./HiveSqlParser";
import { FunctionNameCreateContext } from "./HiveSqlParser";
import { CastExpressionContext } from "./HiveSqlParser"; import { CastExpressionContext } from "./HiveSqlParser";
import { CaseExpressionContext } from "./HiveSqlParser"; import { CaseExpressionContext } from "./HiveSqlParser";
import { WhenExpressionContext } from "./HiveSqlParser"; import { WhenExpressionContext } from "./HiveSqlParser";
@ -458,7 +465,6 @@ import { PartitionSelectorValContext } from "./HiveSqlParser";
import { PartitionSelectorOperatorContext } from "./HiveSqlParser"; import { PartitionSelectorOperatorContext } from "./HiveSqlParser";
import { SubQuerySelectorOperatorContext } from "./HiveSqlParser"; import { SubQuerySelectorOperatorContext } from "./HiveSqlParser";
import { SysFuncNamesContext } from "./HiveSqlParser"; import { SysFuncNamesContext } from "./HiveSqlParser";
import { DescFuncNamesContext } from "./HiveSqlParser";
import { Id_Context } from "./HiveSqlParser"; import { Id_Context } from "./HiveSqlParser";
import { FunctionIdentifierContext } from "./HiveSqlParser"; import { FunctionIdentifierContext } from "./HiveSqlParser";
import { PrincipalIdentifierContext } from "./HiveSqlParser"; import { PrincipalIdentifierContext } from "./HiveSqlParser";
@ -1283,13 +1289,6 @@ export interface HiveSqlParserVisitor<Result> extends ParseTreeVisitor<Result> {
*/ */
visitDefinedAsSpec?: (ctx: DefinedAsSpecContext) => Result; visitDefinedAsSpec?: (ctx: DefinedAsSpecContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.showFunctionIdentifier`.
* @param ctx the parse tree
* @return the visitor result
*/
visitShowFunctionIdentifier?: (ctx: ShowFunctionIdentifierContext) => Result;
/** /**
* Visit a parse tree produced by `HiveSqlParser.showStmtIdentifier`. * Visit a parse tree produced by `HiveSqlParser.showStmtIdentifier`.
* @param ctx the parse tree * @param ctx the parse tree
@ -2823,6 +2822,27 @@ export interface HiveSqlParserVisitor<Result> extends ParseTreeVisitor<Result> {
*/ */
visitUniqueJoinTableSource?: (ctx: UniqueJoinTableSourceContext) => Result; visitUniqueJoinTableSource?: (ctx: UniqueJoinTableSourceContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.dbSchemaName`.
* @param ctx the parse tree
* @return the visitor result
*/
visitDbSchemaName?: (ctx: DbSchemaNameContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.dbSchemaNameCreate`.
* @param ctx the parse tree
* @return the visitor result
*/
visitDbSchemaNameCreate?: (ctx: DbSchemaNameCreateContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.tableOrView`.
* @param ctx the parse tree
* @return the visitor result
*/
visitTableOrView?: (ctx: TableOrViewContext) => Result;
/** /**
* Visit a parse tree produced by `HiveSqlParser.tableName`. * Visit a parse tree produced by `HiveSqlParser.tableName`.
* @param ctx the parse tree * @param ctx the parse tree
@ -2830,6 +2850,13 @@ export interface HiveSqlParserVisitor<Result> extends ParseTreeVisitor<Result> {
*/ */
visitTableName?: (ctx: TableNameContext) => Result; visitTableName?: (ctx: TableNameContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.tableNameCreate`.
* @param ctx the parse tree
* @return the visitor result
*/
visitTableNameCreate?: (ctx: TableNameCreateContext) => Result;
/** /**
* Visit a parse tree produced by `HiveSqlParser.viewName`. * Visit a parse tree produced by `HiveSqlParser.viewName`.
* @param ctx the parse tree * @param ctx the parse tree
@ -2837,6 +2864,13 @@ export interface HiveSqlParserVisitor<Result> extends ParseTreeVisitor<Result> {
*/ */
visitViewName?: (ctx: ViewNameContext) => Result; visitViewName?: (ctx: ViewNameContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.viewNameCreate`.
* @param ctx the parse tree
* @return the visitor result
*/
visitViewNameCreate?: (ctx: ViewNameCreateContext) => Result;
/** /**
* Visit a parse tree produced by `HiveSqlParser.subQuerySource`. * Visit a parse tree produced by `HiveSqlParser.subQuerySource`.
* @param ctx the parse tree * @param ctx the parse tree
@ -3230,11 +3264,32 @@ export interface HiveSqlParserVisitor<Result> extends ParseTreeVisitor<Result> {
visitNull_treatment?: (ctx: Null_treatmentContext) => Result; visitNull_treatment?: (ctx: Null_treatmentContext) => Result;
/** /**
* Visit a parse tree produced by `HiveSqlParser.functionName`. * Visit a parse tree produced by `HiveSqlParser.functionNameForDDL`.
* @param ctx the parse tree * @param ctx the parse tree
* @return the visitor result * @return the visitor result
*/ */
visitFunctionName?: (ctx: FunctionNameContext) => Result; visitFunctionNameForDDL?: (ctx: FunctionNameForDDLContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.functionNameForInvoke`.
* @param ctx the parse tree
* @return the visitor result
*/
visitFunctionNameForInvoke?: (ctx: FunctionNameForInvokeContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.userDefinedFuncName`.
* @param ctx the parse tree
* @return the visitor result
*/
visitUserDefinedFuncName?: (ctx: UserDefinedFuncNameContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.functionNameCreate`.
* @param ctx the parse tree
* @return the visitor result
*/
visitFunctionNameCreate?: (ctx: FunctionNameCreateContext) => Result;
/** /**
* Visit a parse tree produced by `HiveSqlParser.castExpression`. * Visit a parse tree produced by `HiveSqlParser.castExpression`.
@ -3712,13 +3767,6 @@ export interface HiveSqlParserVisitor<Result> extends ParseTreeVisitor<Result> {
*/ */
visitSysFuncNames?: (ctx: SysFuncNamesContext) => Result; visitSysFuncNames?: (ctx: SysFuncNamesContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.descFuncNames`.
* @param ctx the parse tree
* @return the visitor result
*/
visitDescFuncNames?: (ctx: DescFuncNamesContext) => Result;
/** /**
* Visit a parse tree produced by `HiveSqlParser.id_`. * Visit a parse tree produced by `HiveSqlParser.id_`.
* @param ctx the parse tree * @param ctx the parse tree

View File

@ -17,20 +17,20 @@ export enum SyntaxContextType {
CATALOG = 'catalog', CATALOG = 'catalog',
/** database name path, such as catalog.db */ /** database name path, such as catalog.db */
DATABASE = 'database', DATABASE = 'database',
/** database name path will be created */ /** database name path that will be created */
DATABASE_CREATE = 'databaseCreate', DATABASE_CREATE = 'databaseCreate',
/** table name path, such as catalog.db.tb */ /** table name path, such as catalog.db.tb */
TABLE = 'table', TABLE = 'table',
/** table name path will be created */ /** table name path that will be created */
TABLE_CREATE = 'tableCreate', TABLE_CREATE = 'tableCreate',
/** view name */ /** view name path, such as db.tb.view */
VIEW = 'view', VIEW = 'view',
/** view name path that will be created */
VIEW_CREATE = 'viewCreate',
/** function name */ /** function name */
FUNCTION = 'function', FUNCTION = 'function',
/** principal name */ /** function name that will be created */
PRINCIPAL = 'principal', FUNCTION_CREATE = 'functionCreate',
/** hint arg name */
HTNTARG = 'hintArg',
} }
export interface WordRange { export interface WordRange {
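
These context types are what an editor integration switches on when deciding what to offer at the caret. A hedged sketch of such a mapping (the completionSource helper and its labels are hypothetical, not part of dt-sql-parser, and the import path is assumed to be the one used inside src/parser):

import { SyntaxContextType } from '../common/basic-parser-types'; // assumed path from inside src/parser

// Hypothetical editor-side mapping: contexts that name existing entities trigger
// a catalog lookup, while the *_CREATE contexts refer to names the user is about
// to define, so there is nothing to look up for the trailing identifier.
function completionSource(type: SyntaxContextType): 'database' | 'table' | 'view' | 'function' | 'none' {
    switch (type) {
        case SyntaxContextType.DATABASE:
            return 'database';
        case SyntaxContextType.TABLE:
            return 'table';
        case SyntaxContextType.VIEW:
            return 'view';
        case SyntaxContextType.FUNCTION:
            return 'function';
        case SyntaxContextType.DATABASE_CREATE:
        case SyntaxContextType.TABLE_CREATE:
        case SyntaxContextType.VIEW_CREATE:
        case SyntaxContextType.FUNCTION_CREATE:
        default:
            return 'none';
    }
}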

View File

@ -18,11 +18,15 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
} }
protected preferredRules: Set<number> = new Set([ protected preferredRules: Set<number> = new Set([
HiveSqlParser.RULE_dbSchemaName, // db or schema name
HiveSqlParser.RULE_dbSchemaNameCreate, // db or schema name that will be created
HiveSqlParser.RULE_tableName, // table name HiveSqlParser.RULE_tableName, // table name
HiveSqlParser.RULE_tableNameCreate, // table name that will be created
HiveSqlParser.RULE_viewName, // view name HiveSqlParser.RULE_viewName, // view name
HiveSqlParser.RULE_functionIdentifier, // function name HiveSqlParser.RULE_viewNameCreate, // view name that will be created
HiveSqlParser.RULE_principalIdentifier, // USER/ROLE/GROUP name HiveSqlParser.RULE_userDefinedFuncName, // function name
HiveSqlParser.RULE_hintArgName, // hint name HiveSqlParser.RULE_functionNameCreate, // function name that will be created
]); ]);
protected get splitListener () { protected get splitListener () {
@ -37,7 +41,6 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
): Suggestions<Token> { ): Suggestions<Token> {
const originalSyntaxSuggestions: SyntaxSuggestion<Token>[] = []; const originalSyntaxSuggestions: SyntaxSuggestion<Token>[] = [];
const keywords: string[] = []; const keywords: string[] = [];
for (let candidate of candidates.rules) { for (let candidate of candidates.rules) {
const [ruleType, candidateRule] = candidate; const [ruleType, candidateRule] = candidate;
const startTokenIndex = candidateRule.startTokenIndex + tokenIndexOffset; const startTokenIndex = candidateRule.startTokenIndex + tokenIndexOffset;
@ -45,24 +48,36 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
let syntaxContextType: SyntaxContextType; let syntaxContextType: SyntaxContextType;
switch (ruleType) { switch (ruleType) {
case HiveSqlParser.RULE_dbSchemaName: {
syntaxContextType = SyntaxContextType.DATABASE;
break;
}
case HiveSqlParser.RULE_dbSchemaNameCreate: {
syntaxContextType = SyntaxContextType.DATABASE_CREATE;
break;
}
case HiveSqlParser.RULE_tableName: { case HiveSqlParser.RULE_tableName: {
syntaxContextType = SyntaxContextType.TABLE; syntaxContextType = SyntaxContextType.TABLE;
break; break;
} }
case HiveSqlParser.RULE_tableNameCreate: {
syntaxContextType = SyntaxContextType.TABLE_CREATE
break;
}
case HiveSqlParser.RULE_viewName: { case HiveSqlParser.RULE_viewName: {
syntaxContextType = SyntaxContextType.VIEW; syntaxContextType = SyntaxContextType.VIEW;
break; break;
} }
case HiveSqlParser.RULE_functionIdentifier: { case HiveSqlParser.RULE_viewNameCreate: {
syntaxContextType = SyntaxContextType.VIEW_CREATE;
break;
}
case HiveSqlParser.RULE_userDefinedFuncName: {
syntaxContextType = SyntaxContextType.FUNCTION; syntaxContextType = SyntaxContextType.FUNCTION;
break; break;
} }
case HiveSqlParser.RULE_principalIdentifier: { case HiveSqlParser.RULE_functionNameCreate: {
syntaxContextType = SyntaxContextType.PRINCIPAL; syntaxContextType = SyntaxContextType.FUNCTION_CREATE;
break;
}
case HiveSqlParser.RULE_hintArgName: {
syntaxContextType = SyntaxContextType.HTNTARG;
break; break;
} }
default: default:
@ -86,15 +101,15 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
} }
} }
return { return {
syntax: [], syntax: originalSyntaxSuggestions,
keywords: [], keywords,
}; };
} }
} }
export class HiveSqlSplitListener implements HiveSqlParserListener { export class HiveSqlSplitListener implements HiveSqlParserListener {
private _statementContext: StatementContext[] = []; private _statementContext: StatementContext[] = [];
exitStatement = (ctx: StatementContext) => { exitStatement = (ctx: StatementContext) => {
this._statementContext.push(ctx); this._statementContext.push(ctx);
} }
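
The rule constants above are handed to antlr4-c3 by the BasicParser base class, which is not part of this diff. A standalone sketch of that flow under the same preferred rules (the generated lexer/parser import paths are placeholders, and the caret token index is hard-coded here while the real code derives it from the CaretPosition):

import { CodeCompletionCore } from 'antlr4-c3';
import { CharStreams, CommonTokenStream } from 'antlr4ts';
import { HiveSqlLexer } from './lib/hive/HiveSqlLexer';   // placeholder path
import { HiveSqlParser } from './lib/hive/HiveSqlParser'; // placeholder path

const tokenStream = new CommonTokenStream(new HiveSqlLexer(CharStreams.fromString('SELECT * FROM db.;')));
const parser = new HiveSqlParser(tokenStream);
parser.program(); // parse once so the completion core can walk the ATN with real tokens

const core = new CodeCompletionCore(parser);
// Only these rules are reported back as entity candidates; everything else
// surfaces as keyword tokens.
core.preferredRules = new Set([
    HiveSqlParser.RULE_dbSchemaName,
    HiveSqlParser.RULE_tableName,
    HiveSqlParser.RULE_viewName,
    HiveSqlParser.RULE_userDefinedFuncName,
]);

const caretTokenIndex = 7; // illustrative; derived from the caret position in the real implementation
const candidates = core.collectCandidates(caretTokenIndex);
for (const [ruleType, candidateRule] of candidates.rules) {
    // ruleType maps onto a SyntaxContextType exactly as in the switch above.
    console.log(ruleType, candidateRule.startTokenIndex);
}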

View File

@ -20,10 +20,10 @@ describe('Flink SQL Syntax Suggestion', () => {
lineNumber: 1, lineNumber: 1,
column: 22 column: 22
} }
const suggestion = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax?.[0]; const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);
expect(suggestion?.syntaxContextType === SyntaxContextType.TABLE)
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text)) expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat', '.', 'db', '.', 'tb' ]) .toEqual([ 'cat', '.', 'db', '.', 'tb' ])
}) })
@ -33,9 +33,10 @@ describe('Flink SQL Syntax Suggestion', () => {
lineNumber: 3, lineNumber: 3,
column: 21 column: 21
} }
const suggestion = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax?.[0]; const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);
expect(suggestion?.syntaxContextType === SyntaxContextType.TABLE)
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text)) expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat', '.', 'db' ]) .toEqual([ 'cat', '.', 'db' ])
}) })
@ -45,9 +46,10 @@ describe('Flink SQL Syntax Suggestion', () => {
lineNumber: 5, lineNumber: 5,
column: 20 column: 20
} }
const suggestion = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax?.[0]; const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE);
expect(suggestion?.syntaxContextType === SyntaxContextType.TABLE_CREATE)
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text)) expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat', '.', 'db' ]) .toEqual([ 'cat', '.', 'db' ])
}) })
@ -57,9 +59,10 @@ describe('Flink SQL Syntax Suggestion', () => {
lineNumber: 7, lineNumber: 7,
column: 21 column: 21
} }
const suggestion = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax?.[0]; const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);
expect(suggestion?.syntaxContextType === SyntaxContextType.TABLE)
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text)) expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat' ]) .toEqual([ 'cat' ])
}) })
@ -69,9 +72,10 @@ describe('Flink SQL Syntax Suggestion', () => {
lineNumber: 9, lineNumber: 9,
column: 20 column: 20
} }
const suggestion = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax?.[0]; const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.DATABASE);
expect(suggestion?.syntaxContextType === SyntaxContextType.DATABASE)
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text)) expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat', '.' ]) .toEqual([ 'cat', '.' ])
}) })
@ -81,9 +85,10 @@ describe('Flink SQL Syntax Suggestion', () => {
lineNumber: 9, lineNumber: 9,
column: 20 column: 20
} }
const suggestion = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax?.[0]; const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.DATABASE);
expect(suggestion?.syntaxContextType === SyntaxContextType.DATABASE)
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text)) expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat', '.' ]); .toEqual([ 'cat', '.' ]);
}) })
@ -93,9 +98,10 @@ describe('Flink SQL Syntax Suggestion', () => {
lineNumber: 19, lineNumber: 19,
column: 10, column: 10,
} }
const suggestion = parser.getSuggestionAtCaretPosition(multipleSql, pos)?.syntax?.[0]; const syntaxes = parser.getSuggestionAtCaretPosition(multipleSql, pos)?.syntax;
console.log(suggestion); const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.DATABASE);
expect(suggestion?.syntaxContextType === SyntaxContextType.DATABASE);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text)) expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat1', '.' ]); .toEqual([ 'cat1', '.' ]);
}) })

View File

@ -5,7 +5,7 @@ import FlinkSQL from '../../../../src/parser/flinksql'
const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8'); const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');
describe('Flink SQL Syntax Suggestion', () => { describe('Flink SQL Token Suggestion', () => {
const parser = new FlinkSQL(); const parser = new FlinkSQL();
test('Use Statement ', () => { test('Use Statement ', () => {

View File

@ -0,0 +1,19 @@
INSERT INTO db.tb ;

SELECT * FROM db.;

CREATE TABLE db. VALUES;

DROP TABLE IF EXISTS db.a;

CREATE OR REPLACE VIEW db.v;

DROP VIEW db.v ;

CREATE FUNCTION fn1;

SELECT name, calculate_age(birthdate) AS age FROM students;

CREATE DATABASE db;

DROP SCHEMA IF EXISTS sch;

View File

@ -0,0 +1,20 @@
ALTER
;
CREATE
;
DELETE
;
DESCRIBE
;
DROP
;
EXPORT
;
IMPORT
;
INSERT
;
LOAD
;
SHOW
;

View File

@ -0,0 +1,147 @@
import fs from 'fs';
import path from 'path';
import { CaretPosition, SyntaxContextType } from '../../../../src/parser/common/basic-parser-types';
import HiveSQL from '../../../../src/parser/hive'
const syntaxSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'), 'utf-8');
describe('Hive SQL Syntax Suggestion', () => {
const parser = new HiveSQL();
test('Validate Syntax SQL', () => {
expect(parser.validate(syntaxSql).length).not.toBe(0);
expect(parser.validate(syntaxSql).length).not.toBe(0);
expect(parser.validate(syntaxSql).length).not.toBe(0);
});
test('Insert table ', () => {
const pos: CaretPosition = {
lineNumber: 1,
column: 18
}
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db', '.', 'tb' ])
});
test('Select table ', () => {
const pos: CaretPosition = {
lineNumber: 3,
column: 18
}
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db', '.' ])
});
test('Create table ', () => {
const pos: CaretPosition = {
lineNumber: 5,
column: 17
}
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db', '.' ])
});
test('DROP table ', () => {
const pos: CaretPosition = {
lineNumber: 7,
column: 26
}
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db', '.', 'a' ])
});
test('Create view ', () => {
const pos: CaretPosition = {
lineNumber: 9,
column: 28
}
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db', '.', 'v' ])
});
test('Drop view ', () => {
const pos: CaretPosition = {
lineNumber: 11,
column: 15
}
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.VIEW);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db', '.', 'v' ])
});
test('Create function ', () => {
const pos: CaretPosition = {
lineNumber: 13,
column: 20
}
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'fn1' ])
});
test('Use function', () => {
const pos: CaretPosition = {
lineNumber: 15,
column: 27
}
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.FUNCTION);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'calculate_age' ])
});
test('Create database', () => {
const pos: CaretPosition = {
lineNumber: 17,
column: 19
}
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db' ])
});
test('Drop database', () => {
const pos: CaretPosition = {
lineNumber: 19,
column: 26
}
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.DATABASE);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'sch' ])
});
})

View File

@ -0,0 +1,232 @@
import fs from "fs";
import path from "path";
import { CaretPosition } from "../../../../src/parser/common/basic-parser-types";
import HiveSQL from "../../../../src/parser/hive";
const tokenSql = fs.readFileSync(
path.join(__dirname, "fixtures", "tokenSuggestion.sql"),
"utf-8"
);
describe("Hive SQL Syntax Suggestion", () => {
const parser = new HiveSQL();
test("After ALTER", () => {
const pos: CaretPosition = {
lineNumber: 1,
column: 7,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
expect(suggestion).toEqual([
"APPLICATION",
"GROUP",
"USER",
"POOL",
"TRIGGER",
"RESOURCE",
"SCHEDULED",
"INDEX",
"CONNECTOR",
"DATABASE",
"SCHEMA",
"MATERIALIZED",
"VIEW",
"TABLE",
]);
});
test("After CREATE", () => {
const pos: CaretPosition = {
lineNumber: 3,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
expect(suggestion).toEqual([
"CONNECTOR",
"APPLICATION",
"GROUP",
"USER",
"POOL",
"TRIGGER",
"RESOURCE",
"ROLE",
"INDEX",
"TEMPORARY",
"FUNCTION",
"SCHEDULED",
"MATERIALIZED",
"VIEW",
"OR",
"MANAGED",
"TABLE",
"EXTERNAL",
"TRANSACTIONAL",
"REMOTE",
"DATABASE",
"SCHEMA",
]);
});
test("After DELETE", () => {
const pos: CaretPosition = {
lineNumber: 5,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
expect(suggestion).toEqual(['FROM']);
});
test("After DESCRIBE", () => {
const pos: CaretPosition = {
lineNumber: 7,
column: 10,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
expect(suggestion).toEqual([
"EXTENDED",
"FORMATTED",
"FUNCTION",
"CONNECTOR",
"DATABASE",
"SCHEMA",
]);
});
test("After DROP", () => {
const pos: CaretPosition = {
lineNumber: 9,
column: 6,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
expect(suggestion).toEqual([
"CONNECTOR",
"APPLICATION",
"GROUP",
"USER",
"POOL",
"TRIGGER",
"RESOURCE",
"ROLE",
"INDEX",
"TEMPORARY",
"FUNCTION",
"MATERIALIZED",
"VIEW",
"SCHEDULED",
"TABLE",
"DATABASE",
"SCHEMA",
]);
});
test("After EXPORT", () => {
const pos: CaretPosition = {
lineNumber: 11,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
expect(suggestion).toEqual(['TABLE']);
});
test("After IMPORT", () => {
const pos: CaretPosition = {
lineNumber: 13,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
expect(suggestion).toEqual([
"FROM",
"TABLE",
"EXTERNAL",
]);
});
test("After INSERT", () => {
const pos: CaretPosition = {
lineNumber: 15,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
expect(suggestion).toEqual([
"INTO",
"OVERWRITE",
]);
});
test("After LOAD", () => {
const pos: CaretPosition = {
lineNumber: 17,
column: 6,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
expect(suggestion).toEqual(["DATA"
]);
});
test("After SHOW", () => {
const pos: CaretPosition = {
lineNumber: 19,
column: 6,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
expect(suggestion).toEqual([
"CURRENT",
"ROLES",
"PRINCIPALS",
"ROLE",
"GRANT",
"INDEX",
"INDEXES",
"FORMATTED",
"CONNECTORS",
"RESOURCE",
"CONF",
"TRANSACTIONS",
"COMPACTIONS",
"LOCKS",
"TBLPROPERTIES",
"TABLE",
"CREATE",
"PARTITIONS",
"FUNCTIONS",
"COLUMNS",
"SORTED",
"MATERIALIZED",
"VIEWS",
"TABLES",
"EXTENDED",
"DATABASES",
"SCHEMAS",
]);
});
});