Feat/auto complete (#175)

* feat: update hive grammar to adapt to c3

* feat: support viewName, dbName, fnName autoComplete to hive

* test: add hive suggestion unit test

* test: optimize flink suggestion unit tests
This commit is contained in:
Hayden 2023-10-10 16:37:49 +08:00 committed by GitHub
parent d0ad381833
commit c4030929b2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
14 changed files with 9376 additions and 8469 deletions

View File

@ -119,13 +119,13 @@ replDumpStatement
;
replDbPolicy
: dbName=id_ (DOT tablePolicy=replTableLevelPolicy)?
: dbName=dbSchemaName (DOT tablePolicy=replTableLevelPolicy)?
;
replLoadStatement
: KW_REPL KW_LOAD
sourceDbPolicy=replDbPolicy
(KW_INTO dbName=id_)?
(KW_INTO dbName=dbSchemaName)?
(KW_WITH replConf=replConfigs)?
;
@ -143,7 +143,7 @@ replTableLevelPolicy
replStatusStatement
: KW_REPL KW_STATUS
dbName=id_
dbName=dbSchemaName
(KW_WITH replConf=replConfigs)?
;
@ -233,14 +233,14 @@ orReplace
createDatabaseStatement
: KW_CREATE KW_REMOTE? db_schema
ifNotExists?
name=id_
name=dbSchemaNameCreate
databaseComment?
dbLocation?
dbManagedLocation?
(KW_WITH KW_DBPROPERTIES dbprops=dbProperties)?
| KW_CREATE KW_REMOTE db_schema
ifNotExists?
name=id_
name=dbSchemaNameCreate
databaseComment?
dbConnectorName
(KW_WITH KW_DBPROPERTIES dbprops=dbProperties)?
@ -263,15 +263,15 @@ dbPropertiesList
;
dbConnectorName
: KW_USING dcName=id_
: KW_USING dcName=dbSchemaName
;
switchDatabaseStatement
: KW_USE id_
: KW_USE dbSchemaName
;
dropDatabaseStatement
: KW_DROP db_schema ifExists? id_ restrictOrCascade?
: KW_DROP db_schema ifExists? dbSchemaName restrictOrCascade?
;
databaseComment
@ -300,15 +300,15 @@ partTypeExpr
;
tabPartColTypeExpr
: tableName partitionSpec? extColumnName?
: tableOrView partitionSpec? extColumnName?
;
descStatement
: (KW_DESCRIBE | KW_DESC)
(
db_schema KW_EXTENDED? dbName=id_
| KW_DATACONNECTOR KW_EXTENDED? dcName=id_
| KW_FUNCTION KW_EXTENDED? name=descFuncNames
db_schema KW_EXTENDED? dbName=dbSchemaName
| KW_DATACONNECTOR KW_EXTENDED? dcName=dbSchemaName
| KW_FUNCTION KW_EXTENDED? name=functionNameForDDL
| (descOptions=KW_FORMATTED | descOptions=KW_EXTENDED) parttype=tabPartColTypeExpr
| parttype=tabPartColTypeExpr
)
@ -333,26 +333,26 @@ db_schema
showStatement
: KW_SHOW (KW_DATABASES | KW_SCHEMAS) (KW_LIKE showStmtIdentifier)?
| KW_SHOW isExtended=KW_EXTENDED? KW_TABLES (from_in db_name=id_)? filter=showTablesFilterExpr?
| KW_SHOW KW_VIEWS (from_in db_name=id_)? (KW_LIKE showStmtIdentifier | showStmtIdentifier)?
| KW_SHOW KW_MATERIALIZED KW_VIEWS (from_in db_name=id_)? (KW_LIKE showStmtIdentifier|showStmtIdentifier)?
| KW_SHOW KW_SORTED? KW_COLUMNS from_in tableName (from_in db_name=id_)? (KW_LIKE showStmtIdentifier|showStmtIdentifier)?
| KW_SHOW KW_FUNCTIONS (KW_LIKE showFunctionIdentifier)?
| KW_SHOW KW_PARTITIONS tabName=tableName partitionSpec? whereClause? orderByClause? limitClause?
| KW_SHOW KW_CREATE (db_schema db_name=id_ | KW_TABLE tabName=tableName)
| KW_SHOW KW_TABLE KW_EXTENDED (from_in db_name=id_)? KW_LIKE showStmtIdentifier partitionSpec?
| KW_SHOW isExtended=KW_EXTENDED? KW_TABLES (from_in db_name=dbSchemaName)? filter=showTablesFilterExpr?
| KW_SHOW KW_VIEWS (from_in db_name=dbSchemaName)? (KW_LIKE showStmtIdentifier | showStmtIdentifier)?
| KW_SHOW KW_MATERIALIZED KW_VIEWS (from_in db_name=dbSchemaName)? (KW_LIKE showStmtIdentifier|showStmtIdentifier)?
| KW_SHOW KW_SORTED? KW_COLUMNS from_in tableOrView (from_in db_name=dbSchemaName)? (KW_LIKE showStmtIdentifier|showStmtIdentifier)?
| KW_SHOW KW_FUNCTIONS (KW_LIKE functionNameForDDL)?
| KW_SHOW KW_PARTITIONS tabOrViewName=tableOrView partitionSpec? whereClause? orderByClause? limitClause?
| KW_SHOW KW_CREATE (db_schema db_name=dbSchemaName | KW_TABLE tabName=tableName)
| KW_SHOW KW_TABLE KW_EXTENDED (from_in db_name=dbSchemaName)? KW_LIKE showStmtIdentifier partitionSpec?
| KW_SHOW KW_TBLPROPERTIES tableName (LPAREN prptyName=StringLiteral RPAREN)?
| KW_SHOW KW_LOCKS (db_schema dbName=id_ isExtended=KW_EXTENDED? | parttype=partTypeExpr? isExtended=KW_EXTENDED?)
| KW_SHOW KW_LOCKS (db_schema dbName=dbSchemaName isExtended=KW_EXTENDED? | parttype=partTypeExpr? isExtended=KW_EXTENDED?)
| KW_SHOW KW_COMPACTIONS
( compactionId
| db_schema dbName=id_ compactionPool? compactionType? compactionStatus? orderByClause? limitClause?
| db_schema dbName=dbSchemaName compactionPool? compactionType? compactionStatus? orderByClause? limitClause?
| parttype=partTypeExpr? compactionPool? compactionType? compactionStatus? orderByClause? limitClause?
)
| KW_SHOW KW_TRANSACTIONS
| KW_SHOW KW_CONF StringLiteral
| KW_SHOW KW_RESOURCE (KW_PLAN rp_name=id_ | KW_PLANS)
| KW_SHOW KW_DATACONNECTORS
| KW_SHOW KW_FORMATTED? ( KW_INDEX | KW_INDEXES ) KW_ON tableName (from_in id_)?
| KW_SHOW KW_FORMATTED? ( KW_INDEX | KW_INDEXES ) KW_ON tableName (from_in dbSchemaName)?
;
showTablesFilterExpr
@ -366,7 +366,7 @@ lockStatement
;
lockDatabase
: KW_LOCK db_schema dbName=id_ lockMode
: KW_LOCK db_schema dbName=dbSchemaName lockMode
;
lockMode
@ -379,7 +379,7 @@ unlockStatement
;
unlockDatabase
: KW_UNLOCK db_schema dbName=id_
: KW_UNLOCK db_schema dbName=dbSchemaName
;
createRoleStatement
@ -450,14 +450,14 @@ privilegeObject
database or table type. Type is optional, default type is table
*/
privObject
: db_schema id_
: db_schema dbSchemaName
| KW_TABLE? tableName partitionSpec?
| KW_URI path=StringLiteral
| KW_SERVER id_
;
privObjectCols
: db_schema id_
: db_schema dbSchemaName
| KW_TABLE? tableName (LPAREN cols=columnNameList RPAREN)? partitionSpec?
| KW_URI path=StringLiteral
| KW_SERVER id_
@ -539,12 +539,12 @@ resourceType
;
createFunctionStatement
: KW_CREATE temp=KW_TEMPORARY? KW_FUNCTION functionIdentifier KW_AS StringLiteral
: KW_CREATE temp=KW_TEMPORARY? KW_FUNCTION functionNameCreate KW_AS StringLiteral
(KW_USING rList=resourceList)?
;
dropFunctionStatement
: KW_DROP temp=KW_TEMPORARY? KW_FUNCTION ifExists? functionIdentifier
: KW_DROP temp=KW_TEMPORARY? KW_FUNCTION ifExists? functionNameForDDL
;
reloadFunctionsStatement
@ -575,7 +575,7 @@ dropIndexStatement
: KW_DROP KW_INDEX ifExists? id_ KW_ON tableName;
createViewStatement
: KW_CREATE orReplace? KW_VIEW ifNotExists? name=tableName
: KW_CREATE orReplace? KW_VIEW ifNotExists? name=viewNameCreate
(LPAREN columnNameCommentList RPAREN)? tableComment? viewPartition?
tablePropertiesPrefixed?
KW_AS
@ -612,7 +612,7 @@ dropViewStatement
;
createMaterializedViewStatement
: KW_CREATE KW_MATERIALIZED KW_VIEW ifNotExists? name=tableName
: KW_CREATE KW_MATERIALIZED KW_VIEW ifNotExists? name=viewNameCreate
rewriteDisabled? tableComment? viewPartition? viewOrganization?
tableRowFormat? tableFileFormat? tableLocation?
tablePropertiesPrefixed? KW_AS selectStatementWithCTE
@ -660,11 +660,6 @@ definedAsSpec
: KW_DEFINED? KW_AS statement
;
showFunctionIdentifier
: functionIdentifier
| StringLiteral
;
showStmtIdentifier
: id_
| StringLiteral
@ -1345,8 +1340,8 @@ END SHOW COMPACTIONS statement
alterStatement
: KW_ALTER ( KW_TABLE tableName alterTableStatementSuffix
| KW_VIEW tableName KW_AS? alterViewStatementSuffix
| KW_MATERIALIZED KW_VIEW tableNameTree=tableName alterMaterializedViewStatementSuffix
| KW_VIEW viewName KW_AS? alterViewStatementSuffix
| KW_MATERIALIZED KW_VIEW tableNameTree=viewName alterMaterializedViewStatementSuffix
| db_schema alterDatabaseStatementSuffix
| KW_DATACONNECTOR alterDataConnectorStatementSuffix
| KW_INDEX alterIndexStatementSuffix
@ -1425,23 +1420,23 @@ alterDatabaseStatementSuffix
;
alterDatabaseSuffixProperties
: name=id_ KW_SET KW_DBPROPERTIES dbProperties
: name=dbSchemaName KW_SET KW_DBPROPERTIES dbProperties
;
alterDatabaseSuffixSetOwner
: dbName=id_ KW_SET KW_OWNER principalAlterName
: dbName=dbSchemaName KW_SET KW_OWNER principalAlterName
;
alterDatabaseSuffixSetLocation
: dbName=id_ KW_SET (KW_LOCATION | KW_MANAGEDLOCATION) newLocation=StringLiteral
: dbName=dbSchemaName KW_SET (KW_LOCATION | KW_MANAGEDLOCATION) newLocation=StringLiteral
;
alterDatabaseSuffixSetManagedLocation
: dbName=id_ KW_SET KW_MANAGEDLOCATION newLocation=StringLiteral
: dbName=dbSchemaName KW_SET KW_MANAGEDLOCATION newLocation=StringLiteral
;
alterStatementSuffixRename
: KW_RENAME KW_TO tableName
: KW_RENAME KW_TO tableNameCreate
;
alterStatementSuffixAddCol
@ -1635,15 +1630,15 @@ alterDataConnectorStatementSuffix
;
alterDataConnectorSuffixProperties
: name=id_ KW_SET KW_DCPROPERTIES dcProperties
: name=dbSchemaName KW_SET KW_DCPROPERTIES dcProperties
;
alterDataConnectorSuffixSetOwner
: dcName=id_ KW_SET KW_OWNER principalAlterName
: dcName=dbSchemaName KW_SET KW_OWNER principalAlterName
;
alterDataConnectorSuffixSetUrl
: dcName=id_ KW_SET KW_URL newUri=StringLiteral
: dcName=dbSchemaName KW_SET KW_URL newUri=StringLiteral
;
likeTableOrFile
@ -1656,7 +1651,7 @@ likeTableOrFile
Rules for parsing createtable
*/
createTableStatement
: KW_CREATE temp=KW_TEMPORARY? trans=KW_TRANSACTIONAL? ext=KW_EXTERNAL? KW_TABLE ifNotExists? name=tableName
: KW_CREATE temp=KW_TEMPORARY? trans=KW_TRANSACTIONAL? ext=KW_EXTERNAL? KW_TABLE ifNotExists? name=tableNameCreate
( likeTableOrFile
createTablePartitionSpec?
tableRowFormat?
@ -1674,7 +1669,7 @@ createTableStatement
tablePropertiesPrefixed?
(KW_AS selectStatementWithCTE)?
)
| KW_CREATE mgd=KW_MANAGED KW_TABLE ifNotExists? name=tableName
| KW_CREATE mgd=KW_MANAGED KW_TABLE ifNotExists? name=tableNameCreate
( likeTableOrFile
tableRowFormat?
tableFileFormat?
@ -1720,7 +1715,7 @@ dropDataConnectorStatement
tableAllColumns
: STAR
| tableName DOT STAR
| tableOrView DOT STAR
;
// (table|column)
@ -1816,7 +1811,7 @@ tableSample
;
tableSource
: tabname=tableName props=tableProperties? ts=tableSample? asOf=asOfClause? (KW_AS? alias=id_)?
: tabname=tableOrView props=tableProperties? ts=tableSample? asOf=asOfClause? (KW_AS? alias=id_)?
;
asOfClause
@ -1826,7 +1821,20 @@ asOfClause
;
uniqueJoinTableSource
: tabname=tableName ts=tableSample? (KW_AS? alias=id_)?
: tabname=tableOrView ts=tableSample? (KW_AS? alias=id_)?
;
dbSchemaName
: id_
;
dbSchemaNameCreate
: id_
;
tableOrView
: tableName
| viewName
;
tableName
@ -1834,10 +1842,19 @@ tableName
| tab=id_
;
tableNameCreate
: db=id_ DOT tab=id_ (DOT meta=id_)?
| tab=id_
;
viewName
: (db=id_ DOT)? view=id_
;
viewNameCreate
: (db=id_ DOT)? view=id_
;
subQuerySource
: LPAREN queryStatementExpression RPAREN KW_AS? id_
;
@ -2140,7 +2157,7 @@ trimFunction
// fun(par1, par2, par3)
function_
: trimFunction
| functionName
| functionNameForInvoke
LPAREN
(star=STAR | dist=all_distinct? (selectExpression (COMMA selectExpression)*)?)
(
@ -2160,9 +2177,23 @@ null_treatment
| KW_IGNORE KW_NULLS
;
functionName
: functionIdentifier // Keyword IF is also a function name
functionNameForDDL
: functionNameForInvoke
| StringLiteral
;
functionNameForInvoke
: userDefinedFuncName
| sql11ReservedKeywordsUsedAsFunctionName
| sysFuncNames
;
userDefinedFuncName
: functionIdentifier
;
functionNameCreate
: functionIdentifier
;
castExpression
@ -2584,12 +2615,6 @@ sysFuncNames
| KW_BETWEEN
;
descFuncNames
: sysFuncNames
| StringLiteral
| functionIdentifier
;
id_
: Identifier
| nonReserved

View File

@ -1,4 +1,4 @@
// Generated from /Users/xuxiaoqi/Documents/work/daishu-code/dt-sql-parser/src/grammar/hive/HiveSqlLexer.g4 by ANTLR 4.9.0-SNAPSHOT
// Generated from /Users/hayden/Desktop/dt-works/dt-sql-parser/src/grammar/hive/HiveSqlLexer.g4 by ANTLR 4.9.0-SNAPSHOT
import { ATN } from "antlr4ts/atn/ATN";

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -1,4 +1,4 @@
// Generated from /Users/xuxiaoqi/Documents/work/daishu-code/dt-sql-parser/src/grammar/hive/HiveSqlParser.g4 by ANTLR 4.9.0-SNAPSHOT
// Generated from /Users/hayden/Desktop/dt-works/dt-sql-parser/src/grammar/hive/HiveSqlParser.g4 by ANTLR 4.9.0-SNAPSHOT
import { ParseTreeListener } from "antlr4ts/tree/ParseTreeListener";
@ -111,7 +111,6 @@ import { AlterScheduledQueryChangeContext } from "./HiveSqlParser";
import { ScheduleSpecContext } from "./HiveSqlParser";
import { ExecutedAsSpecContext } from "./HiveSqlParser";
import { DefinedAsSpecContext } from "./HiveSqlParser";
import { ShowFunctionIdentifierContext } from "./HiveSqlParser";
import { ShowStmtIdentifierContext } from "./HiveSqlParser";
import { TableCommentContext } from "./HiveSqlParser";
import { CreateTablePartitionSpecContext } from "./HiveSqlParser";
@ -331,8 +330,13 @@ import { TableSampleContext } from "./HiveSqlParser";
import { TableSourceContext } from "./HiveSqlParser";
import { AsOfClauseContext } from "./HiveSqlParser";
import { UniqueJoinTableSourceContext } from "./HiveSqlParser";
import { DbSchemaNameContext } from "./HiveSqlParser";
import { DbSchemaNameCreateContext } from "./HiveSqlParser";
import { TableOrViewContext } from "./HiveSqlParser";
import { TableNameContext } from "./HiveSqlParser";
import { TableNameCreateContext } from "./HiveSqlParser";
import { ViewNameContext } from "./HiveSqlParser";
import { ViewNameCreateContext } from "./HiveSqlParser";
import { SubQuerySourceContext } from "./HiveSqlParser";
import { PartitioningSpecContext } from "./HiveSqlParser";
import { PartitionTableFunctionSourceContext } from "./HiveSqlParser";
@ -389,7 +393,10 @@ import { SortByClauseContext } from "./HiveSqlParser";
import { TrimFunctionContext } from "./HiveSqlParser";
import { Function_Context } from "./HiveSqlParser";
import { Null_treatmentContext } from "./HiveSqlParser";
import { FunctionNameContext } from "./HiveSqlParser";
import { FunctionNameForDDLContext } from "./HiveSqlParser";
import { FunctionNameForInvokeContext } from "./HiveSqlParser";
import { UserDefinedFuncNameContext } from "./HiveSqlParser";
import { FunctionNameCreateContext } from "./HiveSqlParser";
import { CastExpressionContext } from "./HiveSqlParser";
import { CaseExpressionContext } from "./HiveSqlParser";
import { WhenExpressionContext } from "./HiveSqlParser";
@ -458,7 +465,6 @@ import { PartitionSelectorValContext } from "./HiveSqlParser";
import { PartitionSelectorOperatorContext } from "./HiveSqlParser";
import { SubQuerySelectorOperatorContext } from "./HiveSqlParser";
import { SysFuncNamesContext } from "./HiveSqlParser";
import { DescFuncNamesContext } from "./HiveSqlParser";
import { Id_Context } from "./HiveSqlParser";
import { FunctionIdentifierContext } from "./HiveSqlParser";
import { PrincipalIdentifierContext } from "./HiveSqlParser";
@ -1712,17 +1718,6 @@ export interface HiveSqlParserListener extends ParseTreeListener {
*/
exitDefinedAsSpec?: (ctx: DefinedAsSpecContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.showFunctionIdentifier`.
* @param ctx the parse tree
*/
enterShowFunctionIdentifier?: (ctx: ShowFunctionIdentifierContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.showFunctionIdentifier`.
* @param ctx the parse tree
*/
exitShowFunctionIdentifier?: (ctx: ShowFunctionIdentifierContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.showStmtIdentifier`.
* @param ctx the parse tree
@ -4132,6 +4127,39 @@ export interface HiveSqlParserListener extends ParseTreeListener {
*/
exitUniqueJoinTableSource?: (ctx: UniqueJoinTableSourceContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.dbSchemaName`.
* @param ctx the parse tree
*/
enterDbSchemaName?: (ctx: DbSchemaNameContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.dbSchemaName`.
* @param ctx the parse tree
*/
exitDbSchemaName?: (ctx: DbSchemaNameContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.dbSchemaNameCreate`.
* @param ctx the parse tree
*/
enterDbSchemaNameCreate?: (ctx: DbSchemaNameCreateContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.dbSchemaNameCreate`.
* @param ctx the parse tree
*/
exitDbSchemaNameCreate?: (ctx: DbSchemaNameCreateContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.tableOrView`.
* @param ctx the parse tree
*/
enterTableOrView?: (ctx: TableOrViewContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.tableOrView`.
* @param ctx the parse tree
*/
exitTableOrView?: (ctx: TableOrViewContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.tableName`.
* @param ctx the parse tree
@ -4143,6 +4171,17 @@ export interface HiveSqlParserListener extends ParseTreeListener {
*/
exitTableName?: (ctx: TableNameContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.tableNameCreate`.
* @param ctx the parse tree
*/
enterTableNameCreate?: (ctx: TableNameCreateContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.tableNameCreate`.
* @param ctx the parse tree
*/
exitTableNameCreate?: (ctx: TableNameCreateContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.viewName`.
* @param ctx the parse tree
@ -4154,6 +4193,17 @@ export interface HiveSqlParserListener extends ParseTreeListener {
*/
exitViewName?: (ctx: ViewNameContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.viewNameCreate`.
* @param ctx the parse tree
*/
enterViewNameCreate?: (ctx: ViewNameCreateContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.viewNameCreate`.
* @param ctx the parse tree
*/
exitViewNameCreate?: (ctx: ViewNameCreateContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.subQuerySource`.
* @param ctx the parse tree
@ -4771,15 +4821,48 @@ export interface HiveSqlParserListener extends ParseTreeListener {
exitNull_treatment?: (ctx: Null_treatmentContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.functionName`.
* Enter a parse tree produced by `HiveSqlParser.functionNameForDDL`.
* @param ctx the parse tree
*/
enterFunctionName?: (ctx: FunctionNameContext) => void;
enterFunctionNameForDDL?: (ctx: FunctionNameForDDLContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.functionName`.
* Exit a parse tree produced by `HiveSqlParser.functionNameForDDL`.
* @param ctx the parse tree
*/
exitFunctionName?: (ctx: FunctionNameContext) => void;
exitFunctionNameForDDL?: (ctx: FunctionNameForDDLContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.functionNameForInvoke`.
* @param ctx the parse tree
*/
enterFunctionNameForInvoke?: (ctx: FunctionNameForInvokeContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.functionNameForInvoke`.
* @param ctx the parse tree
*/
exitFunctionNameForInvoke?: (ctx: FunctionNameForInvokeContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.userDefinedFuncName`.
* @param ctx the parse tree
*/
enterUserDefinedFuncName?: (ctx: UserDefinedFuncNameContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.userDefinedFuncName`.
* @param ctx the parse tree
*/
exitUserDefinedFuncName?: (ctx: UserDefinedFuncNameContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.functionNameCreate`.
* @param ctx the parse tree
*/
enterFunctionNameCreate?: (ctx: FunctionNameCreateContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.functionNameCreate`.
* @param ctx the parse tree
*/
exitFunctionNameCreate?: (ctx: FunctionNameCreateContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.castExpression`.
@ -5529,17 +5612,6 @@ export interface HiveSqlParserListener extends ParseTreeListener {
*/
exitSysFuncNames?: (ctx: SysFuncNamesContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.descFuncNames`.
* @param ctx the parse tree
*/
enterDescFuncNames?: (ctx: DescFuncNamesContext) => void;
/**
* Exit a parse tree produced by `HiveSqlParser.descFuncNames`.
* @param ctx the parse tree
*/
exitDescFuncNames?: (ctx: DescFuncNamesContext) => void;
/**
* Enter a parse tree produced by `HiveSqlParser.id_`.
* @param ctx the parse tree

View File

@ -1,4 +1,4 @@
// Generated from /Users/xuxiaoqi/Documents/work/daishu-code/dt-sql-parser/src/grammar/hive/HiveSqlParser.g4 by ANTLR 4.9.0-SNAPSHOT
// Generated from /Users/hayden/Desktop/dt-works/dt-sql-parser/src/grammar/hive/HiveSqlParser.g4 by ANTLR 4.9.0-SNAPSHOT
import { ParseTreeVisitor } from "antlr4ts/tree/ParseTreeVisitor";
@ -111,7 +111,6 @@ import { AlterScheduledQueryChangeContext } from "./HiveSqlParser";
import { ScheduleSpecContext } from "./HiveSqlParser";
import { ExecutedAsSpecContext } from "./HiveSqlParser";
import { DefinedAsSpecContext } from "./HiveSqlParser";
import { ShowFunctionIdentifierContext } from "./HiveSqlParser";
import { ShowStmtIdentifierContext } from "./HiveSqlParser";
import { TableCommentContext } from "./HiveSqlParser";
import { CreateTablePartitionSpecContext } from "./HiveSqlParser";
@ -331,8 +330,13 @@ import { TableSampleContext } from "./HiveSqlParser";
import { TableSourceContext } from "./HiveSqlParser";
import { AsOfClauseContext } from "./HiveSqlParser";
import { UniqueJoinTableSourceContext } from "./HiveSqlParser";
import { DbSchemaNameContext } from "./HiveSqlParser";
import { DbSchemaNameCreateContext } from "./HiveSqlParser";
import { TableOrViewContext } from "./HiveSqlParser";
import { TableNameContext } from "./HiveSqlParser";
import { TableNameCreateContext } from "./HiveSqlParser";
import { ViewNameContext } from "./HiveSqlParser";
import { ViewNameCreateContext } from "./HiveSqlParser";
import { SubQuerySourceContext } from "./HiveSqlParser";
import { PartitioningSpecContext } from "./HiveSqlParser";
import { PartitionTableFunctionSourceContext } from "./HiveSqlParser";
@ -389,7 +393,10 @@ import { SortByClauseContext } from "./HiveSqlParser";
import { TrimFunctionContext } from "./HiveSqlParser";
import { Function_Context } from "./HiveSqlParser";
import { Null_treatmentContext } from "./HiveSqlParser";
import { FunctionNameContext } from "./HiveSqlParser";
import { FunctionNameForDDLContext } from "./HiveSqlParser";
import { FunctionNameForInvokeContext } from "./HiveSqlParser";
import { UserDefinedFuncNameContext } from "./HiveSqlParser";
import { FunctionNameCreateContext } from "./HiveSqlParser";
import { CastExpressionContext } from "./HiveSqlParser";
import { CaseExpressionContext } from "./HiveSqlParser";
import { WhenExpressionContext } from "./HiveSqlParser";
@ -458,7 +465,6 @@ import { PartitionSelectorValContext } from "./HiveSqlParser";
import { PartitionSelectorOperatorContext } from "./HiveSqlParser";
import { SubQuerySelectorOperatorContext } from "./HiveSqlParser";
import { SysFuncNamesContext } from "./HiveSqlParser";
import { DescFuncNamesContext } from "./HiveSqlParser";
import { Id_Context } from "./HiveSqlParser";
import { FunctionIdentifierContext } from "./HiveSqlParser";
import { PrincipalIdentifierContext } from "./HiveSqlParser";
@ -1283,13 +1289,6 @@ export interface HiveSqlParserVisitor<Result> extends ParseTreeVisitor<Result> {
*/
visitDefinedAsSpec?: (ctx: DefinedAsSpecContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.showFunctionIdentifier`.
* @param ctx the parse tree
* @return the visitor result
*/
visitShowFunctionIdentifier?: (ctx: ShowFunctionIdentifierContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.showStmtIdentifier`.
* @param ctx the parse tree
@ -2823,6 +2822,27 @@ export interface HiveSqlParserVisitor<Result> extends ParseTreeVisitor<Result> {
*/
visitUniqueJoinTableSource?: (ctx: UniqueJoinTableSourceContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.dbSchemaName`.
* @param ctx the parse tree
* @return the visitor result
*/
visitDbSchemaName?: (ctx: DbSchemaNameContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.dbSchemaNameCreate`.
* @param ctx the parse tree
* @return the visitor result
*/
visitDbSchemaNameCreate?: (ctx: DbSchemaNameCreateContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.tableOrView`.
* @param ctx the parse tree
* @return the visitor result
*/
visitTableOrView?: (ctx: TableOrViewContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.tableName`.
* @param ctx the parse tree
@ -2830,6 +2850,13 @@ export interface HiveSqlParserVisitor<Result> extends ParseTreeVisitor<Result> {
*/
visitTableName?: (ctx: TableNameContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.tableNameCreate`.
* @param ctx the parse tree
* @return the visitor result
*/
visitTableNameCreate?: (ctx: TableNameCreateContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.viewName`.
* @param ctx the parse tree
@ -2837,6 +2864,13 @@ export interface HiveSqlParserVisitor<Result> extends ParseTreeVisitor<Result> {
*/
visitViewName?: (ctx: ViewNameContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.viewNameCreate`.
* @param ctx the parse tree
* @return the visitor result
*/
visitViewNameCreate?: (ctx: ViewNameCreateContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.subQuerySource`.
* @param ctx the parse tree
@ -3230,11 +3264,32 @@ export interface HiveSqlParserVisitor<Result> extends ParseTreeVisitor<Result> {
visitNull_treatment?: (ctx: Null_treatmentContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.functionName`.
* Visit a parse tree produced by `HiveSqlParser.functionNameForDDL`.
* @param ctx the parse tree
* @return the visitor result
*/
visitFunctionName?: (ctx: FunctionNameContext) => Result;
visitFunctionNameForDDL?: (ctx: FunctionNameForDDLContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.functionNameForInvoke`.
* @param ctx the parse tree
* @return the visitor result
*/
visitFunctionNameForInvoke?: (ctx: FunctionNameForInvokeContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.userDefinedFuncName`.
* @param ctx the parse tree
* @return the visitor result
*/
visitUserDefinedFuncName?: (ctx: UserDefinedFuncNameContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.functionNameCreate`.
* @param ctx the parse tree
* @return the visitor result
*/
visitFunctionNameCreate?: (ctx: FunctionNameCreateContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.castExpression`.
@ -3712,13 +3767,6 @@ export interface HiveSqlParserVisitor<Result> extends ParseTreeVisitor<Result> {
*/
visitSysFuncNames?: (ctx: SysFuncNamesContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.descFuncNames`.
* @param ctx the parse tree
* @return the visitor result
*/
visitDescFuncNames?: (ctx: DescFuncNamesContext) => Result;
/**
* Visit a parse tree produced by `HiveSqlParser.id_`.
* @param ctx the parse tree

View File

@ -17,20 +17,20 @@ export enum SyntaxContextType {
CATALOG = 'catalog',
/** database name path, such as catalog.db */
DATABASE = 'database',
/** database name path will be created */
/** database name path that will be created */
DATABASE_CREATE = 'databaseCreate',
/** table name path, such as catalog.db.tb */
TABLE = 'table',
/** table name path will be created */
/** table name path that will be created */
TABLE_CREATE = 'tableCreate',
/** view name */
/** view name path, such as db.tb.view */
VIEW = 'view',
/** view name path that will be created */
VIEW_CREATE = 'viewCreate',
/** function name */
FUNCTION = 'function',
/** principal name */
PRINCIPAL = 'principal',
/** hint arg name */
HTNTARG = 'hintArg',
/** function name that will be created */
FUNCTION_CREATE = 'functionCreate',
}
export interface WordRange {

View File

@ -18,11 +18,15 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
}
protected preferredRules: Set<number> = new Set([
HiveSqlParser.RULE_dbSchemaName, // db or schema name
HiveSqlParser.RULE_dbSchemaNameCreate, // db or schema name that will be created
HiveSqlParser.RULE_tableName, // table name
HiveSqlParser.RULE_tableNameCreate, // table name that will be created
HiveSqlParser.RULE_viewName, // view name
HiveSqlParser.RULE_functionIdentifier, // function name
HiveSqlParser.RULE_principalIdentifier, // USER/ROLE/GROUP name
HiveSqlParser.RULE_hintArgName, // hint name
HiveSqlParser.RULE_viewNameCreate, // view name that will be created
HiveSqlParser.RULE_userDefinedFuncName, // function name
HiveSqlParser.RULE_functionNameCreate, // function name that will be created
]);
protected get splitListener () {
@ -37,7 +41,6 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
): Suggestions<Token> {
const originalSyntaxSuggestions: SyntaxSuggestion<Token>[] = [];
const keywords: string[] = [];
for (let candidate of candidates.rules) {
const [ruleType, candidateRule] = candidate;
const startTokenIndex = candidateRule.startTokenIndex + tokenIndexOffset;
@ -45,24 +48,36 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
let syntaxContextType: SyntaxContextType;
switch (ruleType) {
case HiveSqlParser.RULE_dbSchemaName: {
syntaxContextType = SyntaxContextType.DATABASE;
break;
}
case HiveSqlParser.RULE_dbSchemaNameCreate: {
syntaxContextType = SyntaxContextType.DATABASE_CREATE;
break;
}
case HiveSqlParser.RULE_tableName: {
syntaxContextType = SyntaxContextType.TABLE;
break;
}
case HiveSqlParser.RULE_tableNameCreate: {
syntaxContextType = SyntaxContextType.TABLE_CREATE
break;
}
case HiveSqlParser.RULE_viewName: {
syntaxContextType = SyntaxContextType.VIEW;
break;
}
case HiveSqlParser.RULE_functionIdentifier: {
case HiveSqlParser.RULE_viewNameCreate: {
syntaxContextType = SyntaxContextType.VIEW_CREATE;
break;
}
case HiveSqlParser.RULE_userDefinedFuncName: {
syntaxContextType = SyntaxContextType.FUNCTION;
break;
}
case HiveSqlParser.RULE_principalIdentifier: {
syntaxContextType = SyntaxContextType.PRINCIPAL;
break;
}
case HiveSqlParser.RULE_hintArgName: {
syntaxContextType = SyntaxContextType.HTNTARG;
case HiveSqlParser.RULE_functionNameCreate: {
syntaxContextType = SyntaxContextType.FUNCTION_CREATE;
break;
}
default:
@ -86,15 +101,15 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
}
}
return {
syntax: [],
keywords: [],
syntax: originalSyntaxSuggestions,
keywords,
};
}
}
export class HiveSqlSplitListener implements HiveSqlParserListener {
private _statementContext: StatementContext[] = [];
exitStatement = (ctx: StatementContext) => {
this._statementContext.push(ctx);
}

View File

@ -20,10 +20,10 @@ describe('Flink SQL Syntax Suggestion', () => {
lineNumber: 1,
column: 22
}
const suggestion = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax?.[0];
expect(suggestion?.syntaxContextType === SyntaxContextType.TABLE)
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat', '.', 'db', '.', 'tb' ])
})
@ -33,9 +33,10 @@ describe('Flink SQL Syntax Suggestion', () => {
lineNumber: 3,
column: 21
}
const suggestion = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax?.[0];
expect(suggestion?.syntaxContextType === SyntaxContextType.TABLE)
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat', '.', 'db' ])
})
@ -45,9 +46,10 @@ describe('Flink SQL Syntax Suggestion', () => {
lineNumber: 5,
column: 20
}
const suggestion = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax?.[0];
expect(suggestion?.syntaxContextType === SyntaxContextType.TABLE_CREATE)
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat', '.', 'db' ])
})
@ -57,9 +59,10 @@ describe('Flink SQL Syntax Suggestion', () => {
lineNumber: 7,
column: 21
}
const suggestion = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax?.[0];
expect(suggestion?.syntaxContextType === SyntaxContextType.TABLE)
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat' ])
})
@ -69,9 +72,10 @@ describe('Flink SQL Syntax Suggestion', () => {
lineNumber: 9,
column: 20
}
const suggestion = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax?.[0];
expect(suggestion?.syntaxContextType === SyntaxContextType.DATABASE)
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.DATABASE);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat', '.' ])
})
@ -81,9 +85,10 @@ describe('Flink SQL Syntax Suggestion', () => {
lineNumber: 9,
column: 20
}
const suggestion = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax?.[0];
expect(suggestion?.syntaxContextType === SyntaxContextType.DATABASE)
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.DATABASE);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat', '.' ]);
})
@ -93,9 +98,10 @@ describe('Flink SQL Syntax Suggestion', () => {
lineNumber: 19,
column: 10,
}
const suggestion = parser.getSuggestionAtCaretPosition(multipleSql, pos)?.syntax?.[0];
console.log(suggestion);
expect(suggestion?.syntaxContextType === SyntaxContextType.DATABASE);
const syntaxes = parser.getSuggestionAtCaretPosition(multipleSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.DATABASE);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat1', '.' ]);
})

View File

@ -5,7 +5,7 @@ import FlinkSQL from '../../../../src/parser/flinksql'
const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');
describe('Flink SQL Syntax Suggestion', () => {
describe('Flink SQL Token Suggestion', () => {
const parser = new FlinkSQL();
test('Use Statement ', () => {

View File

@ -0,0 +1,19 @@
INSERT INTO db.tb ;
SELECT * FROM db.;
CREATE TABLE db. VALUES;
DROP TABLE IF EXISTS db.a;
CREATE OR REPLACE VIEW db.v;
DROP VIEW db.v ;
CREATE FUNCTION fn1;
SELECT name, calculate_age(birthdate) AS age FROM students;
CREATE DATABASE db;
DROP SCHEMA IF EXISTS sch;

View File

@ -0,0 +1,20 @@
ALTER
;
CREATE
;
DELETE
;
DESCRIBE
;
DROP
;
EXPORT
;
IMPORT
;
INSERT
;
LOAD
;
SHOW
;

View File

@ -0,0 +1,147 @@
import fs from 'fs';
import path from 'path';
import { CaretPosition, SyntaxContextType } from '../../../../src/parser/common/basic-parser-types';
import HiveSQL from '../../../../src/parser/hive'

// Shared fixture: every statement is deliberately left incomplete so that a
// caret placed inside it makes the parser propose an entity name (table,
// view, function or database). Caret coordinates below point into this file,
// so the fixture's line layout must not change.
const syntaxSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'), 'utf-8');

describe('Hive SQL Syntax Suggestion', () => {
    const parser = new HiveSQL();

    test('Validate Syntax SQL', () => {
        // The fixture is intentionally malformed, so validation must report errors.
        // NOTE(review): validate() is invoked three times in a row — presumably to
        // verify the parser instance gives consistent results when reused; confirm
        // the intent before collapsing the duplicates.
        expect(parser.validate(syntaxSql).length).not.toBe(0);
        expect(parser.validate(syntaxSql).length).not.toBe(0);
        expect(parser.validate(syntaxSql).length).not.toBe(0);
    });

    test('Insert table ', () => {
        // Caret at the end of `INSERT INTO db.tb` (fixture line 1) -> TABLE.
        const pos: CaretPosition = {
            lineNumber: 1,
            column: 18
        }
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);

        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map(token => token.text))
            .toEqual([ 'db', '.', 'tb' ])
    });

    test('Select table ', () => {
        // Caret after the dot in `SELECT * FROM db.` (fixture line 3) -> TABLE.
        const pos: CaretPosition = {
            lineNumber: 3,
            column: 18
        }
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);

        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map(token => token.text))
            .toEqual([ 'db', '.' ])
    });

    test('Create table ', () => {
        // Caret after the dot in `CREATE TABLE db.` (fixture line 5) -> TABLE_CREATE.
        const pos: CaretPosition = {
            lineNumber: 5,
            column: 17
        }
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE);

        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map(token => token.text))
            .toEqual([ 'db', '.' ])
    });

    test('DROP table ', () => {
        // Caret at the end of `DROP TABLE IF EXISTS db.a` (fixture line 7) -> TABLE.
        const pos: CaretPosition = {
            lineNumber: 7,
            column: 26
        }
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);

        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map(token => token.text))
            .toEqual([ 'db', '.', 'a' ])
    });

    test('Create view ', () => {
        // Caret at the end of `CREATE OR REPLACE VIEW db.v` (fixture line 9) -> VIEW_CREATE.
        const pos: CaretPosition = {
            lineNumber: 9,
            column: 28
        }
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE);

        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map(token => token.text))
            .toEqual([ 'db', '.', 'v' ])
    });

    test('Drop view ', () => {
        // Caret after `DROP VIEW db.v` (fixture line 11) -> VIEW.
        const pos: CaretPosition = {
            lineNumber: 11,
            column: 15
        }
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.VIEW);

        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map(token => token.text))
            .toEqual([ 'db', '.', 'v' ])
    });

    test('Create function ', () => {
        // Caret at the end of `CREATE FUNCTION fn1` (fixture line 13) -> FUNCTION_CREATE.
        const pos: CaretPosition = {
            lineNumber: 13,
            column: 20
        }
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE);

        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map(token => token.text))
            .toEqual([ 'fn1' ])
    });

    test('Use function', () => {
        // Caret inside `calculate_age(...)` in the SELECT on fixture line 15 -> FUNCTION.
        const pos: CaretPosition = {
            lineNumber: 15,
            column: 27
        }
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.FUNCTION);

        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map(token => token.text))
            .toEqual([ 'calculate_age' ])
    });

    test('Create database', () => {
        // Caret after `CREATE DATABASE db` (fixture line 17) -> DATABASE_CREATE.
        const pos: CaretPosition = {
            lineNumber: 17,
            column: 19
        }
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE);

        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map(token => token.text))
            .toEqual([ 'db' ])
    });

    test('Drop database', () => {
        // Caret after `DROP SCHEMA IF EXISTS sch` (fixture line 19) -> DATABASE.
        const pos: CaretPosition = {
            lineNumber: 19,
            column: 26
        }
        const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
        const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.DATABASE);

        expect(suggestion).not.toBeUndefined();
        expect(suggestion?.wordRanges.map(token => token.text))
            .toEqual([ 'sch' ])
    });
})

View File

@ -0,0 +1,232 @@
import fs from "fs";
import path from "path";
import { CaretPosition } from "../../../../src/parser/common/basic-parser-types";
import HiveSQL from "../../../../src/parser/hive";

// Shared fixture: each odd line holds a single bare leading keyword
// (ALTER, CREATE, DELETE, ...) followed by a lone `;` on the next line;
// the caret positions below sit right after each keyword.
const tokenSql = fs.readFileSync(
    path.join(__dirname, "fixtures", "tokenSuggestion.sql"),
    "utf-8"
);

// Renamed from "Hive SQL Syntax Suggestion": this suite asserts keyword
// (token) suggestions, mirroring the Flink "Token Suggestion" suite naming.
describe("Hive SQL Token Suggestion", () => {
    const parser = new HiveSQL();

    /**
     * Keyword suggestions offered at the given caret position inside the
     * shared fixture, in the order the parser produces them.
     */
    const keywordsAt = (lineNumber: number, column: number): string[] | undefined =>
        parser.getSuggestionAtCaretPosition(tokenSql, { lineNumber, column } as CaretPosition)
            ?.keywords;

    test("After ALTER", () => {
        expect(keywordsAt(1, 7)).toEqual([
            "APPLICATION",
            "GROUP",
            "USER",
            "POOL",
            "TRIGGER",
            "RESOURCE",
            "SCHEDULED",
            "INDEX",
            "CONNECTOR",
            "DATABASE",
            "SCHEMA",
            "MATERIALIZED",
            "VIEW",
            "TABLE",
        ]);
    });

    test("After CREATE", () => {
        expect(keywordsAt(3, 8)).toEqual([
            "CONNECTOR",
            "APPLICATION",
            "GROUP",
            "USER",
            "POOL",
            "TRIGGER",
            "RESOURCE",
            "ROLE",
            "INDEX",
            "TEMPORARY",
            "FUNCTION",
            "SCHEDULED",
            "MATERIALIZED",
            "VIEW",
            "OR",
            "MANAGED",
            "TABLE",
            "EXTERNAL",
            "TRANSACTIONAL",
            "REMOTE",
            "DATABASE",
            "SCHEMA",
        ]);
    });

    test("After DELETE", () => {
        expect(keywordsAt(5, 8)).toEqual(["FROM"]);
    });

    test("After DESCRIBE", () => {
        expect(keywordsAt(7, 10)).toEqual([
            "EXTENDED",
            "FORMATTED",
            "FUNCTION",
            "CONNECTOR",
            "DATABASE",
            "SCHEMA",
        ]);
    });

    test("After DROP", () => {
        expect(keywordsAt(9, 6)).toEqual([
            "CONNECTOR",
            "APPLICATION",
            "GROUP",
            "USER",
            "POOL",
            "TRIGGER",
            "RESOURCE",
            "ROLE",
            "INDEX",
            "TEMPORARY",
            "FUNCTION",
            "MATERIALIZED",
            "VIEW",
            "SCHEDULED",
            "TABLE",
            "DATABASE",
            "SCHEMA",
        ]);
    });

    test("After EXPORT", () => {
        expect(keywordsAt(11, 8)).toEqual(["TABLE"]);
    });

    test("After IMPORT", () => {
        expect(keywordsAt(13, 8)).toEqual([
            "FROM",
            "TABLE",
            "EXTERNAL",
        ]);
    });

    test("After INSERT", () => {
        expect(keywordsAt(15, 8)).toEqual([
            "INTO",
            "OVERWRITE",
        ]);
    });

    test("After LOAD", () => {
        expect(keywordsAt(17, 6)).toEqual(["DATA"]);
    });

    test("After SHOW", () => {
        expect(keywordsAt(19, 6)).toEqual([
            "CURRENT",
            "ROLES",
            "PRINCIPALS",
            "ROLE",
            "GRANT",
            "INDEX",
            "INDEXES",
            "FORMATTED",
            "CONNECTORS",
            "RESOURCE",
            "CONF",
            "TRANSACTIONS",
            "COMPACTIONS",
            "LOCKS",
            "TBLPROPERTIES",
            "TABLE",
            "CREATE",
            "PARTITIONS",
            "FUNCTIONS",
            "COLUMNS",
            "SORTED",
            "MATERIALIZED",
            "VIEWS",
            "TABLES",
            "EXTENDED",
            "DATABASES",
            "SCHEMAS",
        ]);
    });
});