test: hive complete data types' check and unit tests (#160)
* feat: add Authorization SQL and update syntax file
* test: hive complete data types' check and unit tests
* feat: hive add syntax completion automatically
* feat: update hive's syntax completion

---------

Co-authored-by: zhaoge <>
@@ -22,7 +22,15 @@ export enum SyntaxContextType {
     /** table name path, such as catalog.db.tb */
     TABLE = 'table',
     /** table name path will be created */
-    TABLE_CREATE = 'tableCreate'
+    TABLE_CREATE = 'tableCreate',
+    /** view name */
+    VIEW = 'view',
+    /** function name */
+    FUNCTION = 'function',
+    /** principal name */
+    PRINCIPAL = 'principal',
+    /** hint arg name */
+    HTNTARG = 'hintArg',
 }
 
 export interface WordRange {
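For reference, the new enum members are what an editor integration switches on when deciding where to fetch completion items from. Below is a minimal TypeScript sketch: the enum and the `SyntaxSuggestion` shape are mirrored locally from this diff (only the members visible in this hunk), while `completionItemsFor()` and its return values are hypothetical and not part of the repo.

    // Local mirrors of the shapes shown in this diff; the real definitions live
    // in src/parser/common/basic-parser-types.ts and contain more members.
    enum SyntaxContextType {
        TABLE = 'table',
        TABLE_CREATE = 'tableCreate',
        VIEW = 'view',
        FUNCTION = 'function',
        PRINCIPAL = 'principal',
        HTNTARG = 'hintArg',
    }

    interface SyntaxSuggestion<T> {
        syntaxContextType: SyntaxContextType;
        wordRanges: T[];
    }

    // Hypothetical editor-side dispatcher: pick a completion source per context type.
    function completionItemsFor(suggestion: SyntaxSuggestion<string>): string[] {
        switch (suggestion.syntaxContextType) {
            case SyntaxContextType.TABLE:
            case SyntaxContextType.TABLE_CREATE:
                return ['catalog.db.tb']; // e.g. table paths from a metastore lookup
            case SyntaxContextType.VIEW:
                return ['sales_view'];
            case SyntaxContextType.FUNCTION:
                return ['concat', 'substr'];
            case SyntaxContextType.PRINCIPAL:
                return ['some_user', 'some_role'];
            default:
                return []; // HTNTARG (hint args) and anything unrecognised
        }
    }

    console.log(completionItemsFor({ syntaxContextType: SyntaxContextType.TABLE, wordRanges: ['db', '.'] }));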
@@ -1,9 +1,10 @@
 import { Token } from 'antlr4ts';
 import { CandidatesCollection } from 'antlr4-c3';
 import { HiveSqlLexer } from '../lib/hive/HiveSqlLexer';
-import { HiveSqlParser, ProgramContext } from '../lib/hive/HiveSqlParser';
+import { HiveSqlParser, ProgramContext, StatementContext, ExplainStatementContext, ExecStatementContext } from '../lib/hive/HiveSqlParser';
 import BasicParser from './common/basicParser';
-import { Suggestions } from './common/basic-parser-types';
+import { HiveSqlParserListener } from '../lib/hive/HiveSqlParserListener';
+import { SyntaxContextType, Suggestions, SyntaxSuggestion } from './common/basic-parser-types';
 
 
 export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, HiveSqlParser> {
@@ -16,21 +17,93 @@ export default class HiveSQL extends BasicParser<HiveSqlLexer, ProgramContext, H
         return new HiveSqlParser(tokenStream);
     }
 
+    protected preferredRules: Set<number> = new Set([
+        HiveSqlParser.RULE_tableName, // table name
+        HiveSqlParser.RULE_viewName, // view name
+        HiveSqlParser.RULE_functionIdentifier, // function name
+        HiveSqlParser.RULE_principalIdentifier, // USER/ROLE/GROUP name
+        HiveSqlParser.RULE_hintArgName, // hint name
+    ]);
+
     protected get splitListener () {
-        return null as any;
+        return new HiveSqlSplitListener();
     }
 
-    protected preferredRules: Set<number> = new Set();
-
     protected processCandidates(
-        candidates: CandidatesCollection,
-        allTokens: Token[],
-        caretTokenIndex: number
+        candidates: CandidatesCollection,
+        allTokens: Token[],
+        caretTokenIndex: number,
+        tokenIndexOffset: number,
     ): Suggestions<Token> {
+        const originalSyntaxSuggestions: SyntaxSuggestion<Token>[] = [];
+        const keywords: string[] = [];
+
+        for (let candidate of candidates.rules) {
+            const [ruleType, candidateRule] = candidate;
+            const startTokenIndex = candidateRule.startTokenIndex + tokenIndexOffset;
+            const tokenRanges = allTokens.slice(startTokenIndex, caretTokenIndex + tokenIndexOffset + 1);
+
+            let syntaxContextType: SyntaxContextType;
+            switch (ruleType) {
+                case HiveSqlParser.RULE_tableName: {
+                    syntaxContextType = SyntaxContextType.TABLE;
+                    break;
+                }
+                case HiveSqlParser.RULE_viewName: {
+                    syntaxContextType = SyntaxContextType.VIEW;
+                    break;
+                }
+                case HiveSqlParser.RULE_functionIdentifier: {
+                    syntaxContextType = SyntaxContextType.FUNCTION;
+                    break;
+                }
+                case HiveSqlParser.RULE_principalIdentifier: {
+                    syntaxContextType = SyntaxContextType.PRINCIPAL;
+                    break;
+                }
+                case HiveSqlParser.RULE_hintArgName: {
+                    syntaxContextType = SyntaxContextType.HTNTARG;
+                    break;
+                }
+                default:
+                    break;
+            }
+
+            if (syntaxContextType) {
+                originalSyntaxSuggestions.push({
+                    syntaxContextType,
+                    wordRanges: tokenRanges,
+                });
+            }
+        }
+
+        for (let candidate of candidates.tokens) {
+            const symbolicName = this._parser.vocabulary.getSymbolicName(candidate[0]);
+            const displayName = this._parser.vocabulary.getDisplayName(candidate[0]);
+            if (symbolicName && symbolicName.startsWith('KW_')) {
+                const keyword = displayName.startsWith("'") && displayName.endsWith("'") ? displayName.slice(1, -1) : displayName;
+                keywords.push(keyword);
+            }
+        }
         return {
             syntax: [],
-            keywords: []
-        }
+            keywords: [],
+        };
     }
 }
 
+export class HiveSqlSplitListener implements HiveSqlParserListener {
+    private _statementContext: StatementContext[] = [];
+
+    exitStatement = (ctx: StatementContext) => {
+        this._statementContext.push(ctx);
+    }
+
+    enterStatement = (ctx: StatementContext) => {
+    };
+
+    get statementsContext() {
+        return this._statementContext;
+    }
+}
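Taken together, the new preferredRules, the processCandidates() override, and HiveSqlSplitListener are what turn antlr4-c3 candidates into context-tagged suggestions plus keyword candidates. Below is a rough usage sketch, assuming HiveSQL is re-exported from the package root and that the BasicParser base class exposes a getSuggestionAtCaretPosition(sql, caretPosition) entry point taking a { lineNumber, column } caret; those names and shapes may differ in this revision of the repo.

    import { HiveSQL } from 'dt-sql-parser'; // assumed root export

    const parser = new HiveSQL();
    const sql = 'SELECT * FROM db.';

    // Caret sits right after "db." on line 1, so the tableName rule should be
    // among the preferred rules collected by antlr4-c3.
    const suggestions = parser.getSuggestionAtCaretPosition(sql, {
        lineNumber: 1,
        column: sql.length + 1,
    });

    if (suggestions) {
        for (const item of suggestions.syntax) {
            // item.syntaxContextType is one of the SyntaxContextType values added
            // above, e.g. 'table' when completing a table name path.
            console.log(item.syntaxContextType, item.wordRanges.map((t) => t.text));
        }
        console.log(suggestions.keywords); // keyword candidates derived from KW_* tokens
    }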