add sparkSql base grammar

xigua 2020-10-28 21:34:13 +08:00
parent 654d96028c
commit bfe055be71
6 changed files with 1945 additions and 0 deletions

src/grammar/spark/SqlBase.g4 (new file, 1836 lines)

Diff suppressed because it is too large.

src/parser/spark.ts (new file, 19 lines)

@@ -0,0 +1,19 @@
import { InputStream, CommonTokenStream, Lexer } from 'antlr4';
import { SqlBaseLexer } from '../lib/spark/SqlBaseLexer';
import { SqlBaseParser } from '../lib/spark/SqlBaseParser';
export * from '../lib/spark/SqlBaseVisitor';
export * from '../lib/spark/SqlBaseListener';
import BasicParser from './common/BasicParser';
export default class SparkSQL extends BasicParser {
    public createLexer(input: string): Lexer {
        // Some lexer rules only match uppercase tokens, so the input is upper-cased before lexing.
        const chars = new InputStream(input.toUpperCase());
        const lexer = <unknown>new SqlBaseLexer(chars) as Lexer;
        return lexer;
    }
    public createParserFromLexer(lexer: Lexer) {
        // Wrap the lexer's token stream and hand it to the generated parser.
        const tokenStream = new CommonTokenStream(lexer);
        return new SqlBaseParser(tokenStream);
    }
}
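For context, a minimal usage sketch of the new SparkSQL class. The helper methods (getAllTokens, validate, parse, toString) are assumed to come from BasicParser, matching how the test files below exercise them; the import path is illustrative only:

import SparkSQL from '../../src/parser/spark';

const parser = new SparkSQL();
const sql = 'select id, name from user1;';

// Tokenize with the generated SqlBaseLexer (the input is upper-cased internally).
const tokens = parser.getAllTokens(sql);

// Validate syntax; an empty result array means the statement parsed without errors.
const errors = parser.validate(sql);

// Build a parse tree that listeners and visitors can walk.
const tree = parser.parse(sql);
console.log(tokens.length, errors.length, parser.toString(tree));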


@ -0,0 +1,14 @@
import SQLParser from '../../../src/parser/spark';
describe('SparkSQL Lexer tests', () => {
    const parser = new SQLParser();
    // const sql = 'select id,name,sex from user1;';
    const sql = 'select * from person where age >= 20 order by age desc limit 2;';
    const tokens = parser.getAllTokens(sql);
    console.log('tokens', tokens);
    test('token counts', () => {
        expect(tokens.length).toBe(28);
    });
});


@@ -0,0 +1,22 @@
import SQLParser, { SqlBaseListener } from '../../../src/parser/spark';
describe('Spark SQL Listener Tests', () => {
    const expectTableName = 'user1';
    const sql = `select id,name,sex from ${expectTableName};`;
    const parser = new SQLParser();
    const parserTree = parser.parse(sql);
    test('Listener enterTableName', async () => {
        let result = '';
        class MyListener extends SqlBaseListener {
            enterTableName(ctx): void {
                result = ctx.getText().toLowerCase();
            }
        }
        const listenTableName: any = new MyListener();
        await parser.listen(listenTableName, parserTree);
        expect(result).toBe(expectTableName);
    });
});


@@ -0,0 +1,27 @@
import SQLParser from '../../../src/parser/spark';
const log = console.log.bind(console);
describe('Spark SQL Syntax Tests', () => {
    const parser = new SQLParser();
    test('Select Statement', () => {
        const sql = 'select id,name from user1;';
        const result = parser.validate(sql);
        log('result', result);
        expect(result.length).toBe(0);
    });
    test('Select 1+1', () => {
        const sql = 'SELECT 1+1;';
        const result = parser.validate(sql);
        expect(result.length).toBe(0);
    });
    test('select', () => {
        const sql = 'select * from person where age >= 20 order by age desc limit 2';
        const result = parser.validate(sql);
        log('result', result);
        expect(result.length).toBe(0);
    });
});


@@ -0,0 +1,27 @@
import SQLParser, { SqlBaseVisitor } from '../../../src/parser/spark';
describe('Spark SQL Visitor Tests', () => {
    const expectTableName = 'user1';
    const sql = `select id,name,sex from ${expectTableName};`;
    const parser = new SQLParser();
    const parserTree = parser.parse(sql, (error) => {
        console.log('Parse error:', error);
    });
    console.log('Parser tree string:', parser.toString(parserTree));
    test('Visitor visitTableName', () => {
        let result = '';
        class MyVisitor extends SqlBaseVisitor {
            visitTableName(ctx): void {
                result = ctx.getText().toLowerCase();
                super.visitTableName(ctx);
            }
        }
        const visitor: any = new MyVisitor();
        visitor.visit(parserTree);
        expect(result).toBe(expectTableName);
    });
});