refactor(spark): extract spark export into src index
@@ -8,3 +8,6 @@ export * from './lib/hive/HiveSqlListener';
 export * from './lib/hive/HiveSqlVisitor';
 export * from './lib/plsql/PlSqlParserListener';
 export * from './lib/plsql/PlSqlParserVisitor';
+export * from './lib/spark/SparkSqlVisitor';
+export * from './lib/spark/SparkSqlListener';
+
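For consumers, this means the Spark helpers now resolve from the src index instead of the deep parser path. A minimal before/after sketch, using the same relative path to the index that the tests below use:

// Before (deep import, removed by this commit):
// import SQLParser, { SparkSqlListener, SparkSqlVisitor } from '../../../src/parser/spark';

// After: everything comes from the src index
import { SparkSQL, SparkSqlListener, SparkSqlVisitor } from '../../../src';

const parser = new SparkSQL();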
@@ -1,9 +1,6 @@
 import { InputStream, CommonTokenStream, Lexer } from 'antlr4';
 import { SparkSqlLexer } from '../lib/spark/SparkSqlLexer';
 import { SparkSqlParser } from '../lib/spark/SparkSqlParser';
-export * from '../lib/spark/SparkSqlVisitor';
-export * from '../lib/spark/SparkSqlListener';
-
 import BasicParser from './common/basicParser';

 export default class SparkSQL extends BasicParser {
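The imports that remain are the usual ANTLR4 ingredients; how SparkSQL wires them together lives in BasicParser and is not shown in this diff. As a rough sketch only, with the helper name and details assumed rather than taken from the library:

import { InputStream, CommonTokenStream } from 'antlr4';
import { SparkSqlLexer } from '../lib/spark/SparkSqlLexer';
import { SparkSqlParser } from '../lib/spark/SparkSqlParser';

// Hypothetical standalone helper: char stream -> lexer -> token stream -> parser.
function buildSparkParser(sql: string) {
    const chars = new InputStream(sql);
    const lexer = new SparkSqlLexer(chars);
    const tokens = new CommonTokenStream(lexer);
    return new SparkSqlParser(tokens);
}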
@@ -1,9 +1,9 @@
-import SQLParser from '../../../src/parser/spark';
+import { SparkSQL } from '../../../src';

 const log = console.log.bind(console);

 describe('SparkSQL Lexer tests', () => {
-    const parser = new SQLParser();
+    const parser = new SparkSQL();

     test('select id,name from user1;', () => {
         const sql = `select id,name from user1;`;
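The hunk is cut off before the test's assertion. A plausible completion of the lexer test after the refactor; getAllTokens and the expected token count are assumptions, since only parse and validate appear elsewhere in this diff:

import { SparkSQL } from '../../../src';

describe('SparkSQL Lexer tests', () => {
    const parser = new SparkSQL();

    test('select id,name from user1;', () => {
        const sql = `select id,name from user1;`;
        // getAllTokens is an assumed tokenizing helper; swap in the real API as needed.
        const tokens = parser.getAllTokens(sql);
        expect(tokens.length).toBeGreaterThan(0);
    });
});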
@@ -1,9 +1,9 @@
-import SQLParser, { SparkSqlListener } from '../../../src/parser/spark';
+import { SparkSQL, SparkSqlListener } from '../../../src';

 describe('Spark SQL Listener Tests', () => {
     const expectTableName = 'user1';
     const sql = `select id,name,sex from ${expectTableName};`;
-    const parser = new SQLParser();
+    const parser = new SparkSQL();

     const parserTree = parser.parse(sql);

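A hedged sketch of how such a listener test typically continues. The enterTableName rule callback and the parser.listen helper are assumptions about the generated grammar and the base class, not code shown in this diff:

import { SparkSQL, SparkSqlListener } from '../../../src';

describe('Spark SQL Listener Tests', () => {
    const expectTableName = 'user1';
    const sql = `select id,name,sex from ${expectTableName};`;
    const parser = new SparkSQL();
    const parserTree = parser.parse(sql);

    test('listener picks up the table name', () => {
        let result = '';
        // enterTableName is a hypothetical rule callback; the real name depends on the grammar.
        class TableNameListener extends SparkSqlListener {
            enterTableName(ctx: any) {
                result = ctx.getText().toLowerCase();
            }
        }
        // listen() is assumed to walk parserTree with the given listener.
        parser.listen(new TableNameListener() as any, parserTree);
        expect(result).toBe(expectTableName);
    });
});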
@@ -1,10 +1,9 @@
-/* eslint-disable max-len */
-import SQLParser from '../../../src/parser/spark';
+import { SparkSQL } from '../../../src';

 const error = console.log.bind(console, '***** error\n');

 const validateTest = (sqls) => {
-    const parser = new SQLParser();
+    const parser = new SparkSQL();
     sqls.forEach((sql, i) => {
         const result = parser.validate(sql);
         if (result.length !== 0) {
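The hunk stops inside validateTest's error branch. A sketch of how the helper plausibly completes and gets used; the error(...) call, the final expect, and the sample statement are assumptions about the cut-off lines:

import { SparkSQL } from '../../../src';

const error = console.log.bind(console, '***** error\n');

const validateTest = (sqls: string[]) => {
    const parser = new SparkSQL();
    sqls.forEach((sql, i) => {
        const result = parser.validate(sql);
        if (result.length !== 0) {
            // Assumed continuation: log the failing statement, then fail the test.
            error(i, sql, result);
        }
        expect(result.length).toBe(0);
    });
};

describe('SparkSQL syntax validation', () => {
    test('simple statements', () => {
        validateTest([
            'SELECT id, name FROM user1;',
        ]);
    });
});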
@@ -1,9 +1,9 @@
-import SQLParser, { SparkSqlVisitor } from '../../../src/parser/spark';
+import { SparkSQL, SparkSqlVisitor } from '../../../src';

 describe('Spark SQL Visitor Tests', () => {
     const expectTableName = 'user1';
     const sql = `select id,name,sex from ${expectTableName};`;
-    const parser = new SQLParser();
+    const parser = new SparkSQL();

     const parserTree = parser.parse(sql, (error) => {
         console.log('Parse error:', error);
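A hedged sketch of the visitor side after the refactor. The visitTableName method is a hypothetical grammar rule callback, and walking the tree via accept() is an assumption about what parse() returns:

import { SparkSQL, SparkSqlVisitor } from '../../../src';

describe('Spark SQL Visitor Tests', () => {
    const expectTableName = 'user1';
    const sql = `select id,name,sex from ${expectTableName};`;
    const parser = new SparkSQL();

    const parserTree = parser.parse(sql, (error) => {
        console.log('Parse error:', error);
    });

    test('visitor reads the table name', () => {
        let result = '';
        class TableNameVisitor extends SparkSqlVisitor {
            // Hypothetical rule method; the real name comes from the generated grammar.
            visitTableName(ctx: any) {
                result = ctx.getText().toLowerCase();
            }
        }
        // Assumes parse() returns an ANTLR parse tree that supports accept().
        (parserTree as any).accept(new TableNameVisitor());
        expect(result).toBe(expectTableName);
    });
});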