feat: deprecate legacy APIs (#304)

* feat: deprecation of legacy APIs

* feat: deprecate plsql language
Authored by Hayden on 2024-04-28 11:41:13 +08:00, committed by GitHub
parent a5387e4729
commit f1c3bbe17c
23 changed files with 1 addition and 248,251 deletions

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -1,13 +1,4 @@
-export {
-    MySQL,
-    FlinkSQL,
-    SparkSQL,
-    HiveSQL,
-    PostgreSQL,
-    TrinoSQL,
-    ImpalaSQL,
-    PLSQL,
-} from './parser';
+export { MySQL, FlinkSQL, SparkSQL, HiveSQL, PostgreSQL, TrinoSQL, ImpalaSQL } from './parser';
 export {
     MySqlParserListener,
@@ -18,8 +9,6 @@ export {
     SparkSqlParserVisitor,
     HiveSqlParserListener,
     HiveSqlParserVisitor,
-    PlSqlParserListener,
-    PlSqlParserVisitor,
     PostgreSqlParserListener,
     PostgreSqlParserVisitor,
     TrinoSqlListener,
@@ -30,14 +19,6 @@ export {
 export { EntityContextType } from './parser/common/types';
-export {
-    /**
-     * @deprecated SyntaxContextType has been renamed to {@link EntityContextType},
-     * It will be removed when the stable version is released.
-     */
-    EntityContextType as SyntaxContextType,
-} from './parser/common/types';
 export { StmtContextType } from './parser/common/entityCollector';
 export type { CaretPosition, Suggestions, SyntaxSuggestion } from './parser/common/types';
@@ -47,8 +28,3 @@ export type { WordRange, TextSlice } from './parser/common/textAndWord';
 export type { SyntaxError, ParseError, ErrorListener } from './parser/common/parseErrorListener';
 export type { StmtContext, EntityContext } from './parser/common/entityCollector';
-/**
- * @deprecated Legacy utils will be removed when the stable version is released.
- */
-export * from './utils';
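
For downstream code, the removed alias has a one-to-one replacement. A minimal migration sketch, assuming the package is consumed under the dt-sql-parser name and that TABLE is among the published enum members:

// Before this commit the deprecated alias was still re-exported:
// import { SyntaxContextType } from 'dt-sql-parser';

// After this commit only the new name remains; the enum values are unchanged,
// so the migration is a plain rename at the import site and in type positions.
import { EntityContextType } from 'dt-sql-parser';

function describeContext(context: EntityContextType): string {
    // TABLE is assumed here to be one of the enum's members.
    return context === EntityContextType.TABLE ? 'table reference' : 'other entity';
}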

@@ -7,9 +7,6 @@ export { MySqlParserVisitor } from './mysql/MySqlParserVisitor';
 export { HiveSqlParserListener } from './hive/HiveSqlParserListener';
 export { HiveSqlParserVisitor } from './hive/HiveSqlParserVisitor';
-export { PlSqlParserListener } from './plsql/PlSqlParserListener';
-export { PlSqlParserVisitor } from './plsql/PlSqlParserVisitor';
 export { SparkSqlParserListener } from './spark/SparkSqlParserListener';
 export { SparkSqlParserVisitor } from './spark/SparkSqlParserVisitor';

@@ -1,8 +0,0 @@
-import { Lexer } from "antlr4ng";
-
-export abstract class PlSqlBaseLexer extends Lexer {
-    IsNewlineAtPos(pos: number): boolean {
-        const la = this._input.LA(pos);
-        return la == -1 || String.fromCharCode(la) == '\n';
-    }
-}

@@ -1,29 +0,0 @@
-import { TokenStream } from "antlr4ng";
-import {SQLParserBase} from "../SQLParserBase";
-
-export abstract class PlSqlBaseParser extends SQLParserBase {
-    private _isVersion10: boolean = false;
-    private _isVersion12: boolean = true;
-
-    constructor(input: TokenStream) {
-        super(input);
-        this._isVersion10 = false;
-        this._isVersion12 = true;
-    }
-
-    isVersion10(): boolean {
-        return this._isVersion10;
-    }
-
-    isVersion12(): boolean {
-        return this._isVersion12;
-    }
-
-    setVersion10(value: boolean): void {
-        this._isVersion10 = value;
-    }
-
-    setVersion12(value: boolean): void {
-        this._isVersion12 = value;
-    }
-}

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because it is too large

@@ -1,5 +1,4 @@
 export { MySQL } from './mysql';
-export { PLSQL } from './plsql';
 export { HiveSQL } from './hive';
 export { FlinkSQL } from './flink';
 export { SparkSQL } from './spark';

@@ -1,37 +0,0 @@
-import { CharStream, CommonTokenStream, Token } from 'antlr4ng';
-import { CandidatesCollection } from 'antlr4-c3';
-
-import { PlSqlLexer } from '../lib/plsql/PlSqlLexer';
-import { PlSqlParser, ProgramContext } from '../lib/plsql/PlSqlParser';
-import { BasicSQL } from './common/basicSQL';
-import { Suggestions } from './common/types';
-
-export class PLSQL extends BasicSQL<PlSqlLexer, ProgramContext, PlSqlParser> {
-    protected createLexerFromCharStream(charStreams: CharStream) {
-        return new PlSqlLexer(charStreams);
-    }
-
-    protected createParserFromTokenStream(tokenStream: CommonTokenStream) {
-        return new PlSqlParser(tokenStream);
-    }
-
-    protected preferredRules: Set<number> = new Set();
-
-    protected get splitListener() {
-        return null as any;
-    }
-
-    protected createEntityCollector(input: string, caretTokenIndex?: number) {
-        return null as any;
-    }
-
-    protected processCandidates(
-        candidates: CandidatesCollection,
-        allTokens: Token[],
-        caretTokenIndex: number
-    ): Suggestions<Token> {
-        return {
-            syntax: [],
-            keywords: [],
-        };
-    }
-}
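
Because the PLSQL class is deleted outright rather than deprecated, code that constructed it has no drop-in replacement. The sketch below shows the shape of a migration to one of the remaining dialects, assuming they expose the same BasicSQL-derived surface the deleted PLSQL tests exercised (validate, parse, getAllTokens, listen), and with the caveat that the target dialect's grammar must actually accept the SQL being fed to it:

// Hypothetical migration away from the removed PLSQL class. MySQL is used
// here only as an example target; any remaining export (HiveSQL, SparkSQL,
// FlinkSQL, ...) follows the same pattern.
import { MySQL } from 'dt-sql-parser';

// Before: const parser = new PLSQL();   // no longer exported after this commit
const parser = new MySQL();

const sql = 'select id, name, sex from user1;';
const errors = parser.validate(sql);      // empty array when the input parses
const tokens = parser.getAllTokens(sql);  // antlr4ng tokens for the statement

console.log(errors.length, tokens.length);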

@@ -1,202 +0,0 @@
-import { Legacy_TokenType, Legacy_Token, Legacy_TokenReg } from './token';
-
-/**
- * @param {String} sql
- * @deprecated use parser.createLexer() instead.
- */
-function legacy_lexer(input: string): Legacy_Token[] {
-    let current = 0;
-    let line = 1;
-    const tokens: Legacy_Token[] = [];
-
-    const extract = (
-        currentChar: string,
-        validator: RegExp,
-        TokenType: Legacy_TokenType
-    ): Legacy_Token => {
-        let value = '';
-        const start = current;
-        while (validator.test(currentChar)) {
-            value += currentChar;
-            currentChar = input[++current];
-        }
-        return {
-            type: TokenType,
-            start: start,
-            end: current,
-            lineNumber: line,
-            value: value,
-        };
-    };
-
-    const matchFunction = () => {
-        const bracketNum = [current];
-        for (let i = current + 1; i < input.length; i++) {
-            const currentChar = input[i];
-            if (currentChar === '\n') {
-                line++;
-            }
-            if (Legacy_TokenReg.LeftSmallBracket.test(currentChar)) {
-                bracketNum.push(i);
-            }
-            if (Legacy_TokenReg.RightSmallBracket.test(currentChar)) {
-                const start = bracketNum.pop();
-                const end = i + 1;
-                if (bracketNum.length === 0) {
-                    current = end;
-                    tokens.push({
-                        type: Legacy_TokenType.FunctionArguments,
-                        value: input.slice(start, end),
-                        start,
-                        lineNumber: line,
-                        end,
-                    });
-                    return;
-                }
-            }
-        }
-    };
-
-    const matchQuotation = (
-        currentChar: string,
-        validator: RegExp,
-        TokenType: Legacy_TokenType
-    ) => {
-        do {
-            if (currentChar === '\n') {
-                line++;
-            }
-            currentChar = input[++current];
-        } while (!validator.test(currentChar));
-
-        ++current;
-    };
-
-    while (current < input.length) {
-        let char = input[current];
-
-        if (char === '\n') {
-            line++;
-            current++;
-            continue;
-        }
-
-        if (Legacy_TokenReg.LeftSmallBracket.test(char)) {
-            matchFunction();
-            continue;
-        }
-
-        if (Legacy_TokenReg.BackQuotation.test(char)) {
-            matchQuotation(char, Legacy_TokenReg.BackQuotation, Legacy_TokenType.BackQuotation);
-            continue;
-        }
-
-        if (Legacy_TokenReg.SingleQuotation.test(char)) {
-            matchQuotation(char, Legacy_TokenReg.SingleQuotation, Legacy_TokenType.SingleQuotation);
-            continue;
-        }
-
-        if (Legacy_TokenReg.DoubleQuotation.test(char)) {
-            matchQuotation(char, Legacy_TokenReg.DoubleQuotation, Legacy_TokenType.DoubleQuotation);
-            continue;
-        }
-
-        if (char === '-' && input[current + 1] === '-') {
-            let value = '';
-            const start = current;
-
-            while (char !== '\n' && current < input.length) {
-                value += char;
-                char = input[++current];
-            }
-            tokens.push({
-                type: Legacy_TokenType.Comment,
-                value,
-                start: start,
-                lineNumber: line,
-                end: current,
-            });
-            continue;
-        }
-
-        if (char === '/' && input[current + 1] === '*') {
-            let value = '';
-            const start = current;
-            const startLine = line;
-
-            while (!(char === '/' && input[current - 1] === '*')) {
-                if (char === '\n') {
-                    line++;
-                }
-                value += char;
-                char = input[++current];
-            }
-            value += char;
-            ++current;
-
-            tokens.push({
-                type: Legacy_TokenType.Comment,
-                value,
-                start: start,
-                lineNumber: startLine,
-                end: current,
-            });
-            continue;
-        }
-
-        if (Legacy_TokenReg.StatementTerminator.test(char)) {
-            const newToken = extract(
-                char,
-                Legacy_TokenReg.StatementTerminator,
-                Legacy_TokenType.StatementTerminator
-            );
-            tokens.push(newToken);
-            continue;
-        }
-
-        current++;
-    }
-    return tokens;
-}
-
-/**
- * split sql
- * @param {String} sql
- * @deprecated use parser.splitSQLByStatement() instead.
- */
-function legacy_splitSql(sql: string) {
-    const tokens = legacy_lexer(sql);
-    const sqlArr = [];
-    let startIndex = 0;
-    tokens.forEach((ele: Legacy_Token) => {
-        if (ele.type === Legacy_TokenType.StatementTerminator) {
-            sqlArr.push(sql.slice(startIndex, ele.end));
-            startIndex = ele.end;
-        }
-    });
-    if (startIndex < sql.length) {
-        sqlArr.push(sql.slice(startIndex));
-    }
-    return sqlArr;
-}
-
-/**
- * clean comment
- * @param {String} sql
- * @deprecated will be removed in future.
- */
-function legacy_cleanSql(sql: string) {
-    sql = sql.trim();
-    const tokens = legacy_lexer(sql);
-    let resultSql = '';
-    let startIndex = 0;
-    tokens.forEach((ele: Legacy_Token) => {
-        if (ele.type === Legacy_TokenType.Comment) {
-            resultSql += sql.slice(startIndex, ele.start);
-            startIndex = ele.end + 1;
-        }
-    });
-    resultSql += sql.slice(startIndex);
-    return resultSql;
-}
-
-export { legacy_cleanSql, legacy_splitSql, legacy_lexer };
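
The @deprecated notes on these helpers name their replacements on the parser classes. A sketch of the swap, with the caveat that the exact return shapes of splitSQLByStatement and createLexer are assumptions to be checked against the BasicSQL typings of the release being upgraded to:

import { MySQL } from 'dt-sql-parser';

const parser = new MySQL();
const sql = `-- a comment ;
select * from a;
select user from b;`;

// legacy_splitSql(sql)  ->  parser.splitSQLByStatement(sql)
const statements = parser.splitSQLByStatement(sql);

// legacy_lexer(sql)     ->  parser.createLexer(sql) for an antlr4ng lexer,
//                           or parser.getAllTokens(sql) for a flat token array
const tokens = parser.getAllTokens(sql);

// legacy_cleanSql(sql) has no direct replacement; comment tokens can instead
// be identified from the lexer output if stripping them is still required.
console.log(statements?.length, tokens.length);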

@@ -1,62 +0,0 @@
-/**
- * @deprecated will be removed in future.
- */
-export enum Legacy_TokenType {
-    /**
-     * Enclosed in single/double/back quotation, `` Symbol
-     * 'abc', "abc", `abc`
-     */
-    SingleQuotation = 'SingleQuotation',
-    DoubleQuotation = 'DoubleQuotation',
-    BackQuotation = 'BackQuotation',
-
-    /**
-     * Language element type
-     */
-    Comment = 'Comment',
-    /**
-     * Statement
-     */
-    StatementTerminator = 'StatementTerminator',
-    /**
-     * Others
-     */
-    Error = 'Error',
-    /**
-     * Left small Bracket
-     */
-    LeftSmallBracket = 'LeftSmallBracket',
-    /**
-     * Right small Bracket
-     */
-    RightSmallBracket = 'RightSmallBracket',
-    Comma = 'Comma',
-    FunctionArguments = 'FunctionArguments',
-}
-
-/**
- * @deprecated will be removed in future.
- * Token object
- */
-export interface Legacy_Token {
-    type: Legacy_TokenType;
-    value: string;
-    start?: number;
-    end: number;
-    lineNumber: number;
-    message?: string;
-}
-
-/**
- * @deprecated will be removed in future.
- * Token recognition rules
- */
-export const Legacy_TokenReg = {
-    [Legacy_TokenType.StatementTerminator]: /[;]/,
-    [Legacy_TokenType.SingleQuotation]: /['|\']/,
-    [Legacy_TokenType.DoubleQuotation]: /["]/,
-    [Legacy_TokenType.BackQuotation]: /[`]/,
-    [Legacy_TokenType.LeftSmallBracket]: /[(]/,
-    [Legacy_TokenType.RightSmallBracket]: /[)]/,
-    [Legacy_TokenType.Comma]: /[,]/,
-};

@@ -1,12 +0,0 @@
-import { PLSQL } from 'src/parser/plsql';
-
-describe('PLSQL Lexer tests', () => {
-    const plsql = new PLSQL();
-
-    const sql = 'select id,name,sex from user1;';
-    const tokens = plsql.getAllTokens(sql);
-
-    test('token counts', () => {
-        expect(tokens.length).toBe(12);
-    });
-});

@@ -1,24 +0,0 @@
-import { PLSQL } from 'src/parser/plsql';
-import { PlSqlParserListener } from 'src/lib/plsql/PlSqlParserListener';
-
-describe('PLSQL Listener Tests', () => {
-    const expectTableName = 'user1';
-    const sql = `select id,name,sex from ${expectTableName};`;
-    const plsql = new PLSQL();
-
-    const parseTree = plsql.parse(sql);
-
-    test('Listener enterTableName', async () => {
-        class MyListener extends PlSqlParserListener {
-            result = '';
-
-            enterTable_ref_list = (ctx) => {
-                this.result = ctx.getText().toLowerCase();
-            };
-        }
-        const listener = new MyListener();
-
-        plsql.listen(listener, parseTree);
-        expect(listener.result).toBe(expectTableName);
-    });
-});

@@ -1,23 +0,0 @@
-import { PLSQL } from 'src/parser/plsql';
-
-describe('PLSQL Syntax Tests', () => {
-    const plsql = new PLSQL();
-
-    test('Test simple select Statement', () => {
-        const sql = 'select id,name from user1;';
-        const result = plsql.validate(sql);
-        expect(result.length).toBe(0);
-    });
-
-    test(`Test select, where, order by`, () => {
-        const sql = `
-        select eid, emp_last, mgr_id, reportlevel
-        from reports_to_101 r, auto a
-        where r.c1 = a.c2
-        order by reportlevel, eid
-        `;
-        const result = plsql.validate(sql);
-        expect(result.length).toBe(0);
-    });
-});

@@ -1,31 +0,0 @@
-import { PLSQL } from 'src/parser/plsql';
-import { PlSqlParserVisitor } from 'src/lib/plsql/PlSqlParserVisitor';
-
-describe('PLSQL Visitor Tests', () => {
-    const expectTableName = 'user1';
-    const sql = `select id,name,sex from ${expectTableName};`;
-    const plsql = new PLSQL();
-
-    const parseTree = plsql.parse(sql);
-
-    test('Visitor visitTable_ref_list', () => {
-        class MyVisitor extends PlSqlParserVisitor<string> {
-            defaultResult(): string {
-                return '';
-            }
-            aggregateResult(aggregate: string, nextResult: string): string {
-                return aggregate + nextResult;
-            }
-            visitProgram = (ctx) => {
-                return this.visitChildren(ctx);
-            };
-            visitTable_ref_list = (ctx) => {
-                return ctx.getText().toLowerCase();
-            };
-        }
-
-        const visitor = new MyVisitor();
-        const result = visitor.visit(parseTree);
-        expect(result).toBe(expectTableName);
-    });
-});

@@ -1,89 +0,0 @@
-import { legacy_lexer, legacy_splitSql, legacy_cleanSql } from '../../src/utils';
-import { Legacy_TokenType } from '../../src/utils/token';
-
-describe('utils', () => {
-    test('split single sql', () => {
-        const sql = 'select id,name from user';
-        const result = legacy_splitSql(sql);
-        expect(result.length).toEqual(1);
-    });
-
-    test('split multiple sql', () => {
-        const sql = `-- a ;
-            select * from a;
-            /*
-                xxx
-                xxx
-            */
-            select user from b`;
-        const result = legacy_splitSql(sql);
-        expect(result.length).toEqual(2);
-    });
-
-    test('split special quotation sql', () => {
-        const sql = `select regexp_replace('a', 'bc', 'xfe'feefe', '233');
-            select regexp_replace('abc', "fe", '233');`;
-        const result = legacy_splitSql(sql);
-        expect(result.length).toEqual(2);
-    });
-
-    // test nested bracket
-    test('split nested bracket sql', () => {
-        const sql = `WITH cte_sales_amounts (staff, sales, year)
-        AS
-        (
-            SELECT
-                first_name + ' ' + last_name,
-                SUM(quantity * list_price * (1 - discount)),
-                YEAR(order_date)
-            FROM
-                sales.orders o
-            INNER JOIN sales.order_items i ON i.order_id = o.order_id
-            INNER JOIN sales.staffs s ON s.staff_id = o.staff_id
-        )
-        SELECT staff, sales
-        FROM cte_sales_amounts
-        WHERE year = 2018;
-        SELECT * FROM table;`;
-        const result = legacy_splitSql(sql);
-        expect(result.length).toEqual(2);
-    });
-
-    test('lexer', () => {
-        const sql = `-- a ;
-            select * from a;
-            /*
-                xxx
-                xxx
-            */
-            select user from b;`;
-        const result = legacy_lexer(sql);
-        expect(result.length).toEqual(4);
-    });
-
-    test('lexer for comments', () => {
-        const sql = `select * from a;--comments`;
-        const expected = `--comments`;
-        const result = legacy_lexer(sql);
-        const comments = result.find((token) => token.type === Legacy_TokenType.Comment);
-        expect(comments?.value).toEqual(expected);
-    });
-
-    test('cleanSql', () => {
-        const sql = `-- a ;
-            select * from a;
-            /*
-                xxx
-                xxx
-            */
-            select user from b`;
-        const result = legacy_cleanSql(sql);
-        expect(result.indexOf('xxx')).toEqual(-1);
-    });
-
-    test('clean SQL white spaces', () => {
-        const sql = `
-            select * from a; `;
-        const expected = 'select * from a;';
-        const result = legacy_cleanSql(sql);
-        expect(result).toEqual(expected);
-    });
-});