chore: devops (#180)

* ci: add dependencies about lint tool

* ci: replace eslint with prettier

* ci: add husky, cz and commitlint

* style: lint fix via prettier

* ci: add prettier and check-types to github workflow

'
This commit is contained in:
Hayden
2023-10-13 11:16:36 +08:00
committed by GitHub
parent 4d1dfa676f
commit 7de192d486
105 changed files with 2615 additions and 1823 deletions

View File

@ -10,7 +10,8 @@ export const readSQL = (dirname: string, fileName: string) => {
const char = content[index];
tmp += char;
const isMulti = tmp.includes('EXECUTE STATEMENT SET') || tmp.includes('BEGIN STATEMENT SET;');
const isMulti =
tmp.includes('EXECUTE STATEMENT SET') || tmp.includes('BEGIN STATEMENT SET;');
if (!isMulti) {
// 非批量的先简单按照分号切割
@ -31,14 +32,14 @@ export const readSQL = (dirname: string, fileName: string) => {
/**
* Benchmark for a function.
* @param name
* @param fn
* @param name
* @param fn
* @param times , default 1
* @returns [ totalTime, averageTime, msg ]
*/
export function benchmark(name: string, fn: Function, times: number = 1): [number, number, string] {
const start = performance.now();
for (let i = 0; i < times; i++) {
fn();
}
@ -46,13 +47,14 @@ export function benchmark(name: string, fn: Function, times: number = 1): [numbe
const totalTime = end - start;
const averageTime = totalTime / times;
const msg = `Benchmark: ${name} executed ${times} times. Total time: ${totalTime.toFixed(2)}ms. Average time: ${averageTime.toFixed(2)}ms`;
console.log(msg)
const msg = `Benchmark: ${name} executed ${times} times. Total time: ${totalTime.toFixed(
2
)}ms. Average time: ${averageTime.toFixed(2)}ms`;
console.log(msg);
return [totalTime, averageTime, msg];
}
/**
 * Build one markdown table row for a benchmark report.
 * @param name benchmark name shown in the first column
 * @param rows number of SQL rows the benchmark parsed
 * @param times how many times the benchmark ran
 * @param totalTime total elapsed time in ms (rendered with 2 decimals)
 * @param averageTime average time per run in ms (rendered with 2 decimals)
 * @returns a `| name | rows | times | total | average |` markdown row
 */
export function getReportTableRow(
    name: string,
    rows: number,
    times: number,
    totalTime: number,
    averageTime: number
): string {
    return `| ${name} | ${rows} | ${times} | ${totalTime.toFixed(2)} | ${averageTime.toFixed(2)} |`;
}
@ -65,4 +67,4 @@ export function getReportTableHeader(title: string) {
/**
 * Persist a markdown benchmark report as `benchmark.md` inside the
 * given output directory (overwrites any existing file).
 * @param markdown full report content to write
 * @param output directory the report file is written into
 */
export function exportReportTable(markdown: string, output: string) {
    const reportPath = path.join(output, 'benchmark.md');
    fs.writeFileSync(reportPath, markdown);
}
}

View File

@ -2,7 +2,13 @@ import path from 'path';
import { writeFileSync } from 'node:fs';
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL, benchmark, getReportTableHeader, getReportTableRow, exportReportTable } from '../../../helper';
import {
readSQL,
benchmark,
getReportTableHeader,
getReportTableRow,
exportReportTable,
} from '../../../helper';
const features = {
selectTable: readSQL(__dirname, 'selectTable.sql'),
@ -15,13 +21,11 @@ describe('FlinkSQL benchmark tests', () => {
let reportsHeader = getReportTableHeader('FlinkSQL Benchmark');
const reportData: string[] = [];
test('createTable Over 100 Rows', async () => {
const [totalTimes, averageTimes , msg] = benchmark('CreateTable Over 100 Rows', () => {
const [totalTimes, averageTimes, msg] = benchmark('CreateTable Over 100 Rows', () => {
const testSQL = features.createTable[0];
const res = parser.validate(testSQL);
expect(res).toEqual([])
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('CreateTable', 100, 1, totalTimes, averageTimes));
});
@ -30,91 +34,75 @@ describe('FlinkSQL benchmark tests', () => {
const [totalTimes, averageTimes, msg] = benchmark('CreateTable Over 1000 Rows', () => {
const testSQL = features.createTable[1];
const res = parser.validate(testSQL);
expect(res).toEqual([])
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('CreateTable', 1000, 1, totalTimes, averageTimes));
});
test('createTable Over 5000 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('CreateTable Over 5000 Rows', () => {
const testSQL = features.createTable[2];
const res = parser.validate(testSQL);
expect(res).toEqual([])
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('CreateTable', 5000, 1, totalTimes, averageTimes));
});
test('selectTable Over 100 Rows', async () => {
const [totalTimes, averageTimes , msg] = benchmark('SelectTable Over 100 Rows', () => {
const [totalTimes, averageTimes, msg] = benchmark('SelectTable Over 100 Rows', () => {
const testSQL = features.selectTable[0];
const res = parser.validate(testSQL);
expect(res).toEqual([])
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('SelectTable', 100, 1, totalTimes, averageTimes));
});
test('selectTable Over 1000 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('SelectTable Over 1000 Rows', () => {
const testSQL = features.selectTable[1];
const res = parser.validate(testSQL);
expect(res).toEqual([])
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('SelectTable', 1000, 1, totalTimes, averageTimes));
});
test('selectTable Over 5000 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('SelectTable Over 5000 Rows', () => {
const testSQL = features.selectTable[2];
const res = parser.validate(testSQL);
expect(res).toEqual([])
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('SelectTable', 5000, 1, totalTimes, averageTimes));
});
test('insertTable Over 100 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('InsertTable Over 100 Rows', () => {
const testSQL = features.insertTable[0];
const res = parser.validate(testSQL);
expect(res).toEqual([])
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('InsertTable', 100, 1, totalTimes, averageTimes));
});
test('insertTable Over 1000 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('InsertTable Over 1000 Rows', () => {
const testSQL = features.insertTable[1];
const res = parser.validate(testSQL);
expect(res).toEqual([])
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('InsertTable', 1000, 1, totalTimes, averageTimes));
});
test('insertTable Over 5000 Rows', async () => {
const [totalTimes, averageTimes, msg] = benchmark('InsertTable Over 5000 Rows', () => {
const testSQL = features.insertTable[2];
const res = parser.validate(testSQL);
expect(res).toEqual([])
expect(res).toEqual([]);
});
reportData.push(getReportTableRow('InsertTable', 5000, 1, totalTimes, averageTimes));
});
afterAll(() => {
exportReportTable(reportsHeader + reportData.join('\n'), __dirname)
})
});
exportReportTable(reportsHeader + reportData.join('\n'), __dirname);
});
});

View File

@ -1,6 +1,7 @@
import FlinkSQL from '../../../src/parser/flinksql';
import { FlinkSqlParserListener } from '../../../src/lib/flinksql/FlinkSqlParserListener';
import { TableExpressionContext } from '../../../src/lib/flinksql/FlinkSqlParser';
import { ParseTreeListener } from 'antlr4ts/tree';
describe('Flink SQL Listener Tests', () => {
const expectTableName = 'user1';
@ -12,14 +13,13 @@ describe('Flink SQL Listener Tests', () => {
test('Listener enterTableName', async () => {
let result = '';
class MyListener implements FlinkSqlParserListener {
enterTableExpression = (ctx: TableExpressionContext): void => {
result = ctx.text.toLowerCase();
}
};
}
const listenTableName = new MyListener();
await parser.listen(listenTableName, parserTree);
await parser.listen(listenTableName as ParseTreeListener, parserTree);
expect(result).toBe(expectTableName);
});
});

View File

@ -1,9 +1,12 @@
import fs from 'fs';
import path from 'path';
import { CaretPosition, SyntaxContextType } from '../../../../src/parser/common/basic-parser-types';
import FlinkSQL from '../../../../src/parser/flinksql'
import FlinkSQL from '../../../../src/parser/flinksql';
const syntaxSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'), 'utf-8');
const syntaxSql = fs.readFileSync(
path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),
'utf-8'
);
const multipleSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'multipleSql.sql'), 'utf-8');
describe('Flink SQL Syntax Suggestion', () => {
@ -13,187 +16,201 @@ describe('Flink SQL Syntax Suggestion', () => {
expect(parser.validate(syntaxSql).length).not.toBe(0);
expect(parser.validate(syntaxSql).length).not.toBe(0);
expect(parser.validate(syntaxSql).length).not.toBe(0);
})
});
test("Multiple SQL use database", () => {
test('Multiple SQL use database', () => {
const pos: CaretPosition = {
lineNumber: 19,
column: 10,
}
};
const syntaxes = parser.getSuggestionAtCaretPosition(multipleSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.DATABASE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat1', '.' ]);
})
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cat1', '.']);
});
test('Drop catalog', () => {
const pos: CaretPosition = {
lineNumber: 1,
column: 17
}
column: 17,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.CATALOG);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.CATALOG
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat' ]);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cat']);
});
test('Select table', () => {
const pos: CaretPosition = {
lineNumber: 3,
column: 19
}
column: 19,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
);
console.log(syntaxes);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat', '.' ])
})
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cat', '.']);
});
test('Create table', () => {
const pos: CaretPosition = {
lineNumber: 5,
column: 20
}
column: 20,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat', '.', 'db' ])
})
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cat', '.', 'db']);
});
test('Show tables from', () => {
const pos: CaretPosition = {
lineNumber: 7,
column: 21
}
column: 21,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.DATABASE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat' ])
})
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cat']);
});
test('Alter database', () => {
const pos: CaretPosition = {
lineNumber: 9,
column: 20
}
column: 20,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.DATABASE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'cat', '.' ])
})
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cat', '.']);
});
test('Drop view', () => {
const pos: CaretPosition = {
lineNumber: 11,
column: 12
}
column: 12,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.VIEW);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'v' ]);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['v']);
});
test('Select view', () => {
const pos: CaretPosition = {
lineNumber: 13,
column: 15
}
column: 15,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.VIEW);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([]);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
});
test('Create view', () => {
const pos: CaretPosition = {
lineNumber: 15,
column: 15
}
column: 15,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual(['cv']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['cv']);
});
test('Function call', () => {
const pos: CaretPosition = {
lineNumber: 17,
column: 27
}
column: 27,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.FUNCTION);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual(['calculate_age']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['calculate_age']);
});
test('Create Function', () => {
const pos: CaretPosition = {
lineNumber: 19,
column: 20
}
column: 20,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual(['fnc']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['fnc']);
});
test('Show columns from view', () => {
const pos: CaretPosition = {
lineNumber: 21,
column: 22
}
column: 22,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.VIEW);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual(['vie']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['vie']);
});
test('Show create table', () => {
const pos: CaretPosition = {
lineNumber: 23,
column: 22
}
column: 22,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual(['tb1']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['tb1']);
});
test('Show create view', () => {
const pos: CaretPosition = {
lineNumber: 25,
column: 20
}
column: 20,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.VIEW);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual(['v1']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['v1']);
});
})
});

View File

@ -1,7 +1,7 @@
import fs from 'fs';
import path from 'path';
import { CaretPosition } from '../../../../src/parser/common/basic-parser-types';
import FlinkSQL from '../../../../src/parser/flinksql'
import FlinkSQL from '../../../../src/parser/flinksql';
const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');
@ -11,47 +11,50 @@ describe('Flink SQL Token Suggestion', () => {
test('Use Statement ', () => {
const pos: CaretPosition = {
lineNumber: 3,
column: 5
}
column: 5,
};
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion)
.toEqual([ 'MODULES', 'CATALOG' ])
})
expect(suggestion).toEqual(['MODULES', 'CATALOG']);
});
test('Create Statement ', () => {
const pos: CaretPosition = {
lineNumber: 5,
column: 8
}
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion)
.toEqual([ 'CATALOG', 'FUNCTION', 'TEMPORARY', 'VIEW', 'DATABASE', 'TABLE' ])
})
expect(suggestion).toEqual([
'CATALOG',
'FUNCTION',
'TEMPORARY',
'VIEW',
'DATABASE',
'TABLE',
]);
});
test('Show Statement ', () => {
const pos: CaretPosition = {
lineNumber: 7,
column: 6
}
column: 6,
};
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion)
.toEqual([
'MODULES',
'FULL',
'FUNCTIONS',
'USER',
'CREATE',
'COLUMNS',
'TABLES',
'CURRENT',
'CATALOGS',
'DATABASES',
'JARS',
'VIEWS'
])
})
})
expect(suggestion).toEqual([
'MODULES',
'FULL',
'FUNCTIONS',
'USER',
'CREATE',
'COLUMNS',
'TABLES',
'CURRENT',
'CATALOGS',
'DATABASES',
'JARS',
'VIEWS',
]);
});
});

View File

@ -1,5 +1,5 @@
import FlinkSQL from "../../../../src/parser/flinksql";
import { readSQL } from "../../../helper";
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';
const features = {
table: readSQL(__dirname, 'alterTable.sql'),
@ -26,11 +26,10 @@ describe('FlinkSQL Alter Statements Syntax Tests', () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.function.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});

View File

@ -1,9 +1,9 @@
import FlinkSQL from "../../../../src/parser/flinksql";
import { readSQL } from "../../../helper";
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';
// 综合测试的 sql 不做切割
const features = {
chores: readSQL(__dirname, 'chore.sql')
chores: readSQL(__dirname, 'chore.sql'),
};
describe('FlinkSQL Chore Syntax Tests', () => {

View File

@ -1,10 +1,10 @@
import fs from 'fs';
import path from 'path';
import FlinkSQL from "../../../../src/parser/flinksql";
import FlinkSQL from '../../../../src/parser/flinksql';
// 注释 sql 不做切割
const features = {
comments: fs.readFileSync(path.join(__dirname, 'fixtures', 'comment.sql'), 'utf-8')
comments: fs.readFileSync(path.join(__dirname, 'fixtures', 'comment.sql'), 'utf-8'),
};
describe('FlinkSQL Comment Syntax Tests', () => {

View File

@ -1,10 +1,10 @@
import fs from 'fs';
import path from 'path';
import FlinkSQL from "../../../../src/parser/flinksql";
import FlinkSQL from '../../../../src/parser/flinksql';
// 综合测试的 sql 不做切割
const features = {
templates: fs.readFileSync(path.join(__dirname, 'fixtures', 'templates.sql'), 'utf-8')
templates: fs.readFileSync(path.join(__dirname, 'fixtures', 'templates.sql'), 'utf-8'),
};
describe('FlinkSQL Comprehensive Tests', () => {
@ -13,4 +13,3 @@ describe('FlinkSQL Comprehensive Tests', () => {
expect(parser.validate(features.templates).length).toBe(0);
});
});

View File

@ -1,8 +1,8 @@
import FlinkSQL from "../../../../src/parser/flinksql";
import { readSQL } from "../../../helper";
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';
const features = {
describes: readSQL(__dirname, 'describe.sql')
describes: readSQL(__dirname, 'describe.sql'),
};
describe('FlinkSQL Describe Syntax Tests', () => {

View File

@ -1,5 +1,5 @@
import FlinkSQL from "../../../../src/parser/flinksql";
import { readSQL } from "../../../helper";
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';
const features = {
table: readSQL(__dirname, 'dropTable.sql'),
@ -30,7 +30,7 @@ describe('FlinkSQL Drop Statements Tests', () => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
features.database.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);

View File

@ -1,8 +1,8 @@
import FlinkSQL from "../../../../src/parser/flinksql";
import { readSQL } from "../../../helper";
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';
const features = {
dtAddFiles: readSQL(__dirname, 'dtAddFile.sql')
dtAddFiles: readSQL(__dirname, 'dtAddFile.sql'),
};
describe('FlinkSQL DT Add File Syntax Tests', () => {

View File

@ -1,8 +1,8 @@
import FlinkSQL from "../../../../src/parser/flinksql";
import { readSQL } from "../../../helper";
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';
const features = {
explains: readSQL(__dirname, 'explain.sql')
explains: readSQL(__dirname, 'explain.sql'),
};
describe('FlinkSQL Explain Syntax Tests', () => {

View File

@ -1,13 +1,12 @@
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';
const parser = new FlinkSQL();
const features = {
InsertFromSelectQueries: readSQL(__dirname, 'insertFromSelectQueries.sql'),
InsertValuesIntoTable: readSQL(__dirname, 'insertValuesIntoTable.sql'),
InsertMultipleTable: readSQL(__dirname, 'insertMultipleTable.sql')
InsertMultipleTable: readSQL(__dirname, 'insertMultipleTable.sql'),
};
describe('FlinkSQL Insert Syntax Tests', () => {

View File

@ -1,22 +1,22 @@
import FlinkSQL from "../../../../src/parser/flinksql";
import { readSQL } from "../../../helper";
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';
const parser = new FlinkSQL();
const features = {
base: readSQL(__dirname, "select.sql"),
withClause: readSQL(__dirname, "selectWithClause.sql"),
distinct: readSQL(__dirname, "selectDistinct.sql"),
windowTVF: readSQL(__dirname, "selectWindowTVF.sql"),
aggregation: readSQL(__dirname, "selectAggregation.sql"),
join: readSQL(__dirname, "selectJoin.sql"),
setOperation: readSQL(__dirname, "selectSetOperations.sql"),
pattern: readSQL(__dirname, "selectPatternRecognition.sql"),
where: readSQL(__dirname, "selectWhere.sql"),
base: readSQL(__dirname, 'select.sql'),
withClause: readSQL(__dirname, 'selectWithClause.sql'),
distinct: readSQL(__dirname, 'selectDistinct.sql'),
windowTVF: readSQL(__dirname, 'selectWindowTVF.sql'),
aggregation: readSQL(__dirname, 'selectAggregation.sql'),
join: readSQL(__dirname, 'selectJoin.sql'),
setOperation: readSQL(__dirname, 'selectSetOperations.sql'),
pattern: readSQL(__dirname, 'selectPatternRecognition.sql'),
where: readSQL(__dirname, 'selectWhere.sql'),
};
describe("FlinkSQL Query Statement Tests", () => {
describe("Base Select", () => {
describe('FlinkSQL Query Statement Tests', () => {
describe('Base Select', () => {
features.base.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
@ -24,7 +24,7 @@ describe("FlinkSQL Query Statement Tests", () => {
});
});
describe("With Clause Select", () => {
describe('With Clause Select', () => {
features.withClause.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
@ -32,60 +32,59 @@ describe("FlinkSQL Query Statement Tests", () => {
});
});
describe("Select DISTINCT", () => {
describe('Select DISTINCT', () => {
features.distinct.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
})
})
});
});
describe("Select Window TVF", () => {
describe('Select Window TVF', () => {
features.windowTVF.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
})
})
});
});
describe("Select Aggregation", () => {
describe('Select Aggregation', () => {
features.aggregation.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
})
})
});
});
describe("Select Join", () => {
describe('Select Join', () => {
features.join.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
})
})
});
});
describe("Select Set Operations", () => {
describe('Select Set Operations', () => {
features.setOperation.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
})
})
});
});
describe("Select Pattern Recognition", () => {
describe('Select Pattern Recognition', () => {
features.pattern.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
})
})
});
});
describe("Select Where", () => {
describe('Select Where', () => {
features.where.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0)
})
})
})
expect(parser.validate(sql).length).toBe(0);
});
});
});
});

View File

@ -1,8 +1,8 @@
import FlinkSQL from "../../../../src/parser/flinksql";
import { readSQL } from "../../../helper";
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';
const features = {
shows: readSQL(__dirname, 'show.sql')
shows: readSQL(__dirname, 'show.sql'),
};
describe('FlinkSQL Show Syntax Tests', () => {

View File

@ -1,13 +1,13 @@
import FlinkSQL from "../../../../src/parser/flinksql";
import { readSQL } from "../../../helper";
import FlinkSQL from '../../../../src/parser/flinksql';
import { readSQL } from '../../../helper';
const features = {
uses: readSQL(__dirname, 'use.sql')
uses: readSQL(__dirname, 'use.sql'),
};
describe('FlinkSQL Use Syntax Tests', () => {
const parser = new FlinkSQL();
const parser = new FlinkSQL();
features.uses.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);

View File

@ -13,13 +13,16 @@ describe('Flink SQL Visitor Tests', () => {
test('Visitor visitTableName', () => {
let result = '';
class MyVisitor extends AbstractParseTreeVisitor<any> implements FlinkSqlParserVisitor<any>{
class MyVisitor
extends AbstractParseTreeVisitor<any>
implements FlinkSqlParserVisitor<any>
{
protected defaultResult() {
return result;
}
}
visitTableExpression = (ctx): void => {
result = ctx.text.toLowerCase();
}
};
}
const visitor: any = new MyVisitor();
visitor.visit(parserTree);

View File

@ -1,5 +1,6 @@
import GenericSQL from '../../../src/parser/generic';
import { SqlParserListener } from '../../../src/lib/generic/SqlParserListener';
import { ParseTreeListener } from 'antlr4ts/tree';
describe('Generic SQL Listener Tests', () => {
const expectTableName = 'user1';
@ -13,11 +14,11 @@ describe('Generic SQL Listener Tests', () => {
class MyListener implements SqlParserListener {
enterTableName = (ctx): void => {
result = ctx.text.toLowerCase();
}
};
}
const listenTableName: any = new MyListener();
await parser.listen(listenTableName, parserTree);
await parser.listen(listenTableName as ParseTreeListener, parserTree);
expect(result).toBe(expectTableName);
});
});

View File

@ -17,10 +17,10 @@ describe('Generic SQL Visitor Tests', () => {
protected defaultResult() {
return result;
}
visitTableName = (ctx): void => {
result = ctx.text.toLowerCase();
}
};
}
const visitor = new MyVisitor();
visitor.visit(parserTree);

View File

@ -1,8 +1,8 @@
import { ParseTreeListener } from 'antlr4ts/tree';
import { ProgramContext } from '../../../src/lib/hive/HiveSqlParser';
import { HiveSqlParserListener } from '../../../src/lib/hive/HiveSqlParserListener';
import HiveSQL from '../../../src/parser/hive';
describe('HiveSQL Listener Tests', () => {
const parser = new HiveSQL();
test('Listener enterSelectList', async () => {
@ -16,9 +16,9 @@ describe('HiveSQL Listener Tests', () => {
result = ctx.text;
}
}
const listenTableName: any = new MyListener();
const listenTableName = new MyListener();
await parser.listen(listenTableName, parserTree as ProgramContext);
await parser.listen(listenTableName as ParseTreeListener, parserTree as ProgramContext);
expect(result).toBe(expectTableName.toUpperCase());
});
test('Listener enterCreateTable', async () => {
@ -30,9 +30,9 @@ describe('HiveSQL Listener Tests', () => {
result = ctx.text;
}
}
const listenTableName: any = new MyListener();
const listenTableName = new MyListener();
await parser.listen(listenTableName, parserTree as ProgramContext);
await parser.listen(listenTableName as ParseTreeListener, parserTree as ProgramContext);
expect(result).toBe('DROPTABLETABLE_NAME');
});
});

View File

@ -1,9 +1,12 @@
import fs from 'fs';
import path from 'path';
import { CaretPosition, SyntaxContextType } from '../../../../src/parser/common/basic-parser-types';
import HiveSQL from '../../../../src/parser/hive'
import HiveSQL from '../../../../src/parser/hive';
const syntaxSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'), 'utf-8');
const syntaxSql = fs.readFileSync(
path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),
'utf-8'
);
describe('Hive SQL Syntax Suggestion', () => {
const parser = new HiveSQL();
@ -17,131 +20,140 @@ describe('Hive SQL Syntax Suggestion', () => {
test('Insert table ', () => {
const pos: CaretPosition = {
lineNumber: 1,
column: 18
}
column: 18,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db', '.', 'tb' ])
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'tb']);
});
test('Select table ', () => {
const pos: CaretPosition = {
lineNumber: 3,
column: 18
}
column: 18,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db', '.' ])
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
});
test('Create table ', () => {
const pos: CaretPosition = {
lineNumber: 5,
column: 17
}
column: 17,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db', '.' ])
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
});
test('DROP table ', () => {
const pos: CaretPosition = {
lineNumber: 7,
column: 26
}
column: 26,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.TABLE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db', '.', 'a' ])
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'a']);
});
test('Create view ', () => {
const pos: CaretPosition = {
lineNumber: 9,
column: 28
}
column: 28,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db', '.', 'v' ])
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'v']);
});
test('Drop view ', () => {
const pos: CaretPosition = {
lineNumber: 11,
column: 15
}
column: 15,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.VIEW);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db', '.', 'v' ])
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'v']);
});
test('Create function ', () => {
const pos: CaretPosition = {
lineNumber: 13,
column: 20
}
column: 20,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'fn1' ])
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['fn1']);
});
test('Use function', () => {
const pos: CaretPosition = {
lineNumber: 15,
column: 27
}
column: 27,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.FUNCTION);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'calculate_age' ])
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['calculate_age']);
});
test('Create database', () => {
const pos: CaretPosition = {
lineNumber: 17,
column: 19
}
column: 19,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'db' ])
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db']);
});
test('Drop database', () => {
const pos: CaretPosition = {
lineNumber: 19,
column: 26
}
column: 26,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find(syn => syn.syntaxContextType === SyntaxContextType.DATABASE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map(token => token.text))
.toEqual([ 'sch' ])
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['sch']);
});
})
});

View File

@ -1,232 +1,191 @@
import fs from "fs";
import path from "path";
import { CaretPosition } from "../../../../src/parser/common/basic-parser-types";
import HiveSQL from "../../../../src/parser/hive";
import fs from 'fs';
import path from 'path';
import { CaretPosition } from '../../../../src/parser/common/basic-parser-types';
import HiveSQL from '../../../../src/parser/hive';
const tokenSql = fs.readFileSync(
path.join(__dirname, "fixtures", "tokenSuggestion.sql"),
"utf-8"
);
const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');
describe("Hive SQL Syntax Suggestion", () => {
describe('Hive SQL Syntax Suggestion', () => {
const parser = new HiveSQL();
test("After ALTER", () => {
test('After ALTER', () => {
const pos: CaretPosition = {
lineNumber: 1,
column: 7,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
"APPLICATION",
"GROUP",
"USER",
"POOL",
"TRIGGER",
"RESOURCE",
"SCHEDULED",
"INDEX",
"CONNECTOR",
"DATABASE",
"SCHEMA",
"MATERIALIZED",
"VIEW",
"TABLE",
'APPLICATION',
'GROUP',
'USER',
'POOL',
'TRIGGER',
'RESOURCE',
'SCHEDULED',
'INDEX',
'CONNECTOR',
'DATABASE',
'SCHEMA',
'MATERIALIZED',
'VIEW',
'TABLE',
]);
});
test("After CREATE", () => {
test('After CREATE', () => {
const pos: CaretPosition = {
lineNumber: 3,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
"CONNECTOR",
"APPLICATION",
"GROUP",
"USER",
"POOL",
"TRIGGER",
"RESOURCE",
"ROLE",
"INDEX",
"TEMPORARY",
"FUNCTION",
"SCHEDULED",
"MATERIALIZED",
"VIEW",
"OR",
"MANAGED",
"TABLE",
"EXTERNAL",
"TRANSACTIONAL",
"REMOTE",
"DATABASE",
"SCHEMA",
'CONNECTOR',
'APPLICATION',
'GROUP',
'USER',
'POOL',
'TRIGGER',
'RESOURCE',
'ROLE',
'INDEX',
'TEMPORARY',
'FUNCTION',
'SCHEDULED',
'MATERIALIZED',
'VIEW',
'OR',
'MANAGED',
'TABLE',
'EXTERNAL',
'TRANSACTIONAL',
'REMOTE',
'DATABASE',
'SCHEMA',
]);
});
test("After DELETE", () => {
test('After DELETE', () => {
const pos: CaretPosition = {
lineNumber: 5,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual(['FROM']);
});
test("After DESCRIBE", () => {
test('After DESCRIBE', () => {
const pos: CaretPosition = {
lineNumber: 7,
column: 10,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
"EXTENDED",
"FORMATTED",
"FUNCTION",
"CONNECTOR",
"DATABASE",
"SCHEMA",
'EXTENDED',
'FORMATTED',
'FUNCTION',
'CONNECTOR',
'DATABASE',
'SCHEMA',
]);
});
test("After DROP", () => {
test('After DROP', () => {
const pos: CaretPosition = {
lineNumber: 9,
column: 6,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
"CONNECTOR",
"APPLICATION",
"GROUP",
"USER",
"POOL",
"TRIGGER",
"RESOURCE",
"ROLE",
"INDEX",
"TEMPORARY",
"FUNCTION",
"MATERIALIZED",
"VIEW",
"SCHEDULED",
"TABLE",
"DATABASE",
"SCHEMA",
'CONNECTOR',
'APPLICATION',
'GROUP',
'USER',
'POOL',
'TRIGGER',
'RESOURCE',
'ROLE',
'INDEX',
'TEMPORARY',
'FUNCTION',
'MATERIALIZED',
'VIEW',
'SCHEDULED',
'TABLE',
'DATABASE',
'SCHEMA',
]);
});
test("After EXPORT", () => {
test('After EXPORT', () => {
const pos: CaretPosition = {
lineNumber: 11,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual(['TABLE']);
});
test("After IMPORT", () => {
test('After IMPORT', () => {
const pos: CaretPosition = {
lineNumber: 13,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
expect(suggestion).toEqual([
"FROM",
"TABLE",
"EXTERNAL",
]);
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual(['FROM', 'TABLE', 'EXTERNAL']);
});
test("After INSERT", () => {
test('After INSERT', () => {
const pos: CaretPosition = {
lineNumber: 15,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
expect(suggestion).toEqual([
"INTO",
"OVERWRITE",
]);
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual(['INTO', 'OVERWRITE']);
});
test("After LOAD", () => {
test('After LOAD', () => {
const pos: CaretPosition = {
lineNumber: 17,
column: 6,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
expect(suggestion).toEqual(["DATA"
]);
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual(['DATA']);
});
test("After SHOW", () => {
test('After SHOW', () => {
const pos: CaretPosition = {
lineNumber: 19,
column: 6,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
"CURRENT",
"ROLES",
"PRINCIPALS",
"ROLE",
"GRANT",
"INDEX",
"INDEXES",
"FORMATTED",
"CONNECTORS",
"RESOURCE",
"CONF",
"TRANSACTIONS",
"COMPACTIONS",
"LOCKS",
"TBLPROPERTIES",
"TABLE",
"CREATE",
"PARTITIONS",
"FUNCTIONS",
"COLUMNS",
"SORTED",
"MATERIALIZED",
"VIEWS",
"TABLES",
"EXTENDED",
"DATABASES",
"SCHEMAS",
'CURRENT',
'ROLES',
'PRINCIPALS',
'ROLE',
'GRANT',
'INDEX',
'INDEXES',
'FORMATTED',
'CONNECTORS',
'RESOURCE',
'CONF',
'TRANSACTIONS',
'COMPACTIONS',
'LOCKS',
'TBLPROPERTIES',
'TABLE',
'CREATE',
'PARTITIONS',
'FUNCTIONS',
'COLUMNS',
'SORTED',
'MATERIALIZED',
'VIEWS',
'TABLES',
'EXTENDED',
'DATABASES',
'SCHEMAS',
]);
});
});

View File

@ -12,7 +12,7 @@ const features = {
indexes: readSQL(__dirname, 'createIndex.sql'),
macros: readSQL(__dirname, 'createMacro.sql'),
connectors: readSQL(__dirname, 'createConnector.sql'),
scheduledQueries: readSQL(__dirname, 'createScheduledQuery.sql')
scheduledQueries: readSQL(__dirname, 'createScheduledQuery.sql'),
};
describe('HiveSQL Create Syntax Tests', () => {

View File

@ -5,7 +5,7 @@ const parser = new HiveSQL();
const features = {
drops: readSQL(__dirname, 'drop.sql'),
reloads: readSQL(__dirname, 'reload.sql')
reloads: readSQL(__dirname, 'reload.sql'),
};
describe('HiveSQL Drop Syntax Tests', () => {

View File

@ -2,7 +2,7 @@ import HiveSQL from '../../../../src/parser/hive';
import { readSQL } from '../../../helper';
const features = {
exports: readSQL(__dirname, 'export.sql')
exports: readSQL(__dirname, 'export.sql'),
};
describe('HiveSQL Export Syntax Tests', () => {
@ -14,4 +14,3 @@ describe('HiveSQL Export Syntax Tests', () => {
});
});
});

View File

@ -1,8 +1,8 @@
import HiveSQL from '../../../../src/parser/hive';
import { readSQL } from "../../../helper";
import { readSQL } from '../../../helper';
const features = {
imports: readSQL(__dirname, 'import.sql')
imports: readSQL(__dirname, 'import.sql'),
};
describe('HiveSQL Import Syntax Tests', () => {

View File

@ -5,7 +5,7 @@ const parser = new HiveSQL();
const features = {
insertFromQueries: readSQL(__dirname, 'insertFromQuery.sql'),
insertFromValues: readSQL(__dirname, 'insertFormValues.sql')
insertFromValues: readSQL(__dirname, 'insertFormValues.sql'),
};
describe('HiveSQL Insert Syntax Tests', () => {

View File

@ -15,7 +15,6 @@ describe('HiveSQL Visitor Tests', () => {
test('Visitor visitTableName', () => {
let result = '';
class MyVisitor extends AbstractParseTreeVisitor<any> implements HiveSqlParserVisitor<any> {
defaultResult() {
return result;
}

View File

@ -1,3 +1,4 @@
import { ParseTreeListener } from 'antlr4ts/tree';
import { Target_listContext } from '../../../src/lib/pgsql/PostgreSQLParser';
import { PostgreSQLParserListener } from '../../../src/lib/pgsql/PostgreSQLParserListener';
import PostgresSQL from '../../../src/parser/pgsql';
@ -16,9 +17,9 @@ describe('PostgresSQL Listener Tests', () => {
result = ctx.text.toLowerCase();
}
}
const listenTableName: any = new MyListener();
const listenTableName = new MyListener();
await parser.listen(listenTableName, parserTree);
await parser.listen(listenTableName as ParseTreeListener, parserTree);
expect(result).toBe(expectTableName);
});
});

View File

@ -1,14 +1,14 @@
import PostgresSQL from "../../../src/parser/pgsql";
import { readSQL } from "../../helper";
import PostgresSQL from '../../../src/parser/pgsql';
import { readSQL } from '../../helper';
const parser = new PostgresSQL();
const features = {
base: readSQL(__dirname, "select.sql"),
base: readSQL(__dirname, 'select.sql'),
};
describe("Postgre SQL Query Statement Tests", () => {
describe("Base Select", () => {
describe('Postgre SQL Query Statement Tests', () => {
describe('Base Select', () => {
features.base.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);

View File

@ -1,4 +1,4 @@
import PostgresSQL from "../../../src/parser/pgsql";
import PostgresSQL from '../../../src/parser/pgsql';
describe('PostgresSQL SQL Syntax Tests', () => {
const parser = new PostgresSQL();
@ -21,5 +21,4 @@ describe('PostgresSQL SQL Syntax Tests', () => {
const result = parser.validate(sql);
expect(result.length).toBe(0);
});
});

View File

@ -1,6 +1,6 @@
import { AbstractParseTreeVisitor } from "antlr4ts/tree/AbstractParseTreeVisitor";
import { PostgreSQLParserVisitor } from "../../../src/lib/pgsql/PostgreSQLParserVisitor";
import PostgresSQL from "../../../src/parser/pgsql";
import { AbstractParseTreeVisitor } from 'antlr4ts/tree/AbstractParseTreeVisitor';
import { PostgreSQLParserVisitor } from '../../../src/lib/pgsql/PostgreSQLParserVisitor';
import PostgresSQL from '../../../src/parser/pgsql';
describe('Generic SQL Visitor Tests', () => {
const expectTableName = 'user1';
@ -13,11 +13,14 @@ describe('Generic SQL Visitor Tests', () => {
test('Visitor visitTableName', () => {
let result = '';
class MyVisitor extends AbstractParseTreeVisitor<any> implements PostgreSQLParserVisitor<any> {
class MyVisitor
extends AbstractParseTreeVisitor<any>
implements PostgreSQLParserVisitor<any>
{
protected defaultResult() {
return result;
}
visitTable_ref(ctx) {
result = ctx.text.toLowerCase();
}

View File

@ -1,3 +1,4 @@
import { ParseTreeListener } from 'antlr4ts/tree';
import { PlSqlParserListener } from '../../../src/lib/plsql/PlSqlParserListener';
import PLSQL from '../../../src/parser/plsql';
@ -11,14 +12,13 @@ describe('PLSQL Listener Tests', () => {
test('Listener enterTableName', async () => {
let result = '';
class MyListener implements PlSqlParserListener {
// eslint-disable-next-line camelcase
enterTable_ref_list = (ctx): void => {
result = ctx.text.toLowerCase();
}
};
}
const listenTableName: any = new MyListener();
const listenTableName = new MyListener();
await parser.listen(listenTableName, parserTree);
await parser.listen(listenTableName as ParseTreeListener, parserTree);
expect(result).toBe(expectTableName);
});
});

View File

@ -15,10 +15,9 @@ describe('PLSQL Visitor Tests', () => {
protected defaultResult() {
return result;
}
// eslint-disable-next-line camelcase
visitTable_ref_list = (ctx): void => {
result = ctx.text.toLowerCase();
}
};
}
const visitor: any = new MyVisitor();
visitor.visit(parserTree);

View File

@ -1,3 +1,4 @@
import { ParseTreeListener } from 'antlr4ts/tree';
import { SparkSqlParserListener } from '../../../src/lib/spark/SparkSqlParserListener';
import SparkSQL from '../../../src/parser/spark';
@ -13,11 +14,11 @@ describe('Spark SQL Listener Tests', () => {
class MyListener implements SparkSqlParserListener {
exitRelationPrimary = (ctx): void => {
result = ctx.text.toLowerCase();
}
};
}
const listenTableName = new MyListener();
parser.listen(listenTableName, parserTree);
parser.listen(listenTableName as ParseTreeListener, parserTree);
expect(result).toBe(expectTableName);
});
});

View File

@ -3,7 +3,10 @@ import path from 'path';
import { CaretPosition, SyntaxContextType } from '../../../../src/parser/common/basic-parser-types';
import SparkSQL from '../../../../src/parser/spark';
const syntaxSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'), 'utf-8');
const syntaxSql = fs.readFileSync(
path.join(__dirname, 'fixtures', 'syntaxSuggestion.sql'),
'utf-8'
);
describe('Spark SQL Syntax Suggestion', () => {
const parser = new SparkSQL();
@ -20,11 +23,12 @@ describe('Spark SQL Syntax Suggestion', () => {
column: 18,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.TABLE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text))
.toEqual(['db', '.', 'tb']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'tb']);
});
test('Select table ', () => {
@ -33,11 +37,12 @@ describe('Spark SQL Syntax Suggestion', () => {
column: 18,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.TABLE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text))
.toEqual(['db', '.']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
});
test('Create table ', () => {
@ -46,11 +51,12 @@ describe('Spark SQL Syntax Suggestion', () => {
column: 17,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text))
.toEqual(['db', '.']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
});
test('DROP table ', () => {
@ -59,11 +65,12 @@ describe('Spark SQL Syntax Suggestion', () => {
column: 26,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.TABLE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text))
.toEqual(['db', '.', 'a']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'a']);
});
test('Create view ', () => {
@ -72,11 +79,12 @@ describe('Spark SQL Syntax Suggestion', () => {
column: 28,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text))
.toEqual(['db', '.', 'v']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'v']);
});
test('Drop view ', () => {
@ -85,11 +93,12 @@ describe('Spark SQL Syntax Suggestion', () => {
column: 15,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.VIEW);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.VIEW
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text))
.toEqual(['db', '.', 'v']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.', 'v']);
});
test('Create function ', () => {
@ -98,11 +107,12 @@ describe('Spark SQL Syntax Suggestion', () => {
column: 20,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text))
.toEqual(['fn1']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['fn1']);
});
test('Use function', () => {
@ -111,11 +121,12 @@ describe('Spark SQL Syntax Suggestion', () => {
column: 27,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.FUNCTION
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text))
.toEqual(['calculate_age']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['calculate_age']);
});
test('Create database', () => {
@ -124,11 +135,12 @@ describe('Spark SQL Syntax Suggestion', () => {
column: 19,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE_CREATE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text))
.toEqual(['db']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db']);
});
test('Drop database', () => {
@ -137,10 +149,11 @@ describe('Spark SQL Syntax Suggestion', () => {
column: 26,
};
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
const suggestion = syntaxes?.find((syn) => syn.syntaxContextType === SyntaxContextType.DATABASE);
const suggestion = syntaxes?.find(
(syn) => syn.syntaxContextType === SyntaxContextType.DATABASE
);
expect(suggestion).not.toBeUndefined();
expect(suggestion?.wordRanges.map((token) => token.text))
.toEqual(['sch']);
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['sch']);
});
});

View File

@ -13,19 +13,9 @@ describe('Spark SQL Syntax Suggestion', () => {
lineNumber: 1,
column: 7,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos,
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
'TABLE',
'INDEX',
'VIEW',
'DATABASE',
'NAMESPACE',
'SCHEMA',
]);
expect(suggestion).toEqual(['TABLE', 'INDEX', 'VIEW', 'DATABASE', 'NAMESPACE', 'SCHEMA']);
});
test('After CREATE', () => {
@ -33,10 +23,7 @@ describe('Spark SQL Syntax Suggestion', () => {
lineNumber: 3,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos,
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
'TEMPORARY',
@ -59,10 +46,7 @@ describe('Spark SQL Syntax Suggestion', () => {
lineNumber: 5,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos,
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual(['FROM']);
});
@ -72,10 +56,7 @@ describe('Spark SQL Syntax Suggestion', () => {
lineNumber: 7,
column: 10,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos,
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
'WITH',
@ -98,10 +79,7 @@ describe('Spark SQL Syntax Suggestion', () => {
lineNumber: 9,
column: 6,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos,
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
'TEMPORARY',
@ -121,15 +99,9 @@ describe('Spark SQL Syntax Suggestion', () => {
lineNumber: 11,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos,
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
'OVERWRITE',
'INTO',
]);
expect(suggestion).toEqual(['OVERWRITE', 'INTO']);
});
test('After LOAD', () => {
@ -137,14 +109,9 @@ describe('Spark SQL Syntax Suggestion', () => {
lineNumber: 13,
column: 6,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos,
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
'DATA',
]);
expect(suggestion).toEqual(['DATA']);
});
test('After SHOW', () => {
@ -152,10 +119,7 @@ describe('Spark SQL Syntax Suggestion', () => {
lineNumber: 15,
column: 6,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos,
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual([
'LOCKS',
@ -190,10 +154,7 @@ describe('Spark SQL Syntax Suggestion', () => {
lineNumber: 17,
column: 8,
};
const suggestion = parser.getSuggestionAtCaretPosition(
tokenSql,
pos,
)?.keywords;
const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
expect(suggestion).toEqual(['TABLE']);
});

View File

@ -1,7 +1,6 @@
import SparkSQL from '../../../../src/parser/spark';
import { readSQL } from '../../../helper';
const parser = new SparkSQL();
const features = {

View File

@ -1,7 +1,6 @@
import SparkSQL from '../../../../src/parser/spark';
import { readSQL } from '../../../helper';
const parser = new SparkSQL();
const features = {

View File

@ -12,14 +12,17 @@ describe('Spark SQL Visitor Tests', () => {
});
test('Visitor visitRelationPrimary', () => {
class MyVisitor extends AbstractParseTreeVisitor<any> implements SparkSqlParserVisitor<any> {
class MyVisitor
extends AbstractParseTreeVisitor<any>
implements SparkSqlParserVisitor<any>
{
result: string = '';
protected defaultResult() {
return this.result;
}
visitRelationPrimary = (ctx): void => {
this.result = ctx.text.toLowerCase();
}
};
}
const visitor = new MyVisitor();
visitor.visit(parserTree);

View File

@ -1,5 +1,6 @@
import trinoSQL from '../../../src/parser/trinosql';
import { TrinoSqlListener } from '../../../src/lib/trinosql/TrinoSqlListener';
import { ParseTreeListener } from 'antlr4ts/tree';
describe('trino SQL Listener Tests', () => {
const expectTableName = 'user1';
@ -11,14 +12,13 @@ describe('trino SQL Listener Tests', () => {
test('Listener enterTableName', async () => {
let result = '';
class MyListener implements TrinoSqlListener {
enterTableName = (ctx): void => {
result = ctx.text.toLowerCase();
};
}
const listenTableName = new MyListener();
await parser.listen(listenTableName, parserTree);
await parser.listen(listenTableName as ParseTreeListener, parserTree);
expect(result).toBe(expectTableName);
});
});

View File

@ -1,11 +1,11 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
table: readSQL(__dirname, 'alter_table.sql'),
view: readSQL(__dirname, 'alter_view.sql'),
schema: readSQL(__dirname, 'alter_schema.sql'),
materializedView: readSQL(__dirname, 'alter_materialized_view.sql')
materializedView: readSQL(__dirname, 'alter_materialized_view.sql'),
};
describe('TrinoSQL Alter Statements Syntax Tests', () => {
@ -19,16 +19,15 @@ describe('TrinoSQL Alter Statements Syntax Tests', () => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
features.schema.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.materializedView.forEach((sql) => {
});
features.materializedView.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
});

View File

@ -1,9 +1,8 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
analyze: readSQL(__dirname, 'analyze.sql'),
};
describe('TrinoSQL Analyze Statements Syntax Tests', () => {
@ -15,4 +14,3 @@ describe('TrinoSQL Analyze Statements Syntax Tests', () => {
});
});
});

View File

@ -1,9 +1,8 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
call: readSQL(__dirname, 'call.sql'),
};
describe('TrinoSQL Call Statements Syntax Tests', () => {
@ -15,4 +14,3 @@ describe('TrinoSQL Call Statements Syntax Tests', () => {
});
});
});

View File

@ -1,9 +1,8 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
comment: readSQL(__dirname, 'comment.sql'),
};
describe('TrinoSQL Comment Statements Syntax Tests', () => {
@ -15,4 +14,3 @@ describe('TrinoSQL Comment Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
commit: readSQL(__dirname, 'commit.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Commit Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
table: readSQL(__dirname, 'create_table.sql'),
@ -12,7 +12,7 @@ const features = {
describe('TrinoSQL Create Statements Syntax Tests', () => {
const parser = new TrinoSQL();
features.table.forEach((sql) => {
features.table.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
@ -21,28 +21,26 @@ describe('TrinoSQL Create Statements Syntax Tests', () => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
features.schema.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
features.tableAsSelect.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
features.role.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
features.materializedView.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
deallocatePrepare: readSQL(__dirname, 'deallocate_prepare.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL deallocatePrepare Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
delete: readSQL(__dirname, 'delete.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Delete Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
deny: readSQL(__dirname, 'deny.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Deny Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
describe: readSQL(__dirname, 'describe.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Describe Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
table: readSQL(__dirname, 'drop_table.sql'),
@ -12,7 +12,7 @@ const features = {
describe('TrinoSQL Drop Statements Syntax Tests', () => {
const parser = new TrinoSQL();
features.table.forEach((sql) => {
features.table.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
@ -21,28 +21,26 @@ describe('TrinoSQL Drop Statements Syntax Tests', () => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
features.schema.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
features.column.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
features.role.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
features.materializedView.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
execute: readSQL(__dirname, 'execute.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Execute Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
explain: readSQL(__dirname, 'explain.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Explain Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
grant: readSQL(__dirname, 'grant.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Grant Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
insertIntoTable: readSQL(__dirname, 'insert_into.sql'),
@ -13,4 +13,3 @@ describe('TrinoSQL Insert Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
matchRecognize: readSQL(__dirname, 'match_recognize.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Match Recognize Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
merge: readSQL(__dirname, 'merge.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Merge Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
prepare: readSQL(__dirname, 'prepare.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Prepare Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
refreshMaterializedView: readSQL(__dirname, 'refresh_materialized_view.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Refresh Materialized View Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
resetSession: readSQL(__dirname, 'reset_session.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Reset Session Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
revoke: readSQL(__dirname, 'revoke.sql'),
@ -20,4 +20,3 @@ describe('TrinoSQL Revoke Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
rollbackTransaction: readSQL(__dirname, 'rollback_transaction.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Rollback Transaction Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
select: readSQL(__dirname, 'select.sql'),
@ -13,12 +13,12 @@ const features = {
selectWithFetch: readSQL(__dirname, 'select_with_fetch.sql'),
selectWithUNNEST: readSQL(__dirname, 'select_with_ unnest.sql'),
selectWithExists: readSQL(__dirname, 'select_with_exists.sql'),
selectWithUnion: readSQL(__dirname, 'select_with_union.sql')
selectWithUnion: readSQL(__dirname, 'select_with_union.sql'),
};
describe('TrinoSQL Select Statements Syntax Tests', () => {
const parser = new TrinoSQL();
features.select.forEach((sql) => {
features.select.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
@ -27,57 +27,56 @@ describe('TrinoSQL Select Statements Syntax Tests', () => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
features.selectWithSetOperations.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
features.selectWithSubQueries.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
features.selectWithTableSample.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
features.selectWithRowType.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
features.selectWithOffset.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
features.selectWithJoin.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
features.selectWithFetch.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
features.selectWithUNNEST.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
features.selectWithExists.forEach((sql) => {
});
features.selectWithExists.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
features.selectWithUnion.forEach((sql) => {
it(sql, () => {
expect(parser.validate(sql).length).toBe(0);
});
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
role: readSQL(__dirname, 'set_role.sql'),
@ -32,4 +32,3 @@ describe('TrinoSQL Set Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
tables: readSQL(__dirname, 'show_tables.sql'),
@ -75,4 +75,3 @@ describe('TrinoSQL Show Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
startTransaction: readSQL(__dirname, 'start_transaction.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Start Transaction Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
truncateTable: readSQL(__dirname, 'truncate_table.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Truncate Table Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
update: readSQL(__dirname, 'update.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Update Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
use: readSQL(__dirname, 'use.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Use Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
values: readSQL(__dirname, 'values.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Values Statements Syntax Tests', () => {
});
});
});

View File

@ -1,5 +1,5 @@
import TrinoSQL from "../../../../src/parser/trinosql";
import { readSQL } from "../../../helper";
import TrinoSQL from '../../../../src/parser/trinosql';
import { readSQL } from '../../../helper';
const features = {
windowWithRowPatternRecognition: readSQL(__dirname, 'window_with_row_pattern_recognition.sql'),
@ -14,4 +14,3 @@ describe('TrinoSQL Window With Row Pattern Recognition Statements Syntax Tests',
});
});
});

View File

@ -13,13 +13,13 @@ describe('trino SQL Visitor Tests', () => {
test('Visitor visitTableName', () => {
let result = '';
class MyVisitor extends AbstractParseTreeVisitor<any> implements TrinoSqlVisitor<any>{
class MyVisitor extends AbstractParseTreeVisitor<any> implements TrinoSqlVisitor<any> {
protected defaultResult() {
return result;
}
visitTableName = (ctx): void => {
result = ctx.text.toLowerCase();
}
};
}
const visitor: any = new MyVisitor();
visitor.visit(parserTree);

View File

@ -64,9 +64,7 @@ describe('utils', () => {
const sql = `select * from a;--comments`;
const expected = `--comments`;
const result = lexer(sql);
const comments = result.find((token) =>
token.type === TokenType.Comment,
);
const comments = result.find((token) => token.type === TokenType.Comment);
expect(comments?.value).toEqual(expected);
});
test('cleanSql', () => {