Merge branch 'master' of github.com:DTStack/dt-sql-parser

commit 526a58ad2e
@@ -7,7 +7,6 @@ module.exports = {
   // All imported modules in your tests should be mocked automatically
   // automock: false,
 
-
   // Stop running tests after `n` failures
   // bail: 0,
 
@@ -85,7 +84,7 @@ module.exports = {
     "ts",
     "tsx",
     "json",
-    "node"
+    "node",
   ],
 
   // A map from regular expressions to module names or to arrays of module names that allow to stub out resources with a single module
@@ -154,10 +153,7 @@ module.exports = {
   // testLocationInResults: false,
 
   // The glob patterns Jest uses to detect test files
-  testMatch: [
-    "**/__tests__/**/*.[jt]s?(x)",
-    "**/?(*.)+(spec|test).[tj]s?(x)"
-  ],
+  testMatch: ["**/__tests__/**/*.[jt]s?(x)", "**/?(*.)+(spec|test).[tj]s?(x)"],
 
   // An array of regexp pattern strings that are matched against all test paths, matched tests are skipped
   // testPathIgnorePatterns: [
@@ -175,17 +171,18 @@ module.exports = {
 
   // A map from regular expressions to paths to transformers
   transform: {
-    '\\.[jt]sx?$': ['ts-jest', {
-      tsconfig: {
-        noUnusedLocals: false,
-      }
-    }]
+    "\\.[jt]sx?$": [
+      "ts-jest",
+      {
+        tsconfig: {
+          noUnusedLocals: false,
+        },
+      },
+    ],
   },
 
   // An array of regexp pattern strings that are matched against all source file paths, matched files will skip transformation
-  transformIgnorePatterns: [
-    '/node_modules/(?!antlr4)'
-  ],
+  transformIgnorePatterns: ["/node_modules/(?!antlr4)"],
 
   // An array of regexp pattern strings that are matched against all modules before the module loader will automatically return a mock for them
   // unmockedModulePathPatterns: undefined,
@@ -198,7 +195,7 @@ module.exports = {
 
   // Whether to use watchman for file crawling
   // watchman: true,
-  moduleNameMapper : {
-    '^antlr4$': '<rootDir>/node_modules/antlr4/src/antlr4/index.web.js',
-  }
+  moduleNameMapper: {
+    "^antlr4$": "<rootDir>/node_modules/antlr4/src/antlr4/index.web.js",
+  },
 };
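Note: with the config above, ts-jest compiles the TypeScript specs, moduleNameMapper resolves the bare antlr4 import to the web build shipped inside node_modules/antlr4, and transformIgnorePatterns lets that one dependency pass through the transform. A minimal spec relying on this wiring could look like the sketch below; the import path and the validate() call mirror the test changes later in this commit, and the spec itself is illustrative, not part of the commit:

    import FlinkSQL from '../../../../src/parser/flinksql';

    describe('jest wiring (sketch)', () => {
        it('parses a simple INSERT ... SELECT', () => {
            const parser = new FlinkSQL();
            // validate() returns the collected lexer + parser errors; empty means the SQL parsed.
            const errors = parser.validate(
                'INSERT INTO country_page_view SELECT user, cnt FROM page_view_source;'
            );
            expect(errors.length).toBe(0);
        });
    });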
pnpm-lock.yaml (new file, 3727 lines)
File diff suppressed because it is too large
@@ -329,7 +329,7 @@ dropFunction
 // Insert statements
 
 insertStatement
-    : (EXECUTE? insertSimpleStatement) | insertMulStatementCompatibility | (EXECUTE insertMulStatement)
+    : (EXECUTE? insertSimpleStatement) | (EXECUTE insertMulStatement)
     ;
 
 insertSimpleStatement
@@ -354,15 +354,10 @@ valuesRowDefinition
     RR_BRACKET
     ;
 
-insertMulStatementCompatibility
-    : BEGIN STATEMENT SET SEMICOLON (insertSimpleStatement SEMICOLON)+ END
-    ;
-
 insertMulStatement
     : STATEMENT SET BEGIN (insertSimpleStatement SEMICOLON)+ END
     ;
 
-
 // Select statements
 
 queryStatement
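Note: after this grammar change, insertStatement accepts either a single insert (optionally prefixed with EXECUTE) or an EXECUTE STATEMENT SET ... BEGIN ... END block; the old BEGIN STATEMENT SET; ... END; compatibility form is no longer part of the rule. A sketch of how that surfaces through the public API, using statements taken from the fixtures added in this commit (the snippet is illustrative, not part of the commit):

    import FlinkSQL from '../../../../src/parser/flinksql'; // path as used in the specs below

    const parser = new FlinkSQL();

    // Matches (EXECUTE? insertSimpleStatement)
    const single = parser.validate(`
        EXECUTE
        INSERT INTO country_page_view
        VALUES ('Chinese', 'mumiao', 18),
            ('Amercian', 'georage', 22);
    `);

    // Matches (EXECUTE insertMulStatement)
    const multi = parser.validate(`
        EXECUTE STATEMENT SET
        BEGIN
        INSERT INTO country_page_view VALUES ('Chinese', 'mumiao', 18), ('Amercian', 'georage', 22);
        INSERT INTO country_page_view VALUES ('Chinese', 'mumiao', 18), ('Amercian', 'georage', 22);
        END;
    `);

    // Both are expected to produce no errors; a BEGIN STATEMENT SET; ... END; block
    // no longer matches insertStatement.
    console.log(single.length, multi.length); // expected: 0 0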
@@ -1,4 +1,4 @@
-// Generated from /Users/mortalYoung/Projects/dt-sql-parser/src/grammar/flinksql/FlinkSqlLexer.g4 by ANTLR 4.12.0
+// Generated from /Users/zhenglin/Documents/parser/dt-sql-parser/src/grammar/flinksql/FlinkSqlLexer.g4 by ANTLR 4.12.0
 // noinspection ES6UnusedImports,JSUnusedGlobalSymbols,JSUnusedLocalSymbols
 import {
     ATN,

File diff suppressed because one or more lines are too long
File diff suppressed because it is too large
@@ -1,4 +1,4 @@
-// Generated from /Users/mortalYoung/Projects/dt-sql-parser/src/grammar/flinksql/FlinkSqlParser.g4 by ANTLR 4.12.0
+// Generated from /Users/zhenglin/Documents/parser/dt-sql-parser/src/grammar/flinksql/FlinkSqlParser.g4 by ANTLR 4.12.0
 
 import {ParseTreeListener} from "antlr4";
 
@@ -77,7 +77,6 @@ import { InsertSimpleStatementContext } from "./FlinkSqlParser";
 import { InsertPartitionDefinitionContext } from "./FlinkSqlParser";
 import { ValuesDefinitionContext } from "./FlinkSqlParser";
 import { ValuesRowDefinitionContext } from "./FlinkSqlParser";
-import { InsertMulStatementCompatibilityContext } from "./FlinkSqlParser";
 import { InsertMulStatementContext } from "./FlinkSqlParser";
 import { QueryStatementContext } from "./FlinkSqlParser";
 import { ValuesCaluseContext } from "./FlinkSqlParser";
@@ -932,16 +931,6 @@ export default class FlinkSqlParserListener extends ParseTreeListener {
 	 * @param ctx the parse tree
 	 */
 	exitValuesRowDefinition?: (ctx: ValuesRowDefinitionContext) => void;
-	/**
-	 * Enter a parse tree produced by `FlinkSqlParser.insertMulStatementCompatibility`.
-	 * @param ctx the parse tree
-	 */
-	enterInsertMulStatementCompatibility?: (ctx: InsertMulStatementCompatibilityContext) => void;
-	/**
-	 * Exit a parse tree produced by `FlinkSqlParser.insertMulStatementCompatibility`.
-	 * @param ctx the parse tree
-	 */
-	exitInsertMulStatementCompatibility?: (ctx: InsertMulStatementCompatibilityContext) => void;
 	/**
 	 * Enter a parse tree produced by `FlinkSqlParser.insertMulStatement`.
 	 * @param ctx the parse tree
@@ -1,4 +1,4 @@
-// Generated from /Users/mortalYoung/Projects/dt-sql-parser/src/grammar/flinksql/FlinkSqlParser.g4 by ANTLR 4.12.0
+// Generated from /Users/zhenglin/Documents/parser/dt-sql-parser/src/grammar/flinksql/FlinkSqlParser.g4 by ANTLR 4.12.0
 
 import {ParseTreeVisitor} from 'antlr4';
 
@@ -77,7 +77,6 @@ import { InsertSimpleStatementContext } from "./FlinkSqlParser";
 import { InsertPartitionDefinitionContext } from "./FlinkSqlParser";
 import { ValuesDefinitionContext } from "./FlinkSqlParser";
 import { ValuesRowDefinitionContext } from "./FlinkSqlParser";
-import { InsertMulStatementCompatibilityContext } from "./FlinkSqlParser";
 import { InsertMulStatementContext } from "./FlinkSqlParser";
 import { QueryStatementContext } from "./FlinkSqlParser";
 import { ValuesCaluseContext } from "./FlinkSqlParser";
@@ -637,12 +636,6 @@ export default class FlinkSqlParserVisitor<Result> extends ParseTreeVisitor<Result> {
 	 * @return the visitor result
 	 */
 	visitValuesRowDefinition?: (ctx: ValuesRowDefinitionContext) => Result;
-	/**
-	 * Visit a parse tree produced by `FlinkSqlParser.insertMulStatementCompatibility`.
-	 * @param ctx the parse tree
-	 * @return the visitor result
-	 */
-	visitInsertMulStatementCompatibility?: (ctx: InsertMulStatementCompatibilityContext) => Result;
 	/**
 	 * Visit a parse tree produced by `FlinkSqlParser.insertMulStatement`.
 	 * @param ctx the parse tree
@@ -44,7 +44,6 @@ export default abstract class BasicParser {
         parser.addErrorListener(new ParserErrorCollector(syntaxErrors));
-
         parser.program();
 
         return lexerError.concat(syntaxErrors);
     }
 
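Note: the surrounding method registers a ParserErrorCollector for syntax errors, runs the top-level program rule, and returns the lexer errors concatenated with the collected syntax errors, so callers only need to check the array length. A caller-side sketch (illustrative only):

    import FlinkSQL from '../../../../src/parser/flinksql';

    const parser = new FlinkSQL();
    const errors = parser.validate('INSERT INTO country_page_view SELECT user, cnt FROM page_view_source;');
    // An empty array means both the lexer and the parser accepted the input.
    console.log(errors.length === 0 ? 'valid' : `found ${errors.length} error(s)`);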
@@ -1,10 +1,30 @@
 import fs from 'fs';
 import path from 'path';
 
-export const readSQL = (dirname: string, fileName: string, isSegment = true) => {
-    const sqlFiles = fs.readFileSync(path.join(dirname, 'fixtures', fileName), 'utf-8')
-    if (!isSegment) return [sqlFiles];
-    return sqlFiles.split(';')
-        .filter(Boolean)
-        .map((i) => i.trim());
-}
+export const readSQL = (dirname: string, fileName: string) => {
+    const content = fs.readFileSync(path.join(dirname, 'fixtures', fileName), 'utf-8');
+    const result: string[] = [];
+    let tmp = '';
+
+    for (let index = 0; index < content.length; index++) {
+        const char = content[index];
+        tmp += char;
+
+        const isMulti = tmp.includes('EXECUTE STATEMENT SET');
+
+        if (!isMulti) {
+            // For non-batch statements, simply split on semicolons for now
+            if (tmp.endsWith(';')) {
+                result.push(tmp.trim());
+                tmp = '';
+            }
+        } else {
+            if (tmp.endsWith('END;')) {
+                result.push(tmp.trim());
+                tmp = '';
+            }
+        }
+    }
+
+    return result;
+};
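Note: the rewritten helper walks the fixture character by character. Ordinary statements are split on ';', but once the accumulated text contains EXECUTE STATEMENT SET it only splits on 'END;', so a whole multi-insert block is kept together as a single test case. Usage, as in the specs below (sketch only):

    import { readSQL } from '../../../helper';

    // One entry per ';'-terminated statement.
    const simple = readSQL(__dirname, 'insertFromSelectQueries.sql');

    // A single entry containing the whole EXECUTE STATEMENT SET BEGIN ... END; block.
    const multi = readSQL(__dirname, 'insertMultipleTable.sql');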
@@ -0,0 +1,47 @@
+INSERT INTO country_page_view
+SELECT user,
+    cnt
+FROM page_view_source;
+
+INSERT INTO catalog1.db1.country_page_view
+SELECT user,
+    cnt
+FROM page_view_source;
+
+
+--- Execute InsertStatement
+EXECUTE
+INSERT INTO country_page_view PARTITION (date = '2019-8-30', country = 'China')
+SELECT user,
+    cnt
+FROM page_view_source;
+
+--- Partition Clause: Static Partition
+INSERT INTO country_page_view PARTITION (date = '2019-8-30', country = 'China')
+SELECT user,
+    cnt
+FROM page_view_source;
+
+--- Partition Clause: Dynamic Partition
+INSERT INTO country_page_view PARTITION (date = '2019-8-30')
+SELECT user,
+    cnt,
+    country
+FROM page_view_source;
+
+--- Column List Statement
+INSERT INTO country_page_view PARTITION (date = '2019-8-30', country = 'China') (date, country)
+SELECT user,
+    cnt
+FROM page_view_source;
+
+--- Insert Method: OverWrite
+INSERT OVERWRITE country_page_view PARTITION (date = '2019-8-30')
+SELECT user,
+    cnt,
+    country
+FROM page_view_source;
+
+
+
+
test/parser/flinksql/syntax/fixtures/insertMultipleTable.sql (new file, 11 lines)
@@ -0,0 +1,11 @@
+EXECUTE STATEMENT SET
+BEGIN
+INSERT INTO country_page_view
+VALUES ('Chinese', 'mumiao', 18),
+    ('Amercian', 'georage', 22);
+INSERT INTO country_page_view
+VALUES ('Chinese', 'mumiao', 18),
+    ('Amercian', 'georage', 22);
+END;
+
+
@@ -0,0 +1,22 @@
+INSERT INTO country_page_view
+VALUES ('Chinese', 'mumiao', 18),
+    ('Amercian', 'georage', 22);
+
+EXECUTE
+INSERT INTO country_page_view
+VALUES ('Chinese', 'mumiao', 18),
+    ('Amercian', 'georage', 22);
+
+EXECUTE
+INSERT OverWrite country_page_view
+VALUES ('Chinese', 'mumiao', 18),
+    ('Amercian', 'georage', 22);
+
+EXECUTE
+INSERT INTO country_page_view
+VALUES ('Chinese', 'mumiao', 18),
+    ('Amercian', 'georage', 22);
+
+INSERT INTO catalog1.db1.country_page_view
+VALUES ('Chinese', 'mumiao', 18),
+    ('Amercian', 'georage', 22);
@@ -1,83 +1,29 @@
-import FlinkSQL from "../../../../src/parser/flinksql";
-
-describe('FlinkSQL Create Table Syntax Tests', () => {
-    const parser = new FlinkSQL();
-    // insert statements
-    test('Test one simple Insert Statement', () => {
-        const sql = `
-            INSERT INTO country_page_view
-            SELECT user, cnt FROM page_view_source;
-        `;
-        const result = parser.validate(sql);
-        expect(result.length).toBe(0);
-    });
-    test('Test Insert Overwrite Statement', () => {
-        const sql = `
-            INSERT OVERWRITE country_page_view PARTITION (date='2019-8-30', country='China')
-            SELECT user, cnt FROM page_view_source;
-        `;
-        const result = parser.validate(sql);
-        expect(result.length).toBe(0);
-    });
-    test('Test execute Statement', () => {
-        const sql = `
-            EXECUTE INSERT INTO country_page_view PARTITION (date='2019-8-30', country='China')
-            SELECT user, cnt FROM page_view_source;
-        `;
-        const result = parser.validate(sql);
-        expect(result.length).toBe(0);
-    });
-    test('Test Partition Clause Statement', () => {
-        const sql = `
-            INSERT INTO country_page_view PARTITION (date='2019-8-30', country='China')
-            SELECT user, cnt FROM page_view_source;
-        `;
-        const result = parser.validate(sql);
-        expect(result.length).toBe(0);
-    });
-    test('Test Column List Statement', () => {
-        const sql = `
-            INSERT INTO emps PARTITION (x='ab', y='bc') (x, y) SELECT * FROM emps;
-        `;
-        const result = parser.validate(sql);
-        expect(result.length).toBe(0);
-    });
-    test('Test Insert Values Statement', () => {
-        const sql = `
-            INSERT INTO students
-                VALUES ('fred flintstone', 35, 1.28), ('barney rubble', 32, 2.32);
-        `;
-        const result = parser.validate(sql);
-        expect(result.length).toBe(0);
-    });
-    test('Test insert into multiple tables Statement for 1.14', () => {
-        const sql = `
-            BEGIN STATEMENT SET;
-
-            INSERT INTO pageviews
-            SELECT page_id, count(1)
-            FROM pageviews
-            GROUP BY page_id;
-
-            INSERT INTO uniqueview
-            SELECT page_id, count(distinct user_id)
-            FROM pageviews
-            GROUP BY page_id;
-
-            END;
-        `;
-        const result = parser.validate(sql);
-        expect(result.length).toBe(0);
-    });
-    test('Test insert into multiple tables Statement for 1.15', () => {
-        const sql = `
-            EXECUTE STATEMENT SET
-            BEGIN
-            INSERT INTO students VALUES ('fred flintstone', 35, 1.28), ('barney rubble', 32, 2.32);
-            INSERT INTO students VALUES ('fred flintstone', 35, 1.28), ('barney rubble', 32, 2.32);
-            END;
-        `;
-        const result = parser.validate(sql);
-        expect(result.length).toBe(0);
-    });
-});
+import FlinkSQL from '../../../../src/parser/flinksql';
+import { readSQL } from '../../../helper';
+
+const parser = new FlinkSQL();
+
+const features = {
+    InsertFromSelectQueries: readSQL(__dirname, 'insertFromSelectQueries.sql'),
+    InsertValuesIntoTable: readSQL(__dirname, 'insertValuesIntoTable.sql'),
+    InsertMultipleTable: readSQL(__dirname, 'insertMultipleTable.sql')
+};
+
+describe('FlinkSQL Insert Syntax Tests', () => {
+    features.InsertFromSelectQueries.forEach((insertFromSelectQueries) => {
+        it(insertFromSelectQueries, () => {
+            expect(parser.validate(insertFromSelectQueries).length).toBe(0);
+        });
+    });
+    features.InsertValuesIntoTable.forEach((insertValuesIntoTable) => {
+        it(insertValuesIntoTable, () => {
+            expect(parser.validate(insertValuesIntoTable).length).toBe(0);
+        });
+    });
+    features.InsertMultipleTable.forEach((insertMultipleTable) => {
+        it(insertMultipleTable, () => {
+            expect(parser.validate(insertMultipleTable).length).toBe(0);
+        });
+
+    });
+});
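Note: the spec is now data-driven: every statement in a fixture file becomes its own it() case, named after the SQL it parses. Adding coverage means dropping a new file into fixtures/ and registering it the same way. In the sketch below, insertOverwrite.sql is a hypothetical fixture name, not part of this commit:

    const extra = {
        InsertOverwrite: readSQL(__dirname, 'insertOverwrite.sql'), // hypothetical fixture
    };

    describe('FlinkSQL Insert Overwrite Syntax Tests', () => {
        extra.InsertOverwrite.forEach((sql) => {
            it(sql, () => {
                expect(parser.validate(sql).length).toBe(0);
            });
        });
    });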