refactor: standard naming (#278)
* refactor: rename flinksql to flink * refactor: rename pgsql to postgresql * refactor: rename trinosql to trino * refactor: replace all default exports with named exports * refactor: rename basicParser to basicSQL * refactor: rename basic-parser-types to types * refactor: replace arrow func with plain func
This commit is contained in:
@ -2,8 +2,8 @@ import { ParseTreeListener } from 'antlr4ng';
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { HiveSqlParserListener } from 'src/lib/hive/HiveSqlParserListener';
|
||||
import { EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import HiveSQL, { HiveEntityCollector } from 'src/parser/hive';
|
||||
import { EntityContextType } from 'src/parser/common/types';
|
||||
import { HiveSQL, HiveEntityCollector } from 'src/parser/hive';
|
||||
import { HiveSqlSplitListener } from 'src/parser/hive/hiveSplitListener';
|
||||
import { StmtContextType } from 'src/parser/common/entityCollector';
|
||||
|
||||
|
@ -1,4 +1,4 @@
|
||||
import HiveSQL, { HiveSqlSplitListener } from 'src/parser/hive';
|
||||
import { HiveSQL, HiveSqlSplitListener } from 'src/parser/hive';
|
||||
import { HiveSqlParserListener } from 'src/lib';
|
||||
|
||||
const validSQL1 = `INSERT INTO country_page_view
|
||||
|
@ -1,16 +1,16 @@
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
|
||||
describe('HiveSQL Lexer tests', () => {
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
test('select token counts', () => {
|
||||
const sql = 'SELECT * FROM t1';
|
||||
const tokens = parser.getAllTokens(sql);
|
||||
const tokens = hive.getAllTokens(sql);
|
||||
expect(tokens.length).toBe(7);
|
||||
});
|
||||
|
||||
test('select token counts', () => {
|
||||
const sql = 'show create table_name;';
|
||||
const tokens = parser.getAllTokens(sql);
|
||||
const tokens = hive.getAllTokens(sql);
|
||||
expect(tokens.length).toBe(6);
|
||||
});
|
||||
});
|
||||
|
@ -1,14 +1,14 @@
|
||||
import { ParseTreeListener } from 'antlr4ng';
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { ProgramContext, SelectItemContext } from 'src/lib/hive/HiveSqlParser';
|
||||
import { HiveSqlParserListener } from 'src/lib/hive/HiveSqlParserListener';
|
||||
|
||||
describe('HiveSQL Listener Tests', () => {
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
test('Listener enterSelectList', async () => {
|
||||
const expectTableName = 'username';
|
||||
const sql = `select ${expectTableName} from tablename where inc_day='20190601' limit 1000;`;
|
||||
const parseTree = parser.parse(sql);
|
||||
const parseTree = hive.parse(sql);
|
||||
|
||||
let result = '';
|
||||
class MyListener implements HiveSqlParserListener {
|
||||
@ -22,12 +22,12 @@ describe('HiveSQL Listener Tests', () => {
|
||||
}
|
||||
const listenTableName = new MyListener();
|
||||
|
||||
await parser.listen(listenTableName as ParseTreeListener, parseTree as ProgramContext);
|
||||
await hive.listen(listenTableName as ParseTreeListener, parseTree as ProgramContext);
|
||||
expect(result).toBe(expectTableName);
|
||||
});
|
||||
test('Listener enterCreateTable', async () => {
|
||||
const sql = `drop table table_name;`;
|
||||
const parseTree = parser.parse(sql);
|
||||
const parseTree = hive.parse(sql);
|
||||
let result = '';
|
||||
class MyListener implements HiveSqlParserListener {
|
||||
enterDropTableStatement(ctx) {
|
||||
@ -41,7 +41,7 @@ describe('HiveSQL Listener Tests', () => {
|
||||
}
|
||||
const listenTableName = new MyListener();
|
||||
|
||||
await parser.listen(listenTableName as ParseTreeListener, parseTree as ProgramContext);
|
||||
await hive.listen(listenTableName as ParseTreeListener, parseTree as ProgramContext);
|
||||
expect(result).toBe('droptabletable_name');
|
||||
});
|
||||
|
||||
@ -67,7 +67,7 @@ describe('HiveSQL Listener Tests', () => {
|
||||
key_value_pair;`,
|
||||
];
|
||||
const sql = singleStatementArr.join('\n');
|
||||
const sqlSlices = parser.splitSQLByStatement(sql);
|
||||
const sqlSlices = hive.splitSQLByStatement(sql);
|
||||
|
||||
expect(sqlSlices).not.toBeNull();
|
||||
|
||||
|
@ -1,7 +1,7 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/types';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
|
||||
@ -9,14 +9,14 @@ const syntaxSql = fs.readFileSync(
|
||||
);
|
||||
|
||||
describe('HiveSQL Multiple Statements Syntax Suggestion', () => {
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
test('Select from table ', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 1,
|
||||
column: 15,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
@ -30,7 +30,7 @@ describe('HiveSQL Multiple Statements Syntax Suggestion', () => {
|
||||
lineNumber: 9,
|
||||
column: 17,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE_CREATE
|
||||
);
|
||||
@ -44,7 +44,7 @@ describe('HiveSQL Multiple Statements Syntax Suggestion', () => {
|
||||
lineNumber: 15,
|
||||
column: 13,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
@ -58,7 +58,7 @@ describe('HiveSQL Multiple Statements Syntax Suggestion', () => {
|
||||
lineNumber: 21,
|
||||
column: 75,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === EntityContextType.TABLE
|
||||
);
|
||||
|
@ -1,7 +1,7 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/types';
|
||||
import { commentOtherLine } from 'test/helper';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
|
@ -1,7 +1,7 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/basic-parser-types';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { CaretPosition, EntityContextType } from 'src/parser/common/types';
|
||||
import { commentOtherLine } from 'test/helper';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
@ -10,12 +10,12 @@ const syntaxSql = fs.readFileSync(
|
||||
);
|
||||
|
||||
describe('Hive SQL Syntax Suggestion', () => {
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
test('Validate Syntax SQL', () => {
|
||||
expect(parser.validate(syntaxSql).length).not.toBe(0);
|
||||
expect(parser.validate(syntaxSql).length).not.toBe(0);
|
||||
expect(parser.validate(syntaxSql).length).not.toBe(0);
|
||||
expect(hive.validate(syntaxSql).length).not.toBe(0);
|
||||
expect(hive.validate(syntaxSql).length).not.toBe(0);
|
||||
expect(hive.validate(syntaxSql).length).not.toBe(0);
|
||||
});
|
||||
|
||||
test('Insert table ', () => {
|
||||
@ -23,7 +23,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
lineNumber: 1,
|
||||
column: 18,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
@ -40,7 +40,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
lineNumber: 3,
|
||||
column: 18,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
@ -57,7 +57,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
lineNumber: 5,
|
||||
column: 17,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
@ -74,7 +74,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
lineNumber: 7,
|
||||
column: 26,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
@ -91,7 +91,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
lineNumber: 9,
|
||||
column: 28,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
@ -108,7 +108,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
lineNumber: 11,
|
||||
column: 15,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
@ -125,7 +125,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
lineNumber: 13,
|
||||
column: 20,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
@ -142,7 +142,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
lineNumber: 15,
|
||||
column: 27,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
@ -159,7 +159,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
lineNumber: 17,
|
||||
column: 19,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
@ -176,7 +176,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
lineNumber: 19,
|
||||
column: 26,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
@ -193,7 +193,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
lineNumber: 23,
|
||||
column: 8,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
@ -210,7 +210,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
lineNumber: 25,
|
||||
column: 18,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
@ -227,7 +227,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
lineNumber: 27,
|
||||
column: 32,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
@ -244,7 +244,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
lineNumber: 29,
|
||||
column: 31,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
@ -261,7 +261,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
lineNumber: 31,
|
||||
column: 115,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
@ -282,7 +282,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
lineNumber: 33,
|
||||
column: 31,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
@ -299,7 +299,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
lineNumber: 35,
|
||||
column: 45,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
@ -316,7 +316,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
lineNumber: 37,
|
||||
column: 18,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
@ -333,7 +333,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
lineNumber: 39,
|
||||
column: 14,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
@ -350,7 +350,7 @@ describe('Hive SQL Syntax Suggestion', () => {
|
||||
lineNumber: 41,
|
||||
column: 19,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(
|
||||
const syntaxes = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(syntaxSql, pos.lineNumber),
|
||||
pos
|
||||
)?.syntax;
|
||||
|
@ -1,20 +1,20 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { commentOtherLine } from 'test/helper';
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { CaretPosition } from 'src/parser/common/basic-parser-types';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { CaretPosition } from 'src/parser/common/types';
|
||||
|
||||
const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');
|
||||
|
||||
describe('Hive SQL Token Suggestion', () => {
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
test('After ALTER', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 1,
|
||||
column: 7,
|
||||
};
|
||||
const suggestion = parser.getSuggestionAtCaretPosition(
|
||||
const suggestion = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(tokenSql, pos.lineNumber),
|
||||
pos
|
||||
)?.keywords;
|
||||
@ -41,7 +41,7 @@ describe('Hive SQL Token Suggestion', () => {
|
||||
lineNumber: 3,
|
||||
column: 8,
|
||||
};
|
||||
const suggestion = parser.getSuggestionAtCaretPosition(
|
||||
const suggestion = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(tokenSql, pos.lineNumber),
|
||||
pos
|
||||
)?.keywords;
|
||||
@ -76,7 +76,7 @@ describe('Hive SQL Token Suggestion', () => {
|
||||
lineNumber: 5,
|
||||
column: 8,
|
||||
};
|
||||
const suggestion = parser.getSuggestionAtCaretPosition(
|
||||
const suggestion = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(tokenSql, pos.lineNumber),
|
||||
pos
|
||||
)?.keywords;
|
||||
@ -88,7 +88,7 @@ describe('Hive SQL Token Suggestion', () => {
|
||||
lineNumber: 7,
|
||||
column: 10,
|
||||
};
|
||||
const suggestion = parser.getSuggestionAtCaretPosition(
|
||||
const suggestion = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(tokenSql, pos.lineNumber),
|
||||
pos
|
||||
)?.keywords;
|
||||
@ -107,7 +107,7 @@ describe('Hive SQL Token Suggestion', () => {
|
||||
lineNumber: 9,
|
||||
column: 6,
|
||||
};
|
||||
const suggestion = parser.getSuggestionAtCaretPosition(
|
||||
const suggestion = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(tokenSql, pos.lineNumber),
|
||||
pos
|
||||
)?.keywords;
|
||||
@ -137,7 +137,7 @@ describe('Hive SQL Token Suggestion', () => {
|
||||
lineNumber: 11,
|
||||
column: 8,
|
||||
};
|
||||
const suggestion = parser.getSuggestionAtCaretPosition(
|
||||
const suggestion = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(tokenSql, pos.lineNumber),
|
||||
pos
|
||||
)?.keywords;
|
||||
@ -149,7 +149,7 @@ describe('Hive SQL Token Suggestion', () => {
|
||||
lineNumber: 13,
|
||||
column: 8,
|
||||
};
|
||||
const suggestion = parser.getSuggestionAtCaretPosition(
|
||||
const suggestion = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(tokenSql, pos.lineNumber),
|
||||
pos
|
||||
)?.keywords;
|
||||
@ -161,7 +161,7 @@ describe('Hive SQL Token Suggestion', () => {
|
||||
lineNumber: 15,
|
||||
column: 8,
|
||||
};
|
||||
const suggestion = parser.getSuggestionAtCaretPosition(
|
||||
const suggestion = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(tokenSql, pos.lineNumber),
|
||||
pos
|
||||
)?.keywords;
|
||||
@ -173,7 +173,7 @@ describe('Hive SQL Token Suggestion', () => {
|
||||
lineNumber: 17,
|
||||
column: 6,
|
||||
};
|
||||
const suggestion = parser.getSuggestionAtCaretPosition(
|
||||
const suggestion = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(tokenSql, pos.lineNumber),
|
||||
pos
|
||||
)?.keywords;
|
||||
@ -185,7 +185,7 @@ describe('Hive SQL Token Suggestion', () => {
|
||||
lineNumber: 19,
|
||||
column: 6,
|
||||
};
|
||||
const suggestion = parser.getSuggestionAtCaretPosition(
|
||||
const suggestion = hive.getSuggestionAtCaretPosition(
|
||||
commentOtherLine(tokenSql, pos.lineNumber),
|
||||
pos
|
||||
)?.keywords;
|
||||
|
@ -1,7 +1,7 @@
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
const features = {
|
||||
aborts: readSQL(__dirname, 'abort.sql'),
|
||||
@ -10,7 +10,7 @@ const features = {
|
||||
describe('HiveSQL Abort Syntax Tests', () => {
|
||||
features.aborts.forEach((ab) => {
|
||||
it(ab, () => {
|
||||
expect(parser.validate(ab).length).toBe(0);
|
||||
expect(hive.validate(ab).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -1,7 +1,7 @@
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
const features = {
|
||||
databases: readSQL(__dirname, 'alterDatabase.sql'),
|
||||
@ -16,7 +16,7 @@ describe('HiveSQL Alter Syntax Tests', () => {
|
||||
describe('ALTER DATABASE', () => {
|
||||
features.databases.forEach((db) => {
|
||||
it(db, () => {
|
||||
expect(parser.validate(db).length).toBe(0);
|
||||
expect(hive.validate(db).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -24,7 +24,7 @@ describe('HiveSQL Alter Syntax Tests', () => {
|
||||
describe('ALTER CONNECTOR', () => {
|
||||
features.connectors.forEach((ctors) => {
|
||||
it(ctors, () => {
|
||||
expect(parser.validate(ctors).length).toBe(0);
|
||||
expect(hive.validate(ctors).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -32,7 +32,7 @@ describe('HiveSQL Alter Syntax Tests', () => {
|
||||
describe('ALTER TABLE', () => {
|
||||
features.tables.forEach((tb) => {
|
||||
it(tb, () => {
|
||||
expect(parser.validate(tb).length).toBe(0);
|
||||
expect(hive.validate(tb).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -40,7 +40,7 @@ describe('HiveSQL Alter Syntax Tests', () => {
|
||||
describe('ALTER INDEX', () => {
|
||||
features.indexes.forEach((index) => {
|
||||
it(index, () => {
|
||||
expect(parser.validate(index).length).toBe(0);
|
||||
expect(hive.validate(index).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -48,7 +48,7 @@ describe('HiveSQL Alter Syntax Tests', () => {
|
||||
describe('ALTER VIEW', () => {
|
||||
features.views.forEach((view) => {
|
||||
it(view, () => {
|
||||
expect(parser.validate(view).length).toBe(0);
|
||||
expect(hive.validate(view).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -56,7 +56,7 @@ describe('HiveSQL Alter Syntax Tests', () => {
|
||||
describe('ALTER SCHEDULE QUERY', () => {
|
||||
features.scheduleQueries.forEach((sq) => {
|
||||
it(sq, () => {
|
||||
expect(parser.validate(sq).length).toBe(0);
|
||||
expect(hive.validate(sq).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -1,7 +1,7 @@
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
const features = {
|
||||
manageRoles: readSQL(__dirname, 'authorization.sql'),
|
||||
@ -10,7 +10,7 @@ const features = {
|
||||
describe('HiveSQL Related To Authorization Tests', () => {
|
||||
features.manageRoles.forEach((manageRole) => {
|
||||
it(manageRole, () => {
|
||||
expect(parser.validate(manageRole).length).toBe(0);
|
||||
expect(hive.validate(manageRole).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -1,7 +1,7 @@
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
const features = {
|
||||
databases: readSQL(__dirname, 'createDatabase.sql'),
|
||||
@ -19,7 +19,7 @@ describe('HiveSQL Create Syntax Tests', () => {
|
||||
describe('CREATE DATABASE', () => {
|
||||
features.databases.forEach((database) => {
|
||||
it(database, () => {
|
||||
expect(parser.validate(database).length).toBe(0);
|
||||
expect(hive.validate(database).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -27,7 +27,7 @@ describe('HiveSQL Create Syntax Tests', () => {
|
||||
describe('CREATE TABLE', () => {
|
||||
features.tables.forEach((table) => {
|
||||
it(table, () => {
|
||||
expect(parser.validate(table).length).toBe(0);
|
||||
expect(hive.validate(table).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -35,7 +35,7 @@ describe('HiveSQL Create Syntax Tests', () => {
|
||||
describe('CREATE VIEW', () => {
|
||||
features.views.forEach((view) => {
|
||||
it(view, () => {
|
||||
expect(parser.validate(view).length).toBe(0);
|
||||
expect(hive.validate(view).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -43,7 +43,7 @@ describe('HiveSQL Create Syntax Tests', () => {
|
||||
describe('CREATE FUNCTION', () => {
|
||||
features.functions.forEach((func) => {
|
||||
it(func, () => {
|
||||
expect(parser.validate(func).length).toBe(0);
|
||||
expect(hive.validate(func).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -51,7 +51,7 @@ describe('HiveSQL Create Syntax Tests', () => {
|
||||
describe('CREATE ROLE', () => {
|
||||
features.roles.forEach((role) => {
|
||||
it(role, () => {
|
||||
expect(parser.validate(role).length).toBe(0);
|
||||
expect(hive.validate(role).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -59,7 +59,7 @@ describe('HiveSQL Create Syntax Tests', () => {
|
||||
describe('CREATE INDEX', () => {
|
||||
features.indexes.forEach((index) => {
|
||||
it(index, () => {
|
||||
expect(parser.validate(index).length).toBe(0);
|
||||
expect(hive.validate(index).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -67,7 +67,7 @@ describe('HiveSQL Create Syntax Tests', () => {
|
||||
describe('CREATE MACRO', () => {
|
||||
features.macros.forEach((macro) => {
|
||||
it(macro, () => {
|
||||
expect(parser.validate(macro).length).toBe(0);
|
||||
expect(hive.validate(macro).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -75,7 +75,7 @@ describe('HiveSQL Create Syntax Tests', () => {
|
||||
describe('CREATE CONNECTOR', () => {
|
||||
features.connectors.forEach((cnctor) => {
|
||||
it(cnctor, () => {
|
||||
expect(parser.validate(cnctor).length).toBe(0);
|
||||
expect(hive.validate(cnctor).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@ -83,7 +83,7 @@ describe('HiveSQL Create Syntax Tests', () => {
|
||||
describe('CREATE SCHEDULE QUERY', () => {
|
||||
features.scheduledQueries.forEach((sq) => {
|
||||
it(sq, () => {
|
||||
expect(parser.validate(sq).length).toBe(0);
|
||||
expect(hive.validate(sq).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -1,7 +1,7 @@
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
const features = {
|
||||
dataTypes: readSQL(__dirname, 'dataTypes.sql'),
|
||||
@ -10,7 +10,7 @@ const features = {
|
||||
describe('HiveSQL Check Data Types Tests', () => {
|
||||
features.dataTypes.forEach((dataType) => {
|
||||
it(dataType, () => {
|
||||
expect(parser.validate(dataType).length).toBe(0);
|
||||
expect(hive.validate(dataType).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -1,7 +1,7 @@
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
const features = {
|
||||
deletes: readSQL(__dirname, 'delete.sql'),
|
||||
@ -10,7 +10,7 @@ const features = {
|
||||
describe('HiveSQL Delete Syntax Tests', () => {
|
||||
features.deletes.forEach((del) => {
|
||||
it(del, () => {
|
||||
expect(parser.validate(del).length).toBe(0);
|
||||
expect(hive.validate(del).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -1,7 +1,7 @@
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
const features = {
|
||||
desc: readSQL(__dirname, 'describe.sql'),
|
||||
@ -10,7 +10,7 @@ const features = {
|
||||
describe('HiveSQL Describe Syntax Tests', () => {
|
||||
features.desc.forEach((des) => {
|
||||
it(des, () => {
|
||||
expect(parser.validate(des).length).toBe(0);
|
||||
expect(hive.validate(des).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -1,7 +1,7 @@
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
const features = {
|
||||
drops: readSQL(__dirname, 'drop.sql'),
|
||||
@ -11,13 +11,13 @@ const features = {
|
||||
describe('HiveSQL Drop Syntax Tests', () => {
|
||||
features.drops.forEach((drop) => {
|
||||
it(drop, () => {
|
||||
expect(parser.validate(drop).length).toBe(0);
|
||||
expect(hive.validate(drop).length).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
features.reloads.forEach((reload) => {
|
||||
it(reload, () => {
|
||||
expect(parser.validate(reload).length).toBe(0);
|
||||
expect(hive.validate(reload).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -1,4 +1,4 @@
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const features = {
|
||||
@ -6,11 +6,11 @@ const features = {
|
||||
};
|
||||
|
||||
describe('HiveSQL Export Syntax Tests', () => {
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
features.exports.forEach((exp) => {
|
||||
it(exp, () => {
|
||||
expect(parser.validate(exp).length).toBe(0);
|
||||
expect(hive.validate(exp).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -1,4 +1,4 @@
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const features = {
|
||||
@ -6,11 +6,11 @@ const features = {
|
||||
};
|
||||
|
||||
describe('HiveSQL Import Syntax Tests', () => {
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
features.imports.forEach((imp) => {
|
||||
it(imp, () => {
|
||||
expect(parser.validate(imp).length).toBe(0);
|
||||
expect(hive.validate(imp).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -1,7 +1,7 @@
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
const features = {
|
||||
insertFromQueries: readSQL(__dirname, 'insertFromQuery.sql'),
|
||||
@ -11,13 +11,13 @@ const features = {
|
||||
describe('HiveSQL Insert Syntax Tests', () => {
|
||||
features.insertFromQueries.forEach((ifq) => {
|
||||
it(ifq, () => {
|
||||
expect(parser.validate(ifq).length).toBe(0);
|
||||
expect(hive.validate(ifq).length).toBe(0);
|
||||
});
|
||||
});
|
||||
|
||||
features.insertFromValues.forEach((ifv) => {
|
||||
it(ifv, () => {
|
||||
expect(parser.validate(ifv).length).toBe(0);
|
||||
expect(hive.validate(ifv).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -1,7 +1,7 @@
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
const features = {
|
||||
loads: readSQL(__dirname, 'load.sql'),
|
||||
@ -10,7 +10,7 @@ const features = {
|
||||
describe('HiveSQL Load Syntax Tests', () => {
|
||||
features.loads.forEach((load) => {
|
||||
it(load, () => {
|
||||
expect(parser.validate(load).length).toBe(0);
|
||||
expect(hive.validate(load).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -1,7 +1,7 @@
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
const features = {
|
||||
merges: readSQL(__dirname, 'merge.sql'),
|
||||
@ -10,7 +10,7 @@ const features = {
|
||||
describe('HiveSQL Merge Syntax Tests', () => {
|
||||
features.merges.forEach((merge) => {
|
||||
it(merge, () => {
|
||||
expect(parser.validate(merge).length).toBe(0);
|
||||
expect(hive.validate(merge).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -1,7 +1,7 @@
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
const features = {
|
||||
selects: readSQL(__dirname, 'select.sql'),
|
||||
@ -10,7 +10,7 @@ const features = {
|
||||
describe('HiveSQL Select Syntax Tests', () => {
|
||||
features.selects.forEach((select) => {
|
||||
it(select, () => {
|
||||
expect(parser.validate(select).length).toBe(0);
|
||||
expect(hive.validate(select).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -1,7 +1,7 @@
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
const features = {
|
||||
shows: readSQL(__dirname, 'show.sql'),
|
||||
@ -10,7 +10,7 @@ const features = {
|
||||
describe('HiveSQL Show Syntax Tests', () => {
|
||||
features.shows.forEach((show) => {
|
||||
it(show, () => {
|
||||
expect(parser.validate(show).length).toBe(0);
|
||||
expect(hive.validate(show).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -1,7 +1,7 @@
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { readSQL } from 'test/helper';
|
||||
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
const features = {
|
||||
updates: readSQL(__dirname, 'update.sql'),
|
||||
@ -10,7 +10,7 @@ const features = {
|
||||
describe('HiveSQL Update Syntax Tests', () => {
|
||||
features.updates.forEach((update) => {
|
||||
it(update, () => {
|
||||
expect(parser.validate(update).length).toBe(0);
|
||||
expect(hive.validate(update).length).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
@ -1,16 +1,16 @@
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
|
||||
const randomText = `dhsdansdnkla ndjnsla ndnalks`;
|
||||
const unCompleteSQL = `CREATE TABLE`;
|
||||
|
||||
describe('Hive SQL validate invalid sql', () => {
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
test('validate random text', () => {
|
||||
expect(parser.validate(randomText).length).not.toBe(0);
|
||||
expect(hive.validate(randomText).length).not.toBe(0);
|
||||
});
|
||||
|
||||
test('validate unComplete sql', () => {
|
||||
expect(parser.validate(unCompleteSQL).length).not.toBe(0);
|
||||
expect(hive.validate(unCompleteSQL).length).not.toBe(0);
|
||||
});
|
||||
});
|
||||
|
@ -1,15 +1,15 @@
|
||||
import { AbstractParseTreeVisitor } from 'antlr4ng';
|
||||
|
||||
import HiveSQL from 'src/parser/hive';
|
||||
import { HiveSQL } from 'src/parser/hive';
|
||||
import { HiveSqlParserVisitor } from 'src/lib/hive/HiveSqlParserVisitor';
|
||||
import { ProgramContext, TableNameContext } from 'src/lib/hive/HiveSqlParser';
|
||||
|
||||
describe('HiveSQL Visitor Tests', () => {
|
||||
const expectTableName = 'dm_gis.dlv_addr_tc_count';
|
||||
const sql = `select citycode,tc,inc_day from ${expectTableName} where inc_day='20190501' limit 100;`;
|
||||
const parser = new HiveSQL();
|
||||
const hive = new HiveSQL();
|
||||
|
||||
const parseTree = parser.parse(sql, (error) => {
|
||||
const parseTree = hive.parse(sql, (error) => {
|
||||
console.error('Parse error:', error);
|
||||
});
|
||||
|
||||
|
Reference in New Issue
Block a user