Test/hive dml (#155)
* feat: add showIndex parser rule
* test: uncomment show index test cases
* test: add unit tests about DML syntax to HiveSQL
* test: add unit tests about export and import syntax to HiveSQL
* refactor: recompile hive grammar
* test: correct description of HiveSQL unit tests
This commit is contained in:
parent 6dad62ddf4
commit 885b85e842
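Every new syntax suite in this commit follows the same shape: read a fixture file with the readSQL helper, then assert that parser.validate() reports zero errors for each statement. A condensed sketch of that contract (a minimal example, not part of the diff; the import path assumes the repo's src layout and the SQL literal is illustrative):

import HiveSQL from './src/parser/hive'; // assumed path, mirrors the tests below

const parser = new HiveSQL();

// validate() returns the list of syntax errors found in the input;
// an empty list means the statement parses cleanly.
const errors = parser.validate(`DELETE FROM tbl1 WHERE id = 1;`);
console.log(errors.length); // expected: 0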
@@ -346,6 +346,7 @@ showStatement
     | KW_SHOW KW_CONF StringLiteral
     | KW_SHOW KW_RESOURCE (KW_PLAN rp_name=id_ | KW_PLANS)
     | KW_SHOW KW_DATACONNECTORS
+    | KW_SHOW KW_FORMATTED? ( KW_INDEX | KW_INDEXES ) KW_ON tableName (from_in id_)?
     ;

 showTablesFilterExpr
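This new alternative is what the previously commented-out SHOW INDEX fixtures (uncommented further down in show.sql) exercise. A quick sketch of what it now accepts (assumed import path; the two statements are taken from that fixture):

import HiveSQL from './src/parser/hive'; // assumed path

const parser = new HiveSQL();

// KW_FORMATTED? is optional, KW_INDEX and KW_INDEXES are interchangeable,
// and (from_in id_)? allows an optional FROM/IN database qualifier.
console.log(parser.validate(`SHOW INDEX ON idx_tbl;`).length); // expected: 0
console.log(parser.validate(`SHOW FORMATTED INDEXES ON idx_tbl2 FROM db_1;`).length); // expected: 0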
@@ -1,4 +1,4 @@
-// Generated from /Users/xuxiaoqi/Documents/dt-sql-parser-copy/src/grammar/hive/HiveSqlLexer.g4 by ANTLR 4.9.0-SNAPSHOT
+// Generated from /Users/hayden/Desktop/dt-works/dt-sql-parser/src/grammar/hive/HiveSqlLexer.g4 by ANTLR 4.9.0-SNAPSHOT


 import { ATN } from "antlr4ts/atn/ATN";
File diff suppressed because one or more lines are too long
@@ -1,4 +1,4 @@
-// Generated from /Users/xuxiaoqi/Documents/dt-sql-parser-copy/src/grammar/hive/HiveSqlParser.g4 by ANTLR 4.9.0-SNAPSHOT
+// Generated from /Users/hayden/Desktop/dt-works/dt-sql-parser/src/grammar/hive/HiveSqlParser.g4 by ANTLR 4.9.0-SNAPSHOT


 import { ParseTreeListener } from "antlr4ts/tree/ParseTreeListener";
@@ -1,4 +1,4 @@
-// Generated from /Users/xuxiaoqi/Documents/dt-sql-parser-copy/src/grammar/hive/HiveSqlParser.g4 by ANTLR 4.9.0-SNAPSHOT
+// Generated from /Users/hayden/Desktop/dt-works/dt-sql-parser/src/grammar/hive/HiveSqlParser.g4 by ANTLR 4.9.0-SNAPSHOT


 import { ParseTreeVisitor } from "antlr4ts/tree/ParseTreeVisitor";
@@ -1,8 +1,9 @@
-import { HiveSqlListener } from '../../../src/lib/hive/HiveSqlListener';
+import { ProgramContext } from '../../../src/lib/hive/HiveSqlParser';
+import { HiveSqlParserListener } from '../../../src/lib/hive/HiveSqlParserListener';
 import HiveSQL from '../../../src/parser/hive';


-describe('Hive SQL Listener Tests', () => {
+describe('HiveSQL Listener Tests', () => {
     const parser = new HiveSQL();
     test('Listener enterSelectList', async () => {
         const expectTableName = 'userName';
@@ -10,28 +11,28 @@ describe('Hive SQL Listener Tests', () => {
         const parserTree = parser.parse(sql);

         let result = '';
-        class MyListener implements HiveSqlListener {
+        class MyListener implements HiveSqlParserListener {
             enterSelectItem(ctx) {
                 result = ctx.text;
             }
         }
         const listenTableName: any = new MyListener();

-        await parser.listen(listenTableName, parserTree);
+        await parser.listen(listenTableName, parserTree as ProgramContext);
         expect(result).toBe(expectTableName.toUpperCase());
     });
     test('Listener enterCreateTable', async () => {
         const sql = `drop table table_name;`;
         const parserTree = parser.parse(sql);
         let result = '';
-        class MyListener implements HiveSqlListener {
+        class MyListener implements HiveSqlParserListener {
             enterDropTableStatement(ctx) {
                 result = ctx.text;
             }
         }
         const listenTableName: any = new MyListener();

-        await parser.listen(listenTableName, parserTree);
+        await parser.listen(listenTableName, parserTree as ProgramContext);
         expect(result).toBe('DROPTABLETABLE_NAME');
     });
 });
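The new `as ProgramContext` casts reflect that listen() is typed against the generated ProgramContext root rather than a generic parse tree after the regeneration. A minimal standalone sketch of the updated call pattern (assumed paths; the partial-listener-plus-any trick mirrors the tests above, since the generated listener methods are optional):

import { ProgramContext } from './src/lib/hive/HiveSqlParser'; // assumed path
import { HiveSqlParserListener } from './src/lib/hive/HiveSqlParserListener'; // assumed path
import HiveSQL from './src/parser/hive'; // assumed path

async function firstSelectItem(sql: string): Promise<string> {
    const parser = new HiveSQL();
    const tree = parser.parse(sql);

    let result = '';
    // Only the callback we care about is implemented, so the instance is
    // passed as `any`, exactly as the tests above do.
    class MyListener implements HiveSqlParserListener {
        enterSelectItem(ctx) {
            result = ctx.text;
        }
    }
    await parser.listen(new MyListener() as any, tree as ProgramContext);
    return result;
}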
@@ -1,6 +1,6 @@
 import HiveSQL from '../../../src/parser/hive';

-describe('Hive SQL Syntax Tests', () => {
+describe('HiveSQL Syntax Tests', () => {
     const parser = new HiveSQL();
     test('Create Table Statement', () => {
         const sql = 'CREATE TABLE person(name STRING,age INT);';
@@ -7,7 +7,7 @@ const features = {
     aborts: readSQL(__dirname, 'abort.sql'),
 };

-describe('Hive Abort Syntax Tests', () => {
+describe('HiveSQL Abort Syntax Tests', () => {
     features.aborts.forEach((ab) => {
         it(ab, () => {
             expect(parser.validate(ab).length).toBe(0);
@@ -12,7 +12,7 @@ const features = {
     scheduleQueries: readSQL(__dirname, 'alterScheduleQuery.sql'),
 };

-describe('Hive Alter Syntax Tests', () => {
+describe('HiveSQL Alter Syntax Tests', () => {
     describe('ALTER DATABASE', () => {
         features.databases.forEach((db) => {
             it(db, () => {
@@ -15,7 +15,7 @@ const features = {
     scheduledQueries: readSQL(__dirname, 'createScheduledQuery.sql')
 };

-describe('Hive Create Syntax Tests', () => {
+describe('HiveSQL Create Syntax Tests', () => {
     describe('CREATE DATABASE', () => {
         features.databases.forEach((database) => {
             it(database, () => {
test/parser/hive/syntax/delete.test.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
+import HiveSQL from '../../../../src/parser/hive';
+import { readSQL } from '../../../helper';
+
+const parser = new HiveSQL();
+
+const features = {
+    deletes: readSQL(__dirname, 'delete.sql'),
+};
+
+describe('HiveSQL Delete Syntax Tests', () => {
+    features.deletes.forEach((del) => {
+        it(del, () => {
+            expect(parser.validate(del).length).toBe(0);
+        });
+    });
+});
@@ -7,7 +7,7 @@ const features = {
     desc: readSQL(__dirname, 'describe.sql'),
 };

-describe('Hive Describe Syntax Tests', () => {
+describe('HiveSQL Describe Syntax Tests', () => {
     features.desc.forEach((des) => {
         it(des, () => {
             expect(parser.validate(des).length).toBe(0);
@@ -8,7 +8,7 @@ const features = {
     reloads: readSQL(__dirname, 'reload.sql')
 };

-describe('Hive Drop Syntax Tests', () => {
+describe('HiveSQL Drop Syntax Tests', () => {
     features.drops.forEach((drop) => {
         it(drop, () => {
             expect(parser.validate(drop).length).toBe(0);
test/parser/hive/syntax/exportStatement.test.ts (new file, 17 lines)
@@ -0,0 +1,17 @@
+import HiveSQL from '../../../../src/parser/hive';
+import { readSQL } from '../../../helper';
+
+const features = {
+    exports: readSQL(__dirname, 'export.sql')
+};
+
+describe('HiveSQL Export Syntax Tests', () => {
+    const parser = new HiveSQL();
+
+    features.exports.forEach((exp) => {
+        it(exp, () => {
+            expect(parser.validate(exp).length).toBe(0);
+        });
+    });
+});
+
test/parser/hive/syntax/fixtures/delete.sql (new file, 3 lines)
@@ -0,0 +1,3 @@
+DELETE FROM tbl;
+
+DELETE FROM tbl1 WHERE id = 1;
test/parser/hive/syntax/fixtures/export.sql (new file, 6 lines)
@@ -0,0 +1,6 @@
+EXPORT TABLE tbl TO 'hdfs_exports_location/department';
+
+EXPORT TABLE employee
+PARTITION(emp_country="in", emp_state="ka")
+TO 'hdfs_exports_location/employee'
+FOR REPLICATION('eventid');
test/parser/hive/syntax/fixtures/import.sql (new file, 8 lines)
@@ -0,0 +1,8 @@
+IMPORT FROM 'hdfs_exports_location/department';
+
+IMPORT TABLE employee PARTITION (emp_country="us", emp_state="tn") FROM 'hdfs_exports_location/employee';
+
+IMPORT EXTERNAL TABLE employee
+PARTITION (emp_country="us", emp_state="tn")
+FROM 'hdfs_exports_location/employee'
+LOCATION 'import_target_path';
test/parser/hive/syntax/fixtures/insertFormValues.sql (new file, 11 lines)
@@ -0,0 +1,11 @@
+INSERT INTO TABLE students
+VALUES ('fred flintstone', 35, 1.28), ('barney rubble', 32, 2.32);
+
+INSERT INTO TABLE pageviews PARTITION (datestamp = '2014-09-23')
+VALUES ('jsmith', 'mail.com', 'sports.com'), ('jdoe', 'mail.com', null);
+
+INSERT INTO TABLE pageviews PARTITION (datestamp)
+VALUES ('tjohnson', 'sports.com', 'finance.com', '2014-09-23'), ('tlee', 'finance.com', null, '2014-09-21');
+
+INSERT INTO TABLE pageviews
+VALUES ('tjohnson', 'sports.com', 'finance.com', '2014-09-23'), ('tlee', 'finance.com', null, '2014-09-21');
test/parser/hive/syntax/fixtures/insertFromQuery.sql (new file, 79 lines)
@@ -0,0 +1,79 @@
+-- Inserting data into Hive Tables from queries
+INSERT INTO table_name PARTITION (partition_col)
+SELECT col1, col2, partition_col
+FROM source_table;
+
+FROM source_table
+INSERT OVERWRITE TABLE table_name PARTITION (partition_col='value1')
+SELECT col1, col2
+WHERE partition_col = 'value1'
+INSERT INTO TABLE table_name PARTITION (partition_col='value2')
+SELECT col1, col2
+WHERE partition_col = 'value2';
+
+FROM page_view_stg pvs
+INSERT OVERWRITE TABLE page_view PARTITION(dt='2008-06-08', country)
+SELECT pvs.viewTime, pvs.userid, pvs.page_url, pvs.referrer_url, null, null, pvs.ip, pvs.cnt;
+
+
+-- Writing data into the filesystem from queries
+INSERT OVERWRITE LOCAL DIRECTORY '/path/to/output'
+SELECT col1, col2
+FROM table_name;
+
+INSERT OVERWRITE DIRECTORY '/path/to/output'
+STORED AS PARQUET
+SELECT col1, col2
+FROM table_name;
+
+INSERT INTO table_name PARTITION (year, month, day)
+SELECT col1, col2,
+    CASE
+        WHEN month = 'January' THEN 2023
+        WHEN month = 'February' THEN 2023
+        ELSE 2024
+    END AS year,
+    CASE
+        WHEN month = 'January' THEN 1
+        WHEN month = 'February' THEN 2
+        ELSE 3
+    END AS month,
+    CAST(day AS int) AS day
+FROM source_table;
+
+INSERT INTO table_name PARTITION (country, state)
+SELECT col1, col2,
+    CONCAT(country, '_', state) AS country_state
+FROM source_table;
+
+INSERT INTO table_name PARTITION (country, state)
+SELECT col1, col2, country, state
+FROM (
+    SELECT col1, col2,
+        CASE
+            WHEN country = 'USA' THEN 'United States'
+            ELSE country
+        END AS country,
+        CASE
+            WHEN country = 'USA' THEN 'NA'
+            ELSE state
+        END AS state
+    FROM source_table
+) subquery;
+
+INSERT OVERWRITE LOCAL DIRECTORY '/path/to/output'
+ROW FORMAT DELIMITED
+FIELDS TERMINATED BY ','
+ESCAPED BY '^'
+COLLECTION ITEMS TERMINATED BY '.'
+MAP KEYS TERMINATED BY ':'
+LINES TERMINATED BY '\n'
+NULL DEFINED AS 'x'
+SELECT col1, col2
+FROM table_name;
+
+INSERT OVERWRITE LOCAL DIRECTORY '/path/to/output'
+ROW FORMAT SERDE 'org.apache.hadoop.hive.serde2.avro.AvroSerDe'
+STORED AS AVRO
+SELECT col1, col2
+FROM table_name;
test/parser/hive/syntax/fixtures/load.sql (new file, 7 lines)
@@ -0,0 +1,7 @@
+LOAD DATA INPATH 'hdfs://namenode:9000/user/hive/project/data1' INTO TABLE tbl1;
+
+LOAD DATA LOCAL INPATH '/user/hive/project/data1'
+OVERWRITE INTO TABLE tablename
+PARTITION (pt1=1, pt2=2)
+INPUTFORMAT 'com.apache.hadoop.textInputFormat'
+SERDE 'JsonSerDe';
test/parser/hive/syntax/fixtures/merge.sql (new file, 10 lines)
@@ -0,0 +1,10 @@
+MERGE INTO demo.a AS T1 USING demo.b AS T2
+ON T1.id = T2.id
+WHEN MATCHED THEN UPDATE SET name = 'wf1'
+WHEN NOT MATCHED THEN INSERT VALUES(T2.id,T2.name);
+
+MERGE INTO demo.a AS T1 USING demo.b AS T2
+ON T1.id = T2.id
+WHEN MATCHED AND sex='male' THEN UPDATE SET name = 'wf1'
+WHEN MATCHED AND sex='female' THEN UPDATE SET age = 10
+WHEN NOT MATCHED AND age>10 THEN INSERT VALUES(T2.id,T2.name);
@@ -39,9 +39,9 @@ SHOW TBLPROPERTIES tblname("foo");
 SHOW CREATE TABLE db.tbl1;

 -- Show Indexes
--- SHOW INDEX ON idx_tbl;
+SHOW INDEX ON idx_tbl;

--- SHOW FORMATTED INDEXES ON idx_tbl2 FROM db_1;
+SHOW FORMATTED INDEXES ON idx_tbl2 FROM db_1;

 -- Show Columns
 SHOW COLUMNS FROM tble;
test/parser/hive/syntax/fixtures/update.sql (new file, 7 lines)
@@ -0,0 +1,7 @@
+UPDATE table_name
+SET col1 = new_value;
+
+UPDATE table_name
+SET col1 = new_value,
+    col2 = new_value2
+WHERE id=1;
test/parser/hive/syntax/importStatement.test.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
+import HiveSQL from '../../../../src/parser/hive';
+import { readSQL } from "../../../helper";
+
+const features = {
+    imports: readSQL(__dirname, 'import.sql')
+};
+
+describe('HiveSQL Import Syntax Tests', () => {
+    const parser = new HiveSQL();
+
+    features.imports.forEach((imp) => {
+        it(imp, () => {
+            expect(parser.validate(imp).length).toBe(0);
+        });
+    });
+});
test/parser/hive/syntax/insertStatement.test.ts (new file, 23 lines)
@@ -0,0 +1,23 @@
+import HiveSQL from '../../../../src/parser/hive';
+import { readSQL } from '../../../helper';
+
+const parser = new HiveSQL();
+
+const features = {
+    insertFromQueries: readSQL(__dirname, 'insertFromQuery.sql'),
+    insertFromValues: readSQL(__dirname, 'insertFormValues.sql')
+};
+
+describe('HiveSQL Insert Syntax Tests', () => {
+    features.insertFromQueries.forEach((ifq) => {
+        it(ifq, () => {
+            expect(parser.validate(ifq).length).toBe(0);
+        });
+    });
+
+    features.insertFromValues.forEach((ifv) => {
+        it(ifv, () => {
+            expect(parser.validate(ifv).length).toBe(0);
+        });
+    });
+});
test/parser/hive/syntax/loadStatement.test.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
+import HiveSQL from '../../../../src/parser/hive';
+import { readSQL } from '../../../helper';
+
+const parser = new HiveSQL();
+
+const features = {
+    loads: readSQL(__dirname, 'load.sql'),
+};
+
+describe('HiveSQL Load Syntax Tests', () => {
+    features.loads.forEach((load) => {
+        it(load, () => {
+            expect(parser.validate(load).length).toBe(0);
+        });
+    });
+});
test/parser/hive/syntax/merge.test.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
+import HiveSQL from '../../../../src/parser/hive';
+import { readSQL } from '../../../helper';
+
+const parser = new HiveSQL();
+
+const features = {
+    merges: readSQL(__dirname, 'merge.sql'),
+};
+
+describe('HiveSQL Merge Syntax Tests', () => {
+    features.merges.forEach((merge) => {
+        it(merge, () => {
+            expect(parser.validate(merge).length).toBe(0);
+        });
+    });
+});
@@ -7,7 +7,7 @@ const features = {
     shows: readSQL(__dirname, 'show.sql'),
 };

-describe('Hive Show Syntax Tests', () => {
+describe('HiveSQL Show Syntax Tests', () => {
     features.shows.forEach((show) => {
         it(show, () => {
             expect(parser.validate(show).length).toBe(0);
test/parser/hive/syntax/update.test.ts (new file, 16 lines)
@@ -0,0 +1,16 @@
+import HiveSQL from '../../../../src/parser/hive';
+import { readSQL } from '../../../helper';
+
+const parser = new HiveSQL();
+
+const features = {
+    updates: readSQL(__dirname, 'update.sql'),
+};
+
+describe('HiveSQL Update Syntax Tests', () => {
+    features.updates.forEach((update) => {
+        it(update, () => {
+            expect(parser.validate(update).length).toBe(0);
+        });
+    });
+});
@@ -1,8 +1,9 @@
 import { AbstractParseTreeVisitor } from 'antlr4ts/tree/AbstractParseTreeVisitor';
-import { HiveSqlVisitor } from '../../../src/lib/hive/HiveSqlVisitor';
+import { HiveSqlParserVisitor } from '../../../src/lib/hive/HiveSqlParserVisitor';
 import HiveSQL from '../../../src/parser/hive';
+import { ProgramContext } from '../../../src/lib/hive/HiveSqlParser';

-describe('Hive SQL Visitor Tests', () => {
+describe('HiveSQL Visitor Tests', () => {
     const expectTableName = 'dm_gis.dlv_addr_tc_count';
     const sql = `select citycode,tc,inc_day from ${expectTableName} where inc_day='20190501' limit 100;`;
     const parser = new HiveSQL();
@@ -13,7 +14,7 @@ describe('Hive SQL Visitor Tests', () => {

     test('Visitor visitTableName', () => {
         let result = '';
-        class MyVisitor extends AbstractParseTreeVisitor<any> implements HiveSqlVisitor<any> {
+        class MyVisitor extends AbstractParseTreeVisitor<any> implements HiveSqlParserVisitor<any> {

            defaultResult() {
                return result;
@@ -25,7 +26,7 @@ describe('Hive SQL Visitor Tests', () => {
         }

         const visitor = new MyVisitor();
-        visitor.visit(parserTree);
+        visitor.visit(parserTree as ProgramContext);

         expect(result).toBe(expectTableName);
     });
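Same renaming on the visitor side: HiveSqlVisitor becomes HiveSqlParserVisitor, and the tree handed to visit() is cast to ProgramContext. A minimal standalone sketch of that pattern (assumed paths; the SQL literal is illustrative, and partial implementation works because the generated visitor methods are optional):

import { AbstractParseTreeVisitor } from 'antlr4ts/tree/AbstractParseTreeVisitor';
import { HiveSqlParserVisitor } from './src/lib/hive/HiveSqlParserVisitor'; // assumed path
import { ProgramContext } from './src/lib/hive/HiveSqlParser'; // assumed path
import HiveSQL from './src/parser/hive'; // assumed path

const parser = new HiveSQL();
const tree = parser.parse(`select id from tbl;`);

let tableName = '';
class MyVisitor extends AbstractParseTreeVisitor<any> implements HiveSqlParserVisitor<any> {
    defaultResult() {
        return tableName;
    }
    // Called for each tableName node; unimplemented node types fall back
    // to visitChildren(), which keeps walking the tree.
    visitTableName(ctx) {
        tableName = ctx.text;
    }
}
new MyVisitor().visit(tree as ProgramContext);
console.log(tableName); // expected: 'tbl'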