test: hiveSQL createStatement unit tests

This commit is contained in:
hayden 2023-07-06 17:28:43 +08:00 committed by Ziv
parent e97e0b2ef6
commit 28de63e1f0
10 changed files with 297 additions and 0 deletions

View File

@ -0,0 +1,90 @@
import HiveSQL from '../../../../src/parser/hive';
import { readSQL } from '../../../helper';

const parser = new HiveSQL();

// SQL fixtures: one file per CREATE-statement variant under this directory.
const features = {
  databases: readSQL(__dirname, 'createDatabase.sql'),
  tables: readSQL(__dirname, 'createTable.sql'),
  views: readSQL(__dirname, 'createView.sql'),
  functions: readSQL(__dirname, 'createFunction.sql'),
  roles: readSQL(__dirname, 'createRole.sql'),
  indexes: readSQL(__dirname, 'createIndex.sql'),
  macros: readSQL(__dirname, 'createMacro.sql'),
  connectors: readSQL(__dirname, 'createConnector.sql'),
  scheduledQueries: readSQL(__dirname, 'createScheduledQuery.sql'),
};

// Maps each fixture key to its suite title. The nine original suites were
// copy-pasted with identical bodies, so they are driven from this table
// instead. `indexes` is deliberately absent: the CREATE INDEX suite was
// disabled in the original file (presumably the grammar rejects it —
// TODO confirm), and the fixture is still loaded above so re-enabling it
// only requires adding an entry here.
const suites = {
  databases: 'CREATE DATABASE',
  tables: 'CREATE TABLE',
  views: 'CREATE VIEW',
  functions: 'CREATE FUNCTION',
  roles: 'CREATE ROLE',
  macros: 'CREATE MACRO',
  connectors: 'CREATE CONNECTOR',
  scheduledQueries: 'CREATE SCHEDULE QUERY',
};

describe('Hive Create Syntax Tests', () => {
  Object.entries(suites).forEach(([key, title]) => {
    describe(title, () => {
      features[key].forEach((sql) => {
        // One test per statement, named by the statement itself.
        // validate() returns a list of errors; valid SQL yields [].
        it(sql, () => {
          expect(parser.validate(sql).length).toBe(0);
        });
      });
    });
  });
});

View File

@ -0,0 +1,9 @@
-- Minimal connector: only the required TYPE and URL clauses, guarded by IF NOT EXISTS.
CREATE CONNECTOR IF NOT EXISTS mysql_local
TYPE 'mysql'
URL 'jdbc:mysql://localhost:5432';
-- Full form: optional COMMENT plus DCPROPERTIES key/value connector properties.
CREATE CONNECTOR pg_local
TYPE 'postgres'
URL 'jdbc:postgresql://localhost:5432'
COMMENT '这是一个 postgres 连接器'
WITH DCPROPERTIES ("hive.sql.dbcp.username"="postgres", "hive.sql.dbcp.password"="postgres");

View File

@ -0,0 +1,15 @@
-- Minimal forms; DATABASE and SCHEMA are interchangeable keywords in Hive DDL.
CREATE DATABASE mydb;
CREATE SCHEMA myschema;
-- Full form exercising every optional clause:
-- COMMENT, LOCATION, MANAGEDLOCATION, and DBPROPERTIES key/value pairs.
CREATE DATABASE IF NOT EXISTS mydb
COMMENT 'my test db'
LOCATION '/myhive/myoutdb'
MANAGEDLOCATION '/myhive/myindb'
WITH DBPROPERTIES ('creator'='ypc','date'='2021-03-09');
-- Same full form spelled with the SCHEMA keyword.
CREATE SCHEMA IF NOT EXISTS myschema
COMMENT 'my test myschema'
LOCATION '/myhive/myoutschema'
MANAGEDLOCATION '/myhive/myinschema'
WITH DBPROPERTIES ('creator'='ypc','date'='2021-03-09');

View File

@ -0,0 +1,12 @@
-- Permanent and temporary UDF registration referencing a class on the classpath.
CREATE FUNCTION base_analizer AS 'com.udf.BaseFieldUDF';
CREATE TEMPORARY FUNCTION flat_analizer AS 'com.udtf.EventJsonUDTF';
-- UDF whose implementation is shipped via a single USING JAR resource.
CREATE FUNCTION base_analizer
AS "com.BaseFieldUDF"
USING JAR 'hdfs://hadoop12:9000/user/hive/jars/hivefunction-1.0-SNAPSHOT.jar';
-- UDF with a comma-separated resource list: JAR, FILE and ARCHIVE.
-- Fix: the statement previously lacked its terminating ';', unlike every
-- other statement in these fixtures.
CREATE FUNCTION test_udf
AS "com.BaseFieldUDF"
USING JAR 'hdfs://hadoop12:9000/user/hive/jars/hivetestfunc-1.0-SNAPSHOT.jar'
, FILE 'hdfs://hadoop12:9000/user/hive/files/hivetestfunc.java'
, ARCHIVE 'hdfs://hadoop12:9000/user/hive/files/hivetestfunc.txt';

View File

@ -0,0 +1,36 @@
-- NOTE(review): CREATE INDEX was removed in Hive 3.x; the matching test suite
-- is disabled in createStatement.test.ts — confirm before re-enabling.
CREATE INDEX table01_index
ON TABLE table01 (column2)
AS 'org.apache.hadoop.hive.ql.index.compact.CompactIndexHandler'
COMMENT '这是一个索引';
CREATE INDEX table02_index
ON TABLE table02 (column3)
AS 'org.apache.hadoop.hive.ql.index.compact.CompactIndexHandler'
WITH DEFERRED REBUILD;
CREATE INDEX table03_index
ON TABLE table03 (column4)
AS 'COMPACT'
TBLPROPERTIES ("prop1"="value1", "prop2"="value2");
-- Fix: a stray ';' after STORED BY previously terminated this statement early,
-- leaving IDXPROPERTIES as a dangling fragment. Per the Hive CREATE INDEX
-- grammar, IDXPROPERTIES precedes IN TABLE / STORED BY within one statement.
CREATE INDEX table04_index
ON TABLE table04 (column5)
AS 'COMPACT'
IDXPROPERTIES ("prop3"="value3", "prop4"="value4")
IN TABLE indextable1
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler';
CREATE INDEX table05_index
ON TABLE table05 (column6)
AS 'COMPACT'
STORED AS RCFILE;
CREATE INDEX table06_index
ON TABLE table06 (column7)
AS 'COMPACT'
ROW FORMAT DELIMITED FIELDS TERMINATED BY '\t'
STORED AS TEXTFILE;

View File

@ -0,0 +1,21 @@
-- Macro with no arguments returning a constant.
CREATE TEMPORARY MACRO fixed_number() 42;
-- Macros with one and two typed arguments.
CREATE TEMPORARY MACRO string_len_plus_two(x STRING) length(x) + 2;
CREATE TEMPORARY MACRO simple_add (x INT, y INT) x + y;
-- Macro whose body is a multi-line expression (IF wrapping a CASE).
CREATE TEMPORARY MACRO get_degree(degree_type STRING)
IF (degree_type IS NOT NULL,
CASE degree_type
WHEN 1 THEN '小学'
WHEN 2 THEN '初中'
WHEN 3 THEN '职业高中'
WHEN 4 THEN '中专'
WHEN 5 THEN '高中'
WHEN 6 THEN '大专'
WHEN 7 THEN '本科'
WHEN 8 THEN '硕士'
WHEN 9 THEN '博士'
ELSE NULL
END,
NULL);

View File

@ -0,0 +1 @@
-- Minimal CREATE ROLE statement.
CREATE ROLE std_user;

View File

@ -0,0 +1,20 @@
-- CRON-scheduled form: the query follows AS directly.
CREATE SCHEDULED QUERY sc1
CRON '0 */10 * * * ? *'
AS INSERT INTO t VALUES (1);
CREATE SCHEDULED QUERY t_analyze
CRON '0 */1 * * * ? *'
AS ANALYZE TABLE t
COMPUTE STATISTICS FOR COLUMNS;
-- EVERY-interval form with OFFSET BY, EXECUTED AS, ENABLE/DISABLE and DEFINED AS.
-- NOTE(review): EXECUTED AS normally names the executing user in Hive;
-- 'SELECT * FROM aa' as that value looks suspect — confirm fixture intent.
CREATE SCHEDULED QUERY s_day
EVERY 2 DAY OFFSET BY 'offsetTs'
EXECUTED AS 'SELECT * FROM aa'
ENABLE
DEFINED AS INSERT INTO t VALUES (1);
-- EVERY HOUR with an AT time-of-day offset, created in the DISABLE state.
CREATE SCHEDULED QUERY s_hour
EVERY HOUR AT '0:07:30'
EXECUTED AS 'SELECT * FROM aa'
DISABLE
DEFINED AS INSERT INTO t VALUES (1);

View File

@ -0,0 +1,54 @@
-- TEMPORARY and TRANSACTIONAL table forms.
CREATE TEMPORARY TABLE list_bucket_multiple (col1 STRING, col2 INT, col3 STRING);
CREATE TRANSACTIONAL TABLE transactional_table_test(
key STRING,
value STRING
)
PARTITIONED BY(ds STRING) STORED AS ORC;
-- LIKE (copy schema) and CTAS (create from query) forms.
CREATE TABLE IF NOT EXISTS copy_table LIKE origin_table;
CREATE TABLE IF NOT EXISTS derived_table AS SELECT * FROM origin_table;
-- Backquoted identifiers, complex types (ARRAY/MAP), partitioning, and
-- delimited row format with collection/map separators.
CREATE TABLE `mydb.t1`(
`id` INT,
`dept_no` INT,
`addr` STRING,
`tel` STRING,
`hobby` ARRAY<STRING>,
`add` MAP<STRING,STRING>
)
PARTITIONED BY(`date` STRING)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
COLLECTION ITEMS TERMINATED BY '-'
MAP KEYS TERMINATED BY ':';
-- EXTERNAL table with explicit LOCATION and TBLPROPERTIES.
CREATE EXTERNAL TABLE mydb.ext_table(
id INT,
name STRING,
hobby ARRAY<STRING>,
add MAP<STRING,STRING>
)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
COLLECTION ITEMS TERMINATED BY '-'
MAP KEYS TERMINATED BY ':'
LOCATION '/user/mydb/ext_table'
TBLPROPERTIES('author'='hayden','desc'='一个外部测试表')
;
-- MANAGED table with column comments, clustering/sorting into buckets,
-- and a custom storage handler.
CREATE MANAGED TABLE managed_table (
id INT COMMENT 'ID',
name STRING COMMENT '名称'
)
COMMENT '测试分桶'
CLUSTERED BY(id) SORTED BY (id) INTO 4 BUCKETS
STORED BY 'org.apache.hadoop.hive.hbase.HBaseStorageHandler';
-- Skewed-table form with SKEWED BY ... ON and STORED AS DIRECTORIES.
CREATE TABLE list_bucket_multiple (
col1 STRING,
col2 INT,
col3 STRING
)
SKEWED BY (col1, col2) ON (('s1',1), ('s3',3), ('s13',13), ('s78',78))
STORED AS DIRECTORIES;

View File

@ -0,0 +1,39 @@
-- Common View
-- Basic view with IF NOT EXISTS and a COMMENT clause.
CREATE VIEW IF NOT EXISTS mydb.bro_view
COMMENT '一个测试视图'
AS SELECT * FROM mydb.sale_tbl;
-- View with an explicit column list (each column carrying its own COMMENT)
-- plus TBLPROPERTIES and a filtered SELECT body.
CREATE VIEW mydb.task_view (
taskId COMMENT '任务id',
taskName COMMENT '任务名称',
taskRunTime COMMENT '任务运行时长'
)
COMMENT '一个任务信息视图'
TBLPROPERTIES(
'author'='hayden'
)
AS SELECT DISTINCT id, `name`, runtime
FROM task_tbl
WHERE type='day';
-- Materialized View
-- Materialized view with a storage format (STORED AS PARQUET).
CREATE MATERIALIZED VIEW druid_wiki_mv
COMMENT '这是一个物化视图'
STORED AS PARQUET
AS
SELECT page, `user`, c_added, c_removed
FROM src;
-- Full materialized-view form: DISABLE REWRITE, PARTITIONED ON, row format,
-- LOCATION, TBLPROPERTIES, and a multi-table join query.
CREATE MATERIALIZED VIEW IF NOT EXISTS mv2
DISABLE REWRITE
PARTITIONED ON (lo_revenue)
ROW FORMAT DELIMITED FIELDS TERMINATED BY ','
LOCATION '/user/hive/materialized_view'
TBLPROPERTIES('author'='hayden','desc'='一个物化视图')
AS
SELECT lo_revenue,
lo_extendedprice * lo_discount AS d_price,
lo_revenue - lo_supplycost AS d_balance
FROM customer, dates, lineorder, part, supplier
WHERE lo_orderdate = d_datekey;