feat: optimize suggestion (#231)
* feat: optimize the strategy of finding the right range
* test: apply the commentOtherLine util to all suggestion tests
* test: uncomment suggestion test cases
* test: add suggestion test cases for multiple statements
* chore: improve comments
* test: update log info in tests
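The `commentOtherLine` helper mentioned above is imported from `'../../../helper'` in the diff below, but its implementation is not part of this change. Based only on how it is called here (`commentOtherLine(tokenSql, pos.lineNumber)`), a minimal sketch of what such a helper could look like follows; the body and the parameter name are illustrative assumptions, not the project's actual code:

// Hypothetical sketch of commentOtherLine, inferred from its call sites below.
// Assumption: it comments out every line except the one under the caret using
// the SQL line-comment prefix "--", so only the statement being tested is parsed.
export function commentOtherLine(sql: string, caretLine: number): string {
    return sql
        .split('\n')
        .map((line, index) => (index + 1 === caretLine ? line : `-- ${line}`))
        .join('\n');
}

Applied to every suggestion test, this appears to let each case run against the shared fixture while neutralizing the surrounding statements.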
test/parser/spark/suggestion/fixtures/multipleStatement.sql (new file, 21 lines)
@@ -0,0 +1,21 @@
CREATE TABLE VALUES -- unfinished

CREATE TABLE student (id INT, name STRING, age INT) STORED AS ORC;

CREATE SCHEMA customer_db WITH DBPROPERTIES (ID=001, Name='John');

ALTER TABLE StudentInfo ADD COLUMNS (LastName string, DOB timestamp);

SELECT * FROM db. ; -- unfinished

INSERT INTO weather (date, city, temp_hi, temp_lo) VALUES ('1994-11-29', 'Hayward', 54, 37);

DESC EXTENDED students name;

INSERT INTO weather (date, city, temp_hi, temp_lo) VALUES ('1994-11-29', 'Hayward', 54, 37); -- unfinished

DROP TABLE IF EXISTS employable;

DROP TEMPORARY FUNCTION test_avg;

INSERT INTO products (product_no, name, price) SELECT * FROM db. ; -- unfinished
test/parser/spark/suggestion/multipleStatement.test.ts (new file, 69 lines)
@@ -0,0 +1,69 @@
|
||||
import fs from 'fs';
|
||||
import path from 'path';
|
||||
import { CaretPosition, SyntaxContextType } from '../../../../src/parser/common/basic-parser-types';
|
||||
import SparkSQL from '../../../../src/parser/spark';
|
||||
|
||||
const syntaxSql = fs.readFileSync(
|
||||
path.join(__dirname, 'fixtures', 'multipleStatement.sql'),
|
||||
'utf-8'
|
||||
);
|
||||
|
||||
describe('SparkSQL Multiple Statements Syntax Suggestion', () => {
|
||||
const parser = new SparkSQL();
|
||||
|
||||
test('Create table ', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 1,
|
||||
column: 14,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE_CREATE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
});
|
||||
|
||||
test('Select from table', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 9,
|
||||
column: 18,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
|
||||
});
|
||||
|
||||
test('Insert into table ', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 15,
|
||||
column: 13,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual([]);
|
||||
});
|
||||
|
||||
test('Insert into select from table ', () => {
|
||||
const pos: CaretPosition = {
|
||||
lineNumber: 21,
|
||||
column: 65,
|
||||
};
|
||||
const syntaxes = parser.getSuggestionAtCaretPosition(syntaxSql, pos)?.syntax;
|
||||
const suggestion = syntaxes?.find(
|
||||
(syn) => syn.syntaxContextType === SyntaxContextType.TABLE
|
||||
);
|
||||
|
||||
expect(suggestion).not.toBeUndefined();
|
||||
expect(suggestion?.wordRanges.map((token) => token.text)).toEqual(['db', '.']);
|
||||
});
|
||||
});
|
test/parser/spark/suggestion/tokenSuggestion.test.ts
@@ -2,6 +2,7 @@ import fs from 'fs';
import path from 'path';
import { CaretPosition } from '../../../../src/parser/common/basic-parser-types';
import SparkSQL from '../../../../src/parser/spark';
+import { commentOtherLine } from '../../../helper';

const tokenSql = fs.readFileSync(path.join(__dirname, 'fixtures', 'tokenSuggestion.sql'), 'utf-8');

@@ -13,7 +14,10 @@ describe('Spark SQL Token Suggestion', () => {
            lineNumber: 1,
            column: 7,
        };
-        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
+        const suggestion = parser.getSuggestionAtCaretPosition(
+            commentOtherLine(tokenSql, pos.lineNumber),
+            pos
+        )?.keywords;

        expect(suggestion).toEqual(['TABLE', 'INDEX', 'VIEW', 'DATABASE', 'NAMESPACE', 'SCHEMA']);
    });
@@ -23,7 +27,10 @@ describe('Spark SQL Token Suggestion', () => {
            lineNumber: 3,
            column: 8,
        };
-        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
+        const suggestion = parser.getSuggestionAtCaretPosition(
+            commentOtherLine(tokenSql, pos.lineNumber),
+            pos
+        )?.keywords;

        expect(suggestion).toEqual([
            'TEMPORARY',
@@ -46,7 +53,10 @@ describe('Spark SQL Token Suggestion', () => {
            lineNumber: 5,
            column: 8,
        };
-        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
+        const suggestion = parser.getSuggestionAtCaretPosition(
+            commentOtherLine(tokenSql, pos.lineNumber),
+            pos
+        )?.keywords;

        expect(suggestion).toEqual(['FROM']);
    });
@@ -56,7 +66,10 @@ describe('Spark SQL Token Suggestion', () => {
            lineNumber: 7,
            column: 10,
        };
-        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
+        const suggestion = parser.getSuggestionAtCaretPosition(
+            commentOtherLine(tokenSql, pos.lineNumber),
+            pos
+        )?.keywords;

        expect(suggestion).toEqual([
            'WITH',
@@ -79,7 +92,10 @@ describe('Spark SQL Token Suggestion', () => {
            lineNumber: 9,
            column: 6,
        };
-        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
+        const suggestion = parser.getSuggestionAtCaretPosition(
+            commentOtherLine(tokenSql, pos.lineNumber),
+            pos
+        )?.keywords;

        expect(suggestion).toEqual([
            'TEMPORARY',
@@ -99,7 +115,10 @@ describe('Spark SQL Token Suggestion', () => {
            lineNumber: 11,
            column: 8,
        };
-        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
+        const suggestion = parser.getSuggestionAtCaretPosition(
+            commentOtherLine(tokenSql, pos.lineNumber),
+            pos
+        )?.keywords;

        expect(suggestion).toEqual(['OVERWRITE', 'INTO']);
    });
@@ -109,7 +128,10 @@ describe('Spark SQL Token Suggestion', () => {
            lineNumber: 13,
            column: 6,
        };
-        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
+        const suggestion = parser.getSuggestionAtCaretPosition(
+            commentOtherLine(tokenSql, pos.lineNumber),
+            pos
+        )?.keywords;

        expect(suggestion).toEqual(['DATA']);
    });
@@ -119,7 +141,10 @@ describe('Spark SQL Token Suggestion', () => {
            lineNumber: 15,
            column: 6,
        };
-        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
+        const suggestion = parser.getSuggestionAtCaretPosition(
+            commentOtherLine(tokenSql, pos.lineNumber),
+            pos
+        )?.keywords;

        expect(suggestion).toEqual([
            'LOCKS',
@@ -154,7 +179,10 @@ describe('Spark SQL Token Suggestion', () => {
            lineNumber: 17,
            column: 8,
        };
-        const suggestion = parser.getSuggestionAtCaretPosition(tokenSql, pos)?.keywords;
+        const suggestion = parser.getSuggestionAtCaretPosition(
+            commentOtherLine(tokenSql, pos.lineNumber),
+            pos
+        )?.keywords;

        expect(suggestion).toEqual(['TABLE']);
    });