Skip to content

Commit 465f2c9

Browse files
committed
fix: split whitespace into multiple tokens so that findCaretTokenIndex is no longer undefined
1 parent: f8afbe2 · commit: 465f2c9

30 files changed

+7402
-7293
lines changed

src/grammar/flink/FlinkSqlLexer.g4

+1-1
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@ options {
1111

1212
// SKIP
1313

14-
SPACE : [ \t\r\n]+ -> channel(HIDDEN);
14+
SPACE : (' ' | '\t' | '\r' | '\n') -> channel(HIDDEN);
1515
COMMENT_INPUT : '/*' .*? '*/' -> channel(HIDDEN);
1616
LINE_COMMENT: (('--' | '#') ~[\r\n]* ('\r'? '\n' | EOF) | '--' ('\r'? '\n' | EOF)) -> channel(HIDDEN);
1717

src/grammar/impala/ImpalaSqlLexer.g4

+1-1
Original file line numberDiff line numberDiff line change
@@ -350,4 +350,4 @@ SIMPLE_COMMENT: '--' ~[\r\n]* '\r'? '\n'? -> channel(HIDDEN);
350350

351351
BRACKETED_COMMENT: '/*' .*? '*/' -> channel(HIDDEN);
352352

353-
WS: [ \r\n\t]+ -> channel(HIDDEN);
353+
WS: (' ' | '\t' | '\r' | '\n') -> channel(HIDDEN);

src/grammar/mysql/MySqlLexer.g4

+1-1
Original file line numberDiff line numberDiff line change
@@ -41,7 +41,7 @@ channels {
4141
ERRORCHANNEL
4242
}
4343

44-
SPACE : [ \t\r\n]+ -> channel(HIDDEN);
44+
SPACE : (' ' | '\t' | '\r' | '\n') -> channel(HIDDEN);
4545
SPEC_MYSQL_COMMENT : '/*!' .+? '*/' -> channel(MYSQLCOMMENT);
4646
COMMENT_INPUT : '/*' .*? '*/' -> channel(HIDDEN);
4747
LINE_COMMENT: (('--' [ \t]* | '#') ~[\r\n]* ('\r'? '\n' | EOF) | '--' ('\r'? '\n' | EOF)) -> channel(HIDDEN);

src/grammar/spark/SparkSqlLexer.g4

+1-1
Original file line numberDiff line numberDiff line change
@@ -481,7 +481,7 @@ SIMPLE_COMMENT: '--' ('\\\n' | ~[\r\n])* '\r'? '\n'? -> channel(HIDDEN);
481481
BRACKETED_COMMENT:
482482
'/*' (BRACKETED_COMMENT | .)*? ('*/' | {this.markUnclosedComment();} EOF) -> channel(HIDDEN);
483483

484-
WS: [ \r\n\t]+ -> channel(HIDDEN);
484+
WS: (' ' | '\t' | '\r' | '\n') -> channel(HIDDEN);
485485

486486
// Catch-all for anything we can't recognize.
487487
// We use this to be able to ignore and recover all the text

src/grammar/trino/TrinoSql.g4

+1-1
Original file line numberDiff line numberDiff line change
@@ -1648,7 +1648,7 @@ SIMPLE_COMMENT: '--' ~[\r\n]* '\r'? '\n'? -> channel(HIDDEN);
16481648

16491649
BRACKETED_COMMENT: '/*' .*? '*/' -> channel(HIDDEN);
16501650

1651-
WS: [ \r\n\t]+ -> channel(HIDDEN);
1651+
WS: (' ' | '\t' | '\r' | '\n') -> channel(HIDDEN);
16521652

16531653
// Catch-all for anything we can't recognize.
16541654
// We use this to be able to ignore and recover all the text

src/lib/flink/FlinkSqlLexer.interp

+1-1
Large diffs are not rendered by default.

src/lib/flink/FlinkSqlLexer.ts

+1,961-1,962
Large diffs are not rendered by default.

src/lib/impala/ImpalaSqlLexer.interp

+1-1
Large diffs are not rendered by default.

src/lib/impala/ImpalaSqlLexer.ts

+582-583
Large diffs are not rendered by default.

src/lib/mysql/MySqlLexer.interp

+1-1
Large diffs are not rendered by default.

src/lib/mysql/MySqlLexer.ts

+3,472-3,473
Large diffs are not rendered by default.

src/lib/spark/SparkSqlLexer.interp

+1-1
Large diffs are not rendered by default.

src/lib/spark/SparkSqlLexer.ts

+943-944
Large diffs are not rendered by default.

src/lib/trino/TrinoSqlLexer.interp

+1-1
Large diffs are not rendered by default.

src/lib/trino/TrinoSqlLexer.ts

+314-316
Large diffs are not rendered by default.

test/helper.ts

+8-2
Original file line numberDiff line numberDiff line change
@@ -32,9 +32,15 @@ export const readSQL = (dirname: string, fileName: string) => {
3232
return result;
3333
};
3434

35-
export function commentOtherLine(sqlContent: string, line: number) {
35+
export function commentOtherLine(
36+
sqlContent: string,
37+
line: number | [startLine: number, endLine: number]
38+
) {
3639
const slices = sqlContent.split('\n').map((item, index) => {
37-
if (index !== line - 1) {
40+
if (
41+
(Array.isArray(line) && (index + 1 < line[0] || index + 1 > line[1])) ||
42+
(typeof line === 'number' && index + 1 !== line)
43+
) {
3844
return '-- ' + item;
3945
} else {
4046
return item;

test/parser/flink/suggestion/fixtures/tokenSuggestion.sql

+5-1
Original file line numberDiff line numberDiff line change
@@ -4,4 +4,8 @@ USE
44
;
55
CREATE
66
;
7-
SHOW
7+
SHOW
8+
9+
CREATE TABLE tb (id
10+
11+
);

test/parser/flink/suggestion/tokenSuggestion.test.ts

+12
Original file line numberDiff line numberDiff line change
@@ -67,4 +67,16 @@ describe('Flink SQL Token Suggestion', () => {
6767
'JARS',
6868
]);
6969
});
70+
71+
test('Suggestion in new line', () => {
72+
const pos: CaretPosition = {
73+
lineNumber: 10,
74+
column: 2,
75+
};
76+
const suggestion = flink.getSuggestionAtCaretPosition(
77+
commentOtherLine(tokenSql, [9, 11]),
78+
pos
79+
)?.keywords;
80+
expect(suggestion.length).not.toBe(0);
81+
});
7082
});

test/parser/hive/suggestion/fixtures/tokenSuggestion.sql

+4
Original file line numberDiff line numberDiff line change
@@ -18,3 +18,7 @@ LOAD
1818
;
1919
SHOW
2020
;
21+
22+
CREATE TABLE tb (id
23+
24+
);

test/parser/hive/suggestion/tokenSuggestion.test.ts

+12
Original file line numberDiff line numberDiff line change
@@ -219,4 +219,16 @@ describe('Hive SQL Token Suggestion', () => {
219219
'SCHEMAS',
220220
]);
221221
});
222+
223+
test('Suggestion in new line', () => {
224+
const pos: CaretPosition = {
225+
lineNumber: 23,
226+
column: 2,
227+
};
228+
const suggestion = hive.getSuggestionAtCaretPosition(
229+
commentOtherLine(tokenSql, [21, 23]),
230+
pos
231+
)?.keywords;
232+
expect(suggestion.length).not.toBe(0);
233+
});
222234
});

test/parser/impala/suggestion/fixtures/tokenSuggestion.sql

+4
Original file line numberDiff line numberDiff line change
@@ -9,3 +9,7 @@ INSERT ;
99
SHOW ;
1010

1111
CREATE TABLE t1 (id );
12+
13+
CREATE TABLE tb (id
14+
15+
);

test/parser/impala/suggestion/tokenSuggestion.test.ts

+12
Original file line numberDiff line numberDiff line change
@@ -143,4 +143,16 @@ describe('Impala SQL Token Suggestion', () => {
143143

144144
expect(dataTypes.every((dataType) => suggestion.includes(dataType))).toBe(true);
145145
});
146+
147+
test('Suggestion in new line', () => {
148+
const pos: CaretPosition = {
149+
lineNumber: 14,
150+
column: 2,
151+
};
152+
const suggestion = impala.getSuggestionAtCaretPosition(
153+
commentOtherLine(tokenSql, [13, 15]),
154+
pos
155+
)?.keywords;
156+
expect(suggestion.length).not.toBe(0);
157+
});
146158
});

test/parser/mysql/suggestion/fixtures/tokenSuggestion.sql

+4
Original file line numberDiff line numberDiff line change
@@ -14,3 +14,7 @@ LOAD
1414
;
1515
SHOW
1616
;
17+
18+
CREATE TABLE tb (id
19+
20+
);

test/parser/mysql/suggestion/tokenSuggestion.test.ts

+12
Original file line numberDiff line numberDiff line change
@@ -242,4 +242,16 @@ describe('MySQL Token Suggestion', () => {
242242
'BINARY',
243243
]);
244244
});
245+
246+
test('Suggestion in new line', () => {
247+
const pos: CaretPosition = {
248+
lineNumber: 19,
249+
column: 2,
250+
};
251+
const suggestion = mysql.getSuggestionAtCaretPosition(
252+
commentOtherLine(tokenSql, [18, 20]),
253+
pos
254+
)?.keywords;
255+
expect(suggestion.length).not.toBe(0);
256+
});
245257
});

test/parser/postgresql/suggestion/fixtures/tokenSuggestion.sql

+3-2
Original file line numberDiff line numberDiff line change
@@ -8,5 +8,6 @@ DELETE ;
88

99
CREATE ;
1010

11-
12-
11+
CREATE TABLE tb (id
12+
13+
);

test/parser/postgresql/suggestion/tokenSuggestion.test.ts

+12
Original file line numberDiff line numberDiff line change
@@ -190,4 +190,16 @@ describe('Postgres SQL Token Suggestion', () => {
190190
)?.keywords;
191191
expect(suggestion).toMatchUnorderedArray(['INTO']);
192192
});
193+
194+
test('Suggestion in new line', () => {
195+
const pos: CaretPosition = {
196+
lineNumber: 12,
197+
column: 2,
198+
};
199+
const suggestion = postgresql.getSuggestionAtCaretPosition(
200+
commentOtherLine(tokenSql, [11, 13]),
201+
pos
202+
)?.keywords;
203+
expect(suggestion.length).not.toBe(0);
204+
});
193205
});

test/parser/spark/suggestion/fixtures/tokenSuggestion.sql

+4
Original file line numberDiff line numberDiff line change
@@ -16,3 +16,7 @@ SHOW
1616
;
1717
EXPORT
1818
;
19+
20+
CREATE TABLE tb (id
21+
22+
);

test/parser/spark/suggestion/tokenSuggestion.test.ts

+12
Original file line numberDiff line numberDiff line change
@@ -197,4 +197,16 @@ describe('Spark SQL Token Suggestion', () => {
197197

198198
expect(suggestion).toMatchUnorderedArray(['TABLE']);
199199
});
200+
201+
test('Suggestion in new line', () => {
202+
const pos: CaretPosition = {
203+
lineNumber: 21,
204+
column: 2,
205+
};
206+
const suggestion = spark.getSuggestionAtCaretPosition(
207+
commentOtherLine(tokenSql, [20, 22]),
208+
pos
209+
)?.keywords;
210+
expect(suggestion.length).not.toBe(0);
211+
});
200212
});

test/parser/trino/suggestion/fixtures/tokenSuggestion.sql

+4
Original file line numberDiff line numberDiff line change
@@ -11,3 +11,7 @@ DESCRIBE ;
1111
DROP ;
1212

1313
INSERT ;
14+
15+
CREATE TABLE tb (id
16+
17+
);

test/parser/trino/suggestion/tokenSuggestion.test.ts

+12
Original file line numberDiff line numberDiff line change
@@ -124,4 +124,16 @@ describe('Trino SQL Token Suggestion', () => {
124124

125125
expect(suggestion).toMatchUnorderedArray(['INTO']);
126126
});
127+
128+
test('Suggestion in new line', () => {
129+
const pos: CaretPosition = {
130+
lineNumber: 16,
131+
column: 2,
132+
};
133+
const suggestion = trino.getSuggestionAtCaretPosition(
134+
commentOtherLine(tokenSql, [15, 17]),
135+
pos
136+
)?.keywords;
137+
expect(suggestion.length).not.toBe(0);
138+
});
127139
});

0 commit comments

Comments (0)