@@ -171,6 +171,10 @@ Tok.prototype.toJSON = Tok.prototype.inspect = function toJSON() {
   };
 };
 
+function literalTok(kind, position, line, col, prev) {
+  return new Tok(kind, position, position + kind.length, line, col, prev);
+}
+
 function printCharCode(code) {
   return (
     // NaN/undefined represents access beyond the end of the file.
@@ -215,62 +219,58 @@ function readToken(lexer: Lexer<*>, prev: Token): Token {
     );
   }
 
-  function literalTok(kind: TokenKindEnum) {
-    return new Tok(kind, position, position + kind.length, line, col, prev);
-  }
-
   switch (code) {
     // !
     case 33:
-      return literalTok(TokenKind.BANG);
+      return literalTok(TokenKind.BANG, position, line, col, prev);
     // #
     case 35:
       return readComment(source, position, line, col, prev);
     // $
     case 36:
-      return literalTok(TokenKind.DOLLAR);
+      return literalTok(TokenKind.DOLLAR, position, line, col, prev);
     // &
     case 38:
-      return literalTok(TokenKind.AMP);
+      return literalTok(TokenKind.AMP, position, line, col, prev);
     // (
     case 40:
-      return literalTok(TokenKind.PAREN_L);
+      return literalTok(TokenKind.PAREN_L, position, line, col, prev);
     // )
     case 41:
-      return literalTok(TokenKind.PAREN_R);
+      return literalTok(TokenKind.PAREN_R, position, line, col, prev);
     // .
     case 46:
       if (
         charCodeAt.call(body, position + 1) === 46 &&
         charCodeAt.call(body, position + 2) === 46
       ) {
-        return literalTok(TokenKind.SPREAD);
+        return literalTok(TokenKind.SPREAD, position, line, col, prev);
       }
       break;
     // :
     case 58:
-      return literalTok(TokenKind.COLON);
+      return literalTok(TokenKind.COLON, position, line, col, prev);
     // =
     case 61:
-      return literalTok(TokenKind.EQUALS);
+      return literalTok(TokenKind.EQUALS, position, line, col, prev);
     // @
     case 64:
-      return literalTok(TokenKind.AT);
+      return literalTok(TokenKind.AT, position, line, col, prev);
     // [
     case 91:
-      return literalTok(TokenKind.BRACKET_L);
+      return literalTok(TokenKind.BRACKET_L, position, line, col, prev);
     // ]
     case 93:
-      return literalTok(TokenKind.BRACKET_R);
+      return literalTok(TokenKind.BRACKET_R, position, line, col, prev);
     // {
     case 123:
-      return literalTok(TokenKind.BRACE_L);
+      return literalTok(TokenKind.BRACE_L, position, line, col, prev);
     // |
     case 124:
-      return literalTok(TokenKind.PIPE);
+      return literalTok(TokenKind.PIPE, position, line, col, prev);
     // }
     case 125:
-      return literalTok(TokenKind.BRACE_R);
+      return literalTok(TokenKind.BRACE_R, position, line, col, prev);
     // A-Z _ a-z
     case 65:
     case 66:
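In short, the change hoists `literalTok` out of `readToken`: rather than allocating a closure on every `readToken` call that captures `position`, `line`, `col`, and `prev`, the punctuator cases now call one module-level helper and pass those values explicitly. A minimal standalone sketch of the two shapes, using a hypothetical stand-in constructor rather than the library's actual `Tok`:

// Stand-in token factory, for illustration only (the real lexer uses Tok).
function makeTok(kind, start, end, line, column, prev) {
  return { kind, start, end, line, column, prev };
}

// Before: a fresh closure per readToken call, capturing position/line/col/prev.
function readTokenWithClosure(position, line, col, prev) {
  function literalTok(kind) {
    // kind is the punctuator string (e.g. '!'), so kind.length is the token width.
    return makeTok(kind, position, position + kind.length, line, col, prev);
  }
  return literalTok('!');
}

// After: a single module-level helper; each call site passes the values explicitly.
function literalTok(kind, position, line, col, prev) {
  return makeTok(kind, position, position + kind.length, line, col, prev);
}
function readTokenWithoutClosure(position, line, col, prev) {
  return literalTok('!', position, line, col, prev);
}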