Commit 7a7f792

Create 'TokenKindEnum' type.
1 parent 85401c1 commit 7a7f792

6 files changed (+81, -115 lines)


src/index.js

Lines changed: 1 addition & 0 deletions

@@ -248,6 +248,7 @@ export type {
   ObjectTypeExtensionNode,
   DirectiveDefinitionNode,
   KindEnum,
+  TokenKindEnum,
   DirectiveLocationEnum,
 } from './language';

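For orientation, a minimal consumer-side sketch (assumed usage, not part of this commit): with the re-export above in place, the new type can be imported from the package entry point alongside the enums that were already exposed there.

// Hypothetical consumer code; the 'graphql' specifier assumes the published
// package name, and describeTokenKind is an illustrative helper.
import type { TokenKindEnum } from 'graphql';

function describeTokenKind(kind: TokenKindEnum): string {
  return `token kind: ${kind}`;
}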

src/language/ast.js

Lines changed: 2 additions & 30 deletions

@@ -8,6 +8,7 @@
  */

 import type { Source } from './source';
+import type { TokenKindEnum } from './lexer';

 /**
  * Contains a range of UTF-8 character offsets and token references that
@@ -40,35 +41,6 @@ export type Location = {
   +source: Source,
 };

-/**
- * Represents the different kinds of tokens in a GraphQL document.
- * This type is not inlined in `Token` to fix syntax highlighting on GitHub
- * *only*.
- */
-type TokenKind =
-  | '<SOF>'
-  | '<EOF>'
-  | '!'
-  | '$'
-  | '&'
-  | '('
-  | ')'
-  | '...'
-  | ':'
-  | '='
-  | '@'
-  | '['
-  | ']'
-  | '{'
-  | '|'
-  | '}'
-  | 'Name'
-  | 'Int'
-  | 'Float'
-  | 'String'
-  | 'BlockString'
-  | 'Comment';
-
 /**
  * Represents a range of characters represented by a lexical token
  * within a Source.
@@ -77,7 +49,7 @@ export type Token = {
   /**
    * The kind of Token.
    */
-  +kind: TokenKind,
+  +kind: TokenKindEnum,

   /**
    * The character offset at which this Node begins.
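Because Token.kind is now typed with the lexer's TokenKindEnum instead of a private union, comparisons against the runtime TokenKind map type-check end to end. A hedged sketch written against the modules touched in this commit (the helper name is illustrative, not part of the change):

// Illustrative only; isScalarValueToken is a hypothetical helper.
import type { Token } from './ast';
import { TokenKind } from './lexer';

function isScalarValueToken(token: Token): boolean {
  // Each right-hand side is a value of TokenKindEnum, so Flow checks the comparison.
  return (
    token.kind === TokenKind.INT ||
    token.kind === TokenKind.FLOAT ||
    token.kind === TokenKind.STRING ||
    token.kind === TokenKind.BLOCK_STRING
  );
}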

src/language/directiveLocation.js

Lines changed: 3 additions & 3 deletions

@@ -10,7 +10,7 @@
 /**
  * The set of allowed directive location values.
  */
-export const DirectiveLocation = {
+export const DirectiveLocation = Object.freeze({
   // Request Definitions
   QUERY: 'QUERY',
   MUTATION: 'MUTATION',
@@ -31,9 +31,9 @@ export const DirectiveLocation = {
   ENUM_VALUE: 'ENUM_VALUE',
   INPUT_OBJECT: 'INPUT_OBJECT',
   INPUT_FIELD_DEFINITION: 'INPUT_FIELD_DEFINITION',
-};
+});

 /**
  * The enum type representing the directive location values.
  */
-export type DirectiveLocationEnum = $Keys<typeof DirectiveLocation>;
+export type DirectiveLocationEnum = $Values<typeof DirectiveLocation>;
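A short Flow sketch of why the change pairs Object.freeze with $Values (illustrative; the Example map is hypothetical): freezing lets Flow keep the literal types of the property values, and $Values then names the union of those values rather than the union of the keys.

// Hypothetical map whose keys and values differ, to make the distinction visible.
const Example = Object.freeze({
  SOF: '<SOF>',
  BANG: '!',
});

type ExampleKeys = $Keys<typeof Example>; // 'SOF' | 'BANG'
type ExampleValues = $Values<typeof Example>; // '<SOF>' | '!'

For DirectiveLocation the keys and values happen to be identical strings, so the resulting union is unchanged in practice, but $Values now describes what the constants evaluate to at runtime and matches the TokenKindEnum pattern introduced in lexer.js.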

src/language/index.js

Lines changed: 1 addition & 1 deletion

@@ -24,7 +24,7 @@ export {
 } from './visitor';
 export type { ASTVisitor, Visitor, VisitFn, VisitorKeyMap } from './visitor';

-export type { Lexer } from './lexer';
+export type { Lexer, TokenKindEnum } from './lexer';
 export type { ParseOptions } from './parser';

 export type {

src/language/lexer.js

Lines changed: 66 additions & 73 deletions

@@ -24,7 +24,7 @@ export function createLexer<TOptions>(
   source: Source,
   options: TOptions,
 ): Lexer<TOptions> {
-  const startOfFileToken = new Tok(SOF, 0, 0, 0, 0, null);
+  const startOfFileToken = new Tok(TokenKind.SOF, 0, 0, 0, 0, null);
   const lexer: Lexer<TOptions> = {
     source,
     options,
@@ -46,11 +46,11 @@ function advanceLexer() {

 function lookahead() {
   let token = this.token;
-  if (token.kind !== EOF) {
+  if (token.kind !== TokenKind.EOF) {
     do {
       // Note: next is only mutable during parsing, so we cast to allow this.
       token = token.next || ((token: any).next = readToken(this, token));
-    } while (token.kind === COMMENT);
+    } while (token.kind === TokenKind.COMMENT);
   }
   return token;
 }
@@ -94,58 +94,39 @@ export type Lexer<TOptions> = {
   lookahead(): Token,
 };

-// Each kind of token.
-const SOF = '<SOF>';
-const EOF = '<EOF>';
-const BANG = '!';
-const DOLLAR = '$';
-const AMP = '&';
-const PAREN_L = '(';
-const PAREN_R = ')';
-const SPREAD = '...';
-const COLON = ':';
-const EQUALS = '=';
-const AT = '@';
-const BRACKET_L = '[';
-const BRACKET_R = ']';
-const BRACE_L = '{';
-const PIPE = '|';
-const BRACE_R = '}';
-const NAME = 'Name';
-const INT = 'Int';
-const FLOAT = 'Float';
-const STRING = 'String';
-const BLOCK_STRING = 'BlockString';
-const COMMENT = 'Comment';
-
 /**
  * An exported enum describing the different kinds of tokens that the
  * lexer emits.
  */
-export const TokenKind = {
-  SOF,
-  EOF,
-  BANG,
-  DOLLAR,
-  AMP,
-  PAREN_L,
-  PAREN_R,
-  SPREAD,
-  COLON,
-  EQUALS,
-  AT,
-  BRACKET_L,
-  BRACKET_R,
-  BRACE_L,
-  PIPE,
-  BRACE_R,
-  NAME,
-  INT,
-  FLOAT,
-  STRING,
-  BLOCK_STRING,
-  COMMENT,
-};
+export const TokenKind = Object.freeze({
+  SOF: '<SOF>',
+  EOF: '<EOF>',
+  BANG: '!',
+  DOLLAR: '$',
+  AMP: '&',
+  PAREN_L: '(',
+  PAREN_R: ')',
+  SPREAD: '...',
+  COLON: ':',
+  EQUALS: '=',
+  AT: '@',
+  BRACKET_L: '[',
+  BRACKET_R: ']',
+  BRACE_L: '{',
+  PIPE: '|',
+  BRACE_R: '}',
+  NAME: 'Name',
+  INT: 'Int',
+  FLOAT: 'Float',
+  STRING: 'String',
+  BLOCK_STRING: 'BlockString',
+  COMMENT: 'Comment',
+});
+
+/**
+ * The enum type representing the token kinds values.
+ */
+export type TokenKindEnum = $Values<typeof TokenKind>;

 /**
  * A helper function to describe a token as a string for debugging
@@ -162,7 +143,7 @@ const slice = String.prototype.slice;
  * Helper function for constructing the Token object.
  */
 function Tok(
-  kind,
+  kind: TokenKindEnum,
   start: number,
   end: number,
   line: number,
@@ -194,7 +175,7 @@ function printCharCode(code) {
   return (
     // NaN/undefined represents access beyond the end of the file.
     isNaN(code)
-      ? EOF
+      ? TokenKind.EOF
       : // Trust JSON for ASCII.
         code < 0x007f
           ? JSON.stringify(String.fromCharCode(code))
@@ -220,7 +201,7 @@ function readToken(lexer: Lexer<*>, prev: Token): Token {
   const col = 1 + position - lexer.lineStart;

   if (position >= bodyLength) {
-    return new Tok(EOF, bodyLength, bodyLength, line, col, prev);
+    return new Tok(TokenKind.EOF, bodyLength, bodyLength, line, col, prev);
   }

   const code = charCodeAt.call(body, position);
@@ -234,58 +215,62 @@ function readToken(lexer: Lexer<*>, prev: Token): Token {
     );
   }

+  function literalTok(kind: TokenKindEnum) {
+    return new Tok(kind, position, position + kind.length, line, col, prev);
+  }
+
   switch (code) {
     // !
     case 33:
-      return new Tok(BANG, position, position + 1, line, col, prev);
+      return literalTok(TokenKind.BANG);
     // #
     case 35:
       return readComment(source, position, line, col, prev);
     // $
     case 36:
-      return new Tok(DOLLAR, position, position + 1, line, col, prev);
+      return literalTok(TokenKind.DOLLAR);
     // &
     case 38:
-      return new Tok(AMP, position, position + 1, line, col, prev);
+      return literalTok(TokenKind.AMP);
     // (
     case 40:
-      return new Tok(PAREN_L, position, position + 1, line, col, prev);
+      return literalTok(TokenKind.PAREN_L);
     // )
     case 41:
-      return new Tok(PAREN_R, position, position + 1, line, col, prev);
+      return literalTok(TokenKind.PAREN_R);
     // .
     case 46:
       if (
         charCodeAt.call(body, position + 1) === 46 &&
         charCodeAt.call(body, position + 2) === 46
       ) {
-        return new Tok(SPREAD, position, position + 3, line, col, prev);
+        return literalTok(TokenKind.SPREAD);
       }
       break;
     // :
     case 58:
-      return new Tok(COLON, position, position + 1, line, col, prev);
+      return literalTok(TokenKind.COLON);
     // =
     case 61:
-      return new Tok(EQUALS, position, position + 1, line, col, prev);
+      return literalTok(TokenKind.EQUALS);
     // @
     case 64:
-      return new Tok(AT, position, position + 1, line, col, prev);
+      return literalTok(TokenKind.AT);
     // [
     case 91:
-      return new Tok(BRACKET_L, position, position + 1, line, col, prev);
+      return literalTok(TokenKind.BRACKET_L);
     // ]
     case 93:
-      return new Tok(BRACKET_R, position, position + 1, line, col, prev);
+      return literalTok(TokenKind.BRACKET_R);
     // {
     case 123:
-      return new Tok(BRACE_L, position, position + 1, line, col, prev);
+      return literalTok(TokenKind.BRACE_L);
     // |
     case 124:
-      return new Tok(PIPE, position, position + 1, line, col, prev);
+      return literalTok(TokenKind.PIPE);
     // }
     case 125:
-      return new Tok(BRACE_R, position, position + 1, line, col, prev);
+      return literalTok(TokenKind.BRACE_R);
     // A-Z _ a-z
     case 65:
     case 66:
@@ -440,7 +425,7 @@ function readComment(source, start, line, col, prev): Token {
   );

   return new Tok(
-    COMMENT,
+    TokenKind.COMMENT,
     start,
     position,
     line,
@@ -505,7 +490,7 @@ function readNumber(source, start, firstCode, line, col, prev): Token {
   }

   return new Tok(
-    isFloat ? FLOAT : INT,
+    isFloat ? TokenKind.FLOAT : TokenKind.INT,
     start,
     position,
     line,
@@ -558,7 +543,15 @@ function readString(source, start, line, col, prev): Token {
     // Closing Quote (")
     if (code === 34) {
       value += slice.call(body, chunkStart, position);
-      return new Tok(STRING, start, position + 1, line, col, prev, value);
+      return new Tok(
+        TokenKind.STRING,
+        start,
+        position + 1,
+        line,
+        col,
+        prev,
+        value,
+      );
     }

     // SourceCharacter
@@ -659,7 +652,7 @@ function readBlockString(source, start, line, col, prev): Token {
     ) {
       rawValue += slice.call(body, chunkStart, position);
       return new Tok(
-        BLOCK_STRING,
+        TokenKind.BLOCK_STRING,
         start,
         position + 3,
         line,
@@ -756,7 +749,7 @@ function readName(source, position, line, col, prev): Token {
     ++end;
   }
   return new Tok(
-    NAME,
+    TokenKind.NAME,
     position,
     end,
     line,
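Putting the pieces together, a hedged usage sketch (assumed consumer code, not part of the commit): lex a short document and collect each token's kind, with the collected values typed by the new enum.

// Illustrative only; written against the modules in this commit.
import { Source } from './source';
import { createLexer, TokenKind } from './lexer';
import type { TokenKindEnum } from './lexer';

const lexer = createLexer(new Source('{ user { id } }'), {});

const kinds: Array<TokenKindEnum> = [];
let token = lexer.advance();
while (token.kind !== TokenKind.EOF) {
  kinds.push(token.kind);
  token = lexer.advance();
}
// kinds is now ['{', 'Name', '{', 'Name', '}', '}'] -- the literal strings
// that TokenKindEnum is a union of.

Note also that the new literalTok helper computes the end offset as position + kind.length; this works because each punctuator's TokenKind value is its own source text (for example, TokenKind.SPREAD is '...', three characters long).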
