diff --git a/src/execution/execute.js b/src/execution/execute.js index 6a640f3a35..4faa1e29ed 100644 --- a/src/execution/execute.js +++ b/src/execution/execute.js @@ -17,7 +17,7 @@ import type { ObjMap } from '../jsutils/ObjMap'; import type { MaybePromise } from '../jsutils/MaybePromise'; import { typeFromAST } from '../utilities/typeFromAST'; -import * as Kind from '../language/kinds'; +import { Kind } from '../language/kinds'; import { getVariableValues, getArgumentValues, diff --git a/src/execution/values.js b/src/execution/values.js index b09e49f0bd..638f4b28b8 100644 --- a/src/execution/values.js +++ b/src/execution/values.js @@ -14,7 +14,7 @@ import keyMap from '../jsutils/keyMap'; import { coerceValue } from '../utilities/coerceValue'; import { typeFromAST } from '../utilities/typeFromAST'; import { valueFromAST } from '../utilities/valueFromAST'; -import * as Kind from '../language/kinds'; +import { Kind } from '../language/kinds'; import { print } from '../language/printer'; import { isInputType, isNonNullType } from '../type/definition'; import type { ObjMap } from '../jsutils/ObjMap'; diff --git a/src/index.js b/src/index.js index 15efb8e43b..781fb38202 100644 --- a/src/index.js +++ b/src/index.js @@ -247,6 +247,8 @@ export type { TypeExtensionNode, ObjectTypeExtensionNode, DirectiveDefinitionNode, + KindEnum, + TokenKindEnum, DirectiveLocationEnum, } from './language'; diff --git a/src/language/__tests__/parser-test.js b/src/language/__tests__/parser-test.js index a3486e72ad..b05a753e3a 100644 --- a/src/language/__tests__/parser-test.js +++ b/src/language/__tests__/parser-test.js @@ -5,7 +5,7 @@ * LICENSE file in the root directory of this source tree. */ -import * as Kind from '../kinds'; +import { Kind } from '../kinds'; import { expect } from 'chai'; import { describe, it } from 'mocha'; import { parse, parseValue, parseType } from '../parser'; diff --git a/src/language/__tests__/visitor-test.js b/src/language/__tests__/visitor-test.js index 725b5e468e..353fc13951 100644 --- a/src/language/__tests__/visitor-test.js +++ b/src/language/__tests__/visitor-test.js @@ -15,7 +15,7 @@ import { join } from 'path'; import { TypeInfo } from '../../utilities/TypeInfo'; import { testSchema } from '../../validation/__tests__/harness'; import { getNamedType, isCompositeType } from '../../type'; -import * as Kind from '../kinds'; +import { Kind } from '../kinds'; function getNodeByPath(ast, path) { let result = ast; diff --git a/src/language/ast.js b/src/language/ast.js index bec5ae3d57..86d0d52438 100644 --- a/src/language/ast.js +++ b/src/language/ast.js @@ -8,6 +8,7 @@ */ import type { Source } from './source'; +import type { TokenKindEnum } from './lexer'; /** * Contains a range of UTF-8 character offsets and token references that @@ -40,35 +41,6 @@ export type Location = { +source: Source, }; -/** - * Represents the different kinds of tokens in a GraphQL document. - * This type is not inlined in `Token` to fix syntax highlighting on GitHub - * *only*. - */ -type TokenKind = - | '<SOF>' - | '<EOF>' - | '!' - | '$' - | '&' - | '(' - | ')' - | '...' - | ':' - | '=' - | '@' - | '[' - | ']' - | '{' - | '|' - | '}' - | 'Name' - | 'Int' - | 'Float' - | 'String' - | 'BlockString' - | 'Comment'; - /** * Represents a range of characters represented by a lexical token * within a Source. @@ -77,7 +49,7 @@ export type Token = { /** * The kind of Token. */ - +kind: TokenKind, + +kind: TokenKindEnum, /** * The character offset at which this Node begins.
diff --git a/src/language/directiveLocation.js b/src/language/directiveLocation.js index ebabf773bc..8b6a5ec98a 100644 --- a/src/language/directiveLocation.js +++ b/src/language/directiveLocation.js @@ -10,7 +10,7 @@ /** * The set of allowed directive location values. */ -export const DirectiveLocation = { +export const DirectiveLocation = Object.freeze({ // Request Definitions QUERY: 'QUERY', MUTATION: 'MUTATION', @@ -31,9 +31,9 @@ export const DirectiveLocation = { ENUM_VALUE: 'ENUM_VALUE', INPUT_OBJECT: 'INPUT_OBJECT', INPUT_FIELD_DEFINITION: 'INPUT_FIELD_DEFINITION', -}; +}); /** * The enum type representing the directive location values. */ -export type DirectiveLocationEnum = $Keys<typeof DirectiveLocation>; +export type DirectiveLocationEnum = $Values<typeof DirectiveLocation>; diff --git a/src/language/index.js b/src/language/index.js index f8bd4bfbb6..66e5e1f840 100644 --- a/src/language/index.js +++ b/src/language/index.js @@ -9,8 +9,8 @@ export { getLocation } from './location'; export type { SourceLocation } from './location'; -import * as Kind from './kinds'; -export { Kind }; +export { Kind } from './kinds'; +export type { KindEnum } from './kinds'; export { createLexer, TokenKind } from './lexer'; export { parse, parseValue, parseType } from './parser'; export { print } from './printer'; @@ -24,7 +24,7 @@ export { } from './visitor'; export type { ASTVisitor, Visitor, VisitFn, VisitorKeyMap } from './visitor'; -export type { Lexer } from './lexer'; +export type { Lexer, TokenKindEnum } from './lexer'; export type { ParseOptions } from './parser'; export type { diff --git a/src/language/kinds.js b/src/language/kinds.js index f36f945afe..0a45c10283 100644 --- a/src/language/kinds.js +++ b/src/language/kinds.js @@ -7,74 +7,74 @@ * @flow */ -// Name - -export const NAME = 'Name'; - -// Document - -export const DOCUMENT = 'Document'; -export const OPERATION_DEFINITION = 'OperationDefinition'; -export const VARIABLE_DEFINITION = 'VariableDefinition'; -export const VARIABLE = 'Variable'; -export const SELECTION_SET = 'SelectionSet'; -export const FIELD = 'Field'; -export const ARGUMENT = 'Argument'; - -// Fragments - -export const FRAGMENT_SPREAD = 'FragmentSpread'; -export const INLINE_FRAGMENT = 'InlineFragment'; -export const FRAGMENT_DEFINITION = 'FragmentDefinition'; - -// Values - -export const INT = 'IntValue'; -export const FLOAT = 'FloatValue'; -export const STRING = 'StringValue'; -export const BOOLEAN = 'BooleanValue'; -export const NULL = 'NullValue'; -export const ENUM = 'EnumValue'; -export const LIST = 'ListValue'; -export const OBJECT = 'ObjectValue'; -export const OBJECT_FIELD = 'ObjectField'; - -// Directives - -export const DIRECTIVE = 'Directive'; - -// Types - -export const NAMED_TYPE = 'NamedType'; -export const LIST_TYPE = 'ListType'; -export const NON_NULL_TYPE = 'NonNullType'; - -// Type System Definitions - -export const SCHEMA_DEFINITION = 'SchemaDefinition'; -export const OPERATION_TYPE_DEFINITION = 'OperationTypeDefinition'; - -// Type Definitions - -export const SCALAR_TYPE_DEFINITION = 'ScalarTypeDefinition'; -export const OBJECT_TYPE_DEFINITION = 'ObjectTypeDefinition'; -export const FIELD_DEFINITION = 'FieldDefinition'; -export const INPUT_VALUE_DEFINITION = 'InputValueDefinition'; -export const INTERFACE_TYPE_DEFINITION = 'InterfaceTypeDefinition'; -export const UNION_TYPE_DEFINITION = 'UnionTypeDefinition'; -export const ENUM_TYPE_DEFINITION = 'EnumTypeDefinition'; -export const ENUM_VALUE_DEFINITION = 'EnumValueDefinition'; -export const INPUT_OBJECT_TYPE_DEFINITION = 'InputObjectTypeDefinition';
- -// Type Extensions - -export const SCALAR_TYPE_EXTENSION = 'ScalarTypeExtension'; -export const OBJECT_TYPE_EXTENSION = 'ObjectTypeExtension'; -export const INTERFACE_TYPE_EXTENSION = 'InterfaceTypeExtension'; -export const UNION_TYPE_EXTENSION = 'UnionTypeExtension'; -export const ENUM_TYPE_EXTENSION = 'EnumTypeExtension'; -export const INPUT_OBJECT_TYPE_EXTENSION = 'InputObjectTypeExtension'; - -// Directive Definitions +/** + * The set of allowed kind values for AST nodes. + */ +export const Kind = Object.freeze({ + // Name + NAME: 'Name', + + // Document + DOCUMENT: 'Document', + OPERATION_DEFINITION: 'OperationDefinition', + VARIABLE_DEFINITION: 'VariableDefinition', + VARIABLE: 'Variable', + SELECTION_SET: 'SelectionSet', + FIELD: 'Field', + ARGUMENT: 'Argument', + + // Fragments + FRAGMENT_SPREAD: 'FragmentSpread', + INLINE_FRAGMENT: 'InlineFragment', + FRAGMENT_DEFINITION: 'FragmentDefinition', + + // Values + INT: 'IntValue', + FLOAT: 'FloatValue', + STRING: 'StringValue', + BOOLEAN: 'BooleanValue', + NULL: 'NullValue', + ENUM: 'EnumValue', + LIST: 'ListValue', + OBJECT: 'ObjectValue', + OBJECT_FIELD: 'ObjectField', + + // Directives + DIRECTIVE: 'Directive', + + // Types + NAMED_TYPE: 'NamedType', + LIST_TYPE: 'ListType', + NON_NULL_TYPE: 'NonNullType', + + // Type System Definitions + SCHEMA_DEFINITION: 'SchemaDefinition', + OPERATION_TYPE_DEFINITION: 'OperationTypeDefinition', + + // Type Definitions + SCALAR_TYPE_DEFINITION: 'ScalarTypeDefinition', + OBJECT_TYPE_DEFINITION: 'ObjectTypeDefinition', + FIELD_DEFINITION: 'FieldDefinition', + INPUT_VALUE_DEFINITION: 'InputValueDefinition', + INTERFACE_TYPE_DEFINITION: 'InterfaceTypeDefinition', + UNION_TYPE_DEFINITION: 'UnionTypeDefinition', + ENUM_TYPE_DEFINITION: 'EnumTypeDefinition', + ENUM_VALUE_DEFINITION: 'EnumValueDefinition', + INPUT_OBJECT_TYPE_DEFINITION: 'InputObjectTypeDefinition', + + // Type Extensions + SCALAR_TYPE_EXTENSION: 'ScalarTypeExtension', + OBJECT_TYPE_EXTENSION: 'ObjectTypeExtension', + INTERFACE_TYPE_EXTENSION: 'InterfaceTypeExtension', + UNION_TYPE_EXTENSION: 'UnionTypeExtension', + ENUM_TYPE_EXTENSION: 'EnumTypeExtension', + INPUT_OBJECT_TYPE_EXTENSION: 'InputObjectTypeExtension', + + // Directive Definitions + DIRECTIVE_DEFINITION: 'DirectiveDefinition', +}); -export const DIRECTIVE_DEFINITION = 'DirectiveDefinition'; +/** + * The enum type representing the possible kind values of AST nodes. + */ +export type KindEnum = $Values<typeof Kind>; diff --git a/src/language/lexer.js b/src/language/lexer.js index 9edc753c9d..e39436296f 100644 --- a/src/language/lexer.js +++ b/src/language/lexer.js @@ -24,7 +24,7 @@ export function createLexer<TOptions>( source: Source, options: TOptions, ): Lexer<TOptions> { - const startOfFileToken = new Tok(SOF, 0, 0, 0, 0, null); + const startOfFileToken = new Tok(TokenKind.SOF, 0, 0, 0, 0, null); const lexer: Lexer<TOptions> = { source, options, @@ -46,11 +46,11 @@ function advanceLexer() { function lookahead() { let token = this.token; - if (token.kind !== EOF) { + if (token.kind !== TokenKind.EOF) { do { // Note: next is only mutable during parsing, so we cast to allow this. token = token.next || ((token: any).next = readToken(this, token)); - } while (token.kind === COMMENT); + } while (token.kind === TokenKind.COMMENT); } return token; } @@ -94,58 +94,39 @@ export type Lexer<TOptions> = { lookahead(): Token, }; -// Each kind of token.
-const SOF = '<SOF>'; -const EOF = '<EOF>'; -const BANG = '!'; -const DOLLAR = '$'; -const AMP = '&'; -const PAREN_L = '('; -const PAREN_R = ')'; -const SPREAD = '...'; -const COLON = ':'; -const EQUALS = '='; -const AT = '@'; -const BRACKET_L = '['; -const BRACKET_R = ']'; -const BRACE_L = '{'; -const PIPE = '|'; -const BRACE_R = '}'; -const NAME = 'Name'; -const INT = 'Int'; -const FLOAT = 'Float'; -const STRING = 'String'; -const BLOCK_STRING = 'BlockString'; -const COMMENT = 'Comment'; - /** * An exported enum describing the different kinds of tokens that the * lexer emits. */ -export const TokenKind = { - SOF, - EOF, - BANG, - DOLLAR, - AMP, - PAREN_L, - PAREN_R, - SPREAD, - COLON, - EQUALS, - AT, - BRACKET_L, - BRACKET_R, - BRACE_L, - PIPE, - BRACE_R, - NAME, - INT, - FLOAT, - STRING, - BLOCK_STRING, - COMMENT, -}; +export const TokenKind = Object.freeze({ + SOF: '<SOF>', + EOF: '<EOF>', + BANG: '!', + DOLLAR: '$', + AMP: '&', + PAREN_L: '(', + PAREN_R: ')', + SPREAD: '...', + COLON: ':', + EQUALS: '=', + AT: '@', + BRACKET_L: '[', + BRACKET_R: ']', + BRACE_L: '{', + PIPE: '|', + BRACE_R: '}', + NAME: 'Name', + INT: 'Int', + FLOAT: 'Float', + STRING: 'String', + BLOCK_STRING: 'BlockString', + COMMENT: 'Comment', +}); + +/** + * The enum type representing the token kinds values. + */ +export type TokenKindEnum = $Values<typeof TokenKind>; /** * A helper function to describe a token as a string for debugging @@ -162,7 +143,7 @@ const slice = String.prototype.slice; * Helper function for constructing the Token object. */ function Tok( - kind, + kind: TokenKindEnum, start: number, end: number, line: number, @@ -194,7 +175,7 @@ function printCharCode(code) { return ( // NaN/undefined represents access beyond the end of the file. isNaN(code) - ? EOF + ? TokenKind.EOF : // Trust JSON for ASCII. code < 0x007f ? JSON.stringify(String.fromCharCode(code)) @@ -215,21 +196,21 @@ function readToken(lexer: Lexer<*>, prev: Token): Token { const body = source.body; const bodyLength = body.length; - const position = positionAfterWhitespace(body, prev.end, lexer); + const pos = positionAfterWhitespace(body, prev.end, lexer); const line = lexer.line; - const col = 1 + position - lexer.lineStart; + const col = 1 + pos - lexer.lineStart; - if (position >= bodyLength) { - return new Tok(EOF, bodyLength, bodyLength, line, col, prev); + if (pos >= bodyLength) { + return new Tok(TokenKind.EOF, bodyLength, bodyLength, line, col, prev); } - const code = charCodeAt.call(body, position); + const code = charCodeAt.call(body, pos); // SourceCharacter if (code < 0x0020 && code !== 0x0009 && code !== 0x000a && code !== 0x000d) { throw syntaxError( source, - position, + pos, `Cannot contain the invalid character ${printCharCode(code)}.`, ); } @@ -237,55 +218,55 @@ function readToken(lexer: Lexer<*>, prev: Token): Token { switch (code) { // !
case 33: - return new Tok(BANG, position, position + 1, line, col, prev); + return new Tok(TokenKind.BANG, pos, pos + 1, line, col, prev); // # case 35: - return readComment(source, position, line, col, prev); + return readComment(source, pos, line, col, prev); // $ case 36: - return new Tok(DOLLAR, position, position + 1, line, col, prev); + return new Tok(TokenKind.DOLLAR, pos, pos + 1, line, col, prev); // & case 38: - return new Tok(AMP, position, position + 1, line, col, prev); + return new Tok(TokenKind.AMP, pos, pos + 1, line, col, prev); // ( case 40: - return new Tok(PAREN_L, position, position + 1, line, col, prev); + return new Tok(TokenKind.PAREN_L, pos, pos + 1, line, col, prev); // ) case 41: - return new Tok(PAREN_R, position, position + 1, line, col, prev); + return new Tok(TokenKind.PAREN_R, pos, pos + 1, line, col, prev); // . case 46: if ( - charCodeAt.call(body, position + 1) === 46 && - charCodeAt.call(body, position + 2) === 46 + charCodeAt.call(body, pos + 1) === 46 && + charCodeAt.call(body, pos + 2) === 46 ) { - return new Tok(SPREAD, position, position + 3, line, col, prev); + return new Tok(TokenKind.SPREAD, pos, pos + 3, line, col, prev); } break; // : case 58: - return new Tok(COLON, position, position + 1, line, col, prev); + return new Tok(TokenKind.COLON, pos, pos + 1, line, col, prev); // = case 61: - return new Tok(EQUALS, position, position + 1, line, col, prev); + return new Tok(TokenKind.EQUALS, pos, pos + 1, line, col, prev); // @ case 64: - return new Tok(AT, position, position + 1, line, col, prev); + return new Tok(TokenKind.AT, pos, pos + 1, line, col, prev); // [ case 91: - return new Tok(BRACKET_L, position, position + 1, line, col, prev); + return new Tok(TokenKind.BRACKET_L, pos, pos + 1, line, col, prev); // ] case 93: - return new Tok(BRACKET_R, position, position + 1, line, col, prev); + return new Tok(TokenKind.BRACKET_R, pos, pos + 1, line, col, prev); // { case 123: - return new Tok(BRACE_L, position, position + 1, line, col, prev); + return new Tok(TokenKind.BRACE_L, pos, pos + 1, line, col, prev); // | case 124: - return new Tok(PIPE, position, position + 1, line, col, prev); + return new Tok(TokenKind.PIPE, pos, pos + 1, line, col, prev); // } case 125: - return new Tok(BRACE_R, position, position + 1, line, col, prev); + return new Tok(TokenKind.BRACE_R, pos, pos + 1, line, col, prev); // A-Z _ a-z case 65: case 66: @@ -340,7 +321,7 @@ function readToken(lexer: Lexer<*>, prev: Token): Token { case 120: case 121: case 122: - return readName(source, position, line, col, prev); + return readName(source, pos, line, col, prev); // - 0-9 case 45: case 48: @@ -353,19 +334,19 @@ function readToken(lexer: Lexer<*>, prev: Token): Token { case 55: case 56: case 57: - return readNumber(source, position, code, line, col, prev); + return readNumber(source, pos, code, line, col, prev); // " case 34: if ( - charCodeAt.call(body, position + 1) === 34 && - charCodeAt.call(body, position + 2) === 34 + charCodeAt.call(body, pos + 1) === 34 && + charCodeAt.call(body, pos + 2) === 34 ) { - return readBlockString(source, position, line, col, prev); + return readBlockString(source, pos, line, col, prev); } - return readString(source, position, line, col, prev); + return readString(source, pos, line, col, prev); } - throw syntaxError(source, position, unexpectedCharacterMessage(code)); + throw syntaxError(source, pos, unexpectedCharacterMessage(code)); } /** @@ -440,7 +421,7 @@ function readComment(source, start, line, col, prev): Token { ); return new Tok( - 
COMMENT, + TokenKind.COMMENT, start, position, line, @@ -505,7 +486,7 @@ function readNumber(source, start, firstCode, line, col, prev): Token { } return new Tok( - isFloat ? FLOAT : INT, + isFloat ? TokenKind.FLOAT : TokenKind.INT, start, position, line, @@ -558,7 +539,15 @@ function readString(source, start, line, col, prev): Token { // Closing Quote (") if (code === 34) { value += slice.call(body, chunkStart, position); - return new Tok(STRING, start, position + 1, line, col, prev, value); + return new Tok( + TokenKind.STRING, + start, + position + 1, + line, + col, + prev, + value, + ); } // SourceCharacter @@ -659,7 +648,7 @@ function readBlockString(source, start, line, col, prev): Token { ) { rawValue += slice.call(body, chunkStart, position); return new Tok( - BLOCK_STRING, + TokenKind.BLOCK_STRING, start, position + 3, line, @@ -740,28 +729,28 @@ function char2hex(a) { * * [_A-Za-z][_0-9A-Za-z]* */ -function readName(source, position, line, col, prev): Token { +function readName(source, start, line, col, prev): Token { const body = source.body; const bodyLength = body.length; - let end = position + 1; + let position = start + 1; let code = 0; while ( - end !== bodyLength && - (code = charCodeAt.call(body, end)) !== null && + position !== bodyLength && + (code = charCodeAt.call(body, position)) !== null && (code === 95 || // _ (code >= 48 && code <= 57) || // 0-9 (code >= 65 && code <= 90) || // A-Z (code >= 97 && code <= 122)) // a-z ) { - ++end; + ++position; } return new Tok( - NAME, + TokenKind.NAME, + start, position, - end, line, col, prev, - slice.call(body, position, end), + slice.call(body, start, position), ); } diff --git a/src/language/parser.js b/src/language/parser.js index afa5d6ecab..cca271a420 100644 --- a/src/language/parser.js +++ b/src/language/parser.js @@ -11,7 +11,7 @@ import { Source } from './source'; import { syntaxError } from '../error'; import type { GraphQLError } from '../error'; import { createLexer, TokenKind, getTokenDesc } from './lexer'; -import type { Lexer } from './lexer'; +import type { Lexer, TokenKindEnum } from './lexer'; import type { Location, Token, @@ -62,50 +62,7 @@ import type { DirectiveDefinitionNode, } from './ast'; -import { - NAME, - VARIABLE, - DOCUMENT, - OPERATION_DEFINITION, - VARIABLE_DEFINITION, - SELECTION_SET, - FIELD, - ARGUMENT, - FRAGMENT_SPREAD, - INLINE_FRAGMENT, - FRAGMENT_DEFINITION, - INT, - FLOAT, - STRING, - BOOLEAN, - NULL, - ENUM, - LIST, - OBJECT, - OBJECT_FIELD, - DIRECTIVE, - NAMED_TYPE, - LIST_TYPE, - NON_NULL_TYPE, - SCHEMA_DEFINITION, - OPERATION_TYPE_DEFINITION, - SCALAR_TYPE_DEFINITION, - OBJECT_TYPE_DEFINITION, - FIELD_DEFINITION, - INPUT_VALUE_DEFINITION, - INTERFACE_TYPE_DEFINITION, - UNION_TYPE_DEFINITION, - ENUM_TYPE_DEFINITION, - ENUM_VALUE_DEFINITION, - INPUT_OBJECT_TYPE_DEFINITION, - SCALAR_TYPE_EXTENSION, - OBJECT_TYPE_EXTENSION, - INTERFACE_TYPE_EXTENSION, - UNION_TYPE_EXTENSION, - ENUM_TYPE_EXTENSION, - INPUT_OBJECT_TYPE_EXTENSION, - DIRECTIVE_DEFINITION, -} from './kinds'; +import { Kind } from './kinds'; import { DirectiveLocation } from './directiveLocation'; /** @@ -224,7 +181,7 @@ export function parseType( function parseName(lexer: Lexer<*>): NameNode { const token = expect(lexer, TokenKind.NAME); return { - kind: NAME, + kind: Kind.NAME, value: ((token.value: any): string), loc: loc(lexer, token), }; @@ -244,7 +201,7 @@ function parseDocument(lexer: Lexer<*>): DocumentNode { } while (!skip(lexer, TokenKind.EOF)); return { - kind: DOCUMENT, + kind: Kind.DOCUMENT, definitions, loc: 
loc(lexer, start), }; @@ -319,7 +276,7 @@ function parseOperationDefinition(lexer: Lexer<*>): OperationDefinitionNode { const start = lexer.token; if (peek(lexer, TokenKind.BRACE_L)) { return { - kind: OPERATION_DEFINITION, + kind: Kind.OPERATION_DEFINITION, operation: 'query', name: undefined, variableDefinitions: [], @@ -334,7 +291,7 @@ function parseOperationDefinition(lexer: Lexer<*>): OperationDefinitionNode { name = parseName(lexer); } return { - kind: OPERATION_DEFINITION, + kind: Kind.OPERATION_DEFINITION, operation, name, variableDefinitions: parseVariableDefinitions(lexer), @@ -378,7 +335,7 @@ function parseVariableDefinitions( function parseVariableDefinition(lexer: Lexer<*>): VariableDefinitionNode { const start = lexer.token; return { - kind: VARIABLE_DEFINITION, + kind: Kind.VARIABLE_DEFINITION, variable: parseVariable(lexer), type: (expect(lexer, TokenKind.COLON), parseTypeReference(lexer)), defaultValue: skip(lexer, TokenKind.EQUALS) @@ -395,7 +352,7 @@ function parseVariable(lexer: Lexer<*>): VariableNode { const start = lexer.token; expect(lexer, TokenKind.DOLLAR); return { - kind: VARIABLE, + kind: Kind.VARIABLE, name: parseName(lexer), loc: loc(lexer, start), }; @@ -407,7 +364,7 @@ function parseVariable(lexer: Lexer<*>): VariableNode { function parseSelectionSet(lexer: Lexer<*>): SelectionSetNode { const start = lexer.token; return { - kind: SELECTION_SET, + kind: Kind.SELECTION_SET, selections: many( lexer, TokenKind.BRACE_L, @@ -449,7 +406,7 @@ function parseField(lexer: Lexer<*>): FieldNode { } return { - kind: FIELD, + kind: Kind.FIELD, alias, name, arguments: parseArguments(lexer, false), @@ -480,7 +437,7 @@ function parseArguments( function parseArgument(lexer: Lexer<*>): ArgumentNode { const start = lexer.token; return { - kind: ARGUMENT, + kind: Kind.ARGUMENT, name: parseName(lexer), value: (expect(lexer, TokenKind.COLON), parseValueLiteral(lexer, false)), loc: loc(lexer, start), @@ -490,7 +447,7 @@ function parseArgument(lexer: Lexer<*>): ArgumentNode { function parseConstArgument(lexer: Lexer<*>): ArgumentNode { const start = lexer.token; return { - kind: ARGUMENT, + kind: Kind.ARGUMENT, name: parseName(lexer), value: (expect(lexer, TokenKind.COLON), parseConstValue(lexer)), loc: loc(lexer, start), @@ -513,7 +470,7 @@ function parseFragment( expect(lexer, TokenKind.SPREAD); if (peek(lexer, TokenKind.NAME) && lexer.token.value !== 'on') { return { - kind: FRAGMENT_SPREAD, + kind: Kind.FRAGMENT_SPREAD, name: parseFragmentName(lexer), directives: parseDirectives(lexer, false), loc: loc(lexer, start), @@ -525,7 +482,7 @@ function parseFragment( typeCondition = parseNamedType(lexer); } return { - kind: INLINE_FRAGMENT, + kind: Kind.INLINE_FRAGMENT, typeCondition, directives: parseDirectives(lexer, false), selectionSet: parseSelectionSet(lexer), @@ -547,7 +504,7 @@ function parseFragmentDefinition(lexer: Lexer<*>): FragmentDefinitionNode { // - fragment FragmentName VariableDefinitions? on TypeCondition Directives? 
SelectionSet if (lexer.options.experimentalFragmentVariables) { return { - kind: FRAGMENT_DEFINITION, + kind: Kind.FRAGMENT_DEFINITION, name: parseFragmentName(lexer), variableDefinitions: parseVariableDefinitions(lexer), typeCondition: (expectKeyword(lexer, 'on'), parseNamedType(lexer)), @@ -557,7 +514,7 @@ function parseFragmentDefinition(lexer: Lexer<*>): FragmentDefinitionNode { }; } return { - kind: FRAGMENT_DEFINITION, + kind: Kind.FRAGMENT_DEFINITION, name: parseFragmentName(lexer), typeCondition: (expectKeyword(lexer, 'on'), parseNamedType(lexer)), directives: parseDirectives(lexer, false), @@ -606,14 +563,14 @@ function parseValueLiteral(lexer: Lexer<*>, isConst: boolean): ValueNode { case TokenKind.INT: lexer.advance(); return { - kind: (INT: 'IntValue'), + kind: Kind.INT, value: ((token.value: any): string), loc: loc(lexer, token), }; case TokenKind.FLOAT: lexer.advance(); return { - kind: (FLOAT: 'FloatValue'), + kind: Kind.FLOAT, value: ((token.value: any): string), loc: loc(lexer, token), }; @@ -624,20 +581,20 @@ function parseValueLiteral(lexer: Lexer<*>, isConst: boolean): ValueNode { if (token.value === 'true' || token.value === 'false') { lexer.advance(); return { - kind: (BOOLEAN: 'BooleanValue'), + kind: Kind.BOOLEAN, value: token.value === 'true', loc: loc(lexer, token), }; } else if (token.value === 'null') { lexer.advance(); return { - kind: (NULL: 'NullValue'), + kind: Kind.NULL, loc: loc(lexer, token), }; } lexer.advance(); return { - kind: (ENUM: 'EnumValue'), + kind: Kind.ENUM, value: ((token.value: any): string), loc: loc(lexer, token), }; @@ -654,7 +611,7 @@ function parseStringLiteral(lexer: Lexer<*>): StringValueNode { const token = lexer.token; lexer.advance(); return { - kind: (STRING: 'StringValue'), + kind: Kind.STRING, value: ((token.value: any): string), block: token.kind === TokenKind.BLOCK_STRING, loc: loc(lexer, token), @@ -678,7 +635,7 @@ function parseList(lexer: Lexer<*>, isConst: boolean): ListValueNode { const start = lexer.token; const item = isConst ? 
parseConstValue : parseValueValue; return { - kind: LIST, + kind: Kind.LIST, values: any(lexer, TokenKind.BRACKET_L, item, TokenKind.BRACKET_R), loc: loc(lexer, start), }; @@ -697,7 +654,7 @@ function parseObject(lexer: Lexer<*>, isConst: boolean): ObjectValueNode { fields.push(parseObjectField(lexer, isConst)); } return { - kind: OBJECT, + kind: Kind.OBJECT, fields, loc: loc(lexer, start), }; @@ -709,7 +666,7 @@ function parseObject(lexer: Lexer<*>, isConst: boolean): ObjectValueNode { function parseObjectField(lexer: Lexer<*>, isConst: boolean): ObjectFieldNode { const start = lexer.token; return { - kind: OBJECT_FIELD, + kind: Kind.OBJECT_FIELD, name: parseName(lexer), value: (expect(lexer, TokenKind.COLON), parseValueLiteral(lexer, isConst)), loc: loc(lexer, start), @@ -739,7 +696,7 @@ function parseDirective(lexer: Lexer<*>, isConst: boolean): DirectiveNode { const start = lexer.token; expect(lexer, TokenKind.AT); return { - kind: DIRECTIVE, + kind: Kind.DIRECTIVE, name: parseName(lexer), arguments: parseArguments(lexer, isConst), loc: loc(lexer, start), @@ -761,7 +718,7 @@ export function parseTypeReference(lexer: Lexer<*>): TypeNode { type = parseTypeReference(lexer); expect(lexer, TokenKind.BRACKET_R); type = ({ - kind: LIST_TYPE, + kind: Kind.LIST_TYPE, type, loc: loc(lexer, start), }: ListTypeNode); @@ -770,7 +727,7 @@ export function parseTypeReference(lexer: Lexer<*>): TypeNode { } if (skip(lexer, TokenKind.BANG)) { return ({ - kind: NON_NULL_TYPE, + kind: Kind.NON_NULL_TYPE, type, loc: loc(lexer, start), }: NonNullTypeNode); @@ -784,7 +741,7 @@ export function parseTypeReference(lexer: Lexer<*>): TypeNode { export function parseNamedType(lexer: Lexer<*>): NamedTypeNode { const start = lexer.token; return { - kind: NAMED_TYPE, + kind: Kind.NAMED_TYPE, name: parseName(lexer), loc: loc(lexer, start), }; @@ -864,7 +821,7 @@ function parseSchemaDefinition(lexer: Lexer<*>): SchemaDefinitionNode { TokenKind.BRACE_R, ); return { - kind: SCHEMA_DEFINITION, + kind: Kind.SCHEMA_DEFINITION, directives, operationTypes, loc: loc(lexer, start), @@ -882,7 +839,7 @@ function parseOperationTypeDefinition( expect(lexer, TokenKind.COLON); const type = parseNamedType(lexer); return { - kind: OPERATION_TYPE_DEFINITION, + kind: Kind.OPERATION_TYPE_DEFINITION, operation, type, loc: loc(lexer, start), @@ -899,7 +856,7 @@ function parseScalarTypeDefinition(lexer: Lexer<*>): ScalarTypeDefinitionNode { const name = parseName(lexer); const directives = parseDirectives(lexer, true); return { - kind: SCALAR_TYPE_DEFINITION, + kind: Kind.SCALAR_TYPE_DEFINITION, description, name, directives, @@ -921,7 +878,7 @@ function parseObjectTypeDefinition(lexer: Lexer<*>): ObjectTypeDefinitionNode { const directives = parseDirectives(lexer, true); const fields = parseFieldsDefinition(lexer); return { - kind: OBJECT_TYPE_DEFINITION, + kind: Kind.OBJECT_TYPE_DEFINITION, description, name, interfaces, @@ -986,7 +943,7 @@ function parseFieldDefinition(lexer: Lexer<*>): FieldDefinitionNode { const type = parseTypeReference(lexer); const directives = parseDirectives(lexer, true); return { - kind: FIELD_DEFINITION, + kind: Kind.FIELD_DEFINITION, description, name, arguments: args, @@ -1022,7 +979,7 @@ function parseInputValueDef(lexer: Lexer<*>): InputValueDefinitionNode { } const directives = parseDirectives(lexer, true); return { - kind: INPUT_VALUE_DEFINITION, + kind: Kind.INPUT_VALUE_DEFINITION, description, name, type, @@ -1046,7 +1003,7 @@ function parseInterfaceTypeDefinition( const directives = parseDirectives(lexer, 
true); const fields = parseFieldsDefinition(lexer); return { - kind: INTERFACE_TYPE_DEFINITION, + kind: Kind.INTERFACE_TYPE_DEFINITION, description, name, directives, @@ -1067,7 +1024,7 @@ function parseUnionTypeDefinition(lexer: Lexer<*>): UnionTypeDefinitionNode { const directives = parseDirectives(lexer, true); const types = parseUnionMemberTypes(lexer); return { - kind: UNION_TYPE_DEFINITION, + kind: Kind.UNION_TYPE_DEFINITION, description, name, directives, @@ -1105,7 +1062,7 @@ function parseEnumTypeDefinition(lexer: Lexer<*>): EnumTypeDefinitionNode { const directives = parseDirectives(lexer, true); const values = parseEnumValuesDefinition(lexer); return { - kind: ENUM_TYPE_DEFINITION, + kind: Kind.ENUM_TYPE_DEFINITION, description, name, directives, @@ -1141,7 +1098,7 @@ function parseEnumValueDefinition(lexer: Lexer<*>): EnumValueDefinitionNode { const name = parseName(lexer); const directives = parseDirectives(lexer, true); return { - kind: ENUM_VALUE_DEFINITION, + kind: Kind.ENUM_VALUE_DEFINITION, description, name, directives, @@ -1163,7 +1120,7 @@ function parseInputObjectTypeDefinition( const directives = parseDirectives(lexer, true); const fields = parseInputFieldsDefinition(lexer); return { - kind: INPUT_OBJECT_TYPE_DEFINITION, + kind: Kind.INPUT_OBJECT_TYPE_DEFINITION, description, name, directives, @@ -1229,7 +1186,7 @@ function parseScalarTypeExtension(lexer: Lexer<*>): ScalarTypeExtensionNode { throw unexpected(lexer); } return { - kind: SCALAR_TYPE_EXTENSION, + kind: Kind.SCALAR_TYPE_EXTENSION, name, directives, loc: loc(lexer, start), @@ -1258,7 +1215,7 @@ function parseObjectTypeExtension(lexer: Lexer<*>): ObjectTypeExtensionNode { throw unexpected(lexer); } return { - kind: OBJECT_TYPE_EXTENSION, + kind: Kind.OBJECT_TYPE_EXTENSION, name, interfaces, directives, @@ -1285,7 +1242,7 @@ function parseInterfaceTypeExtension( throw unexpected(lexer); } return { - kind: INTERFACE_TYPE_EXTENSION, + kind: Kind.INTERFACE_TYPE_EXTENSION, name, directives, fields, @@ -1309,7 +1266,7 @@ function parseUnionTypeExtension(lexer: Lexer<*>): UnionTypeExtensionNode { throw unexpected(lexer); } return { - kind: UNION_TYPE_EXTENSION, + kind: Kind.UNION_TYPE_EXTENSION, name, directives, types, @@ -1333,7 +1290,7 @@ function parseEnumTypeExtension(lexer: Lexer<*>): EnumTypeExtensionNode { throw unexpected(lexer); } return { - kind: ENUM_TYPE_EXTENSION, + kind: Kind.ENUM_TYPE_EXTENSION, name, directives, values, @@ -1359,7 +1316,7 @@ function parseInputObjectTypeExtension( throw unexpected(lexer); } return { - kind: INPUT_OBJECT_TYPE_EXTENSION, + kind: Kind.INPUT_OBJECT_TYPE_EXTENSION, name, directives, fields, @@ -1381,7 +1338,7 @@ function parseDirectiveDefinition(lexer: Lexer<*>): DirectiveDefinitionNode { expectKeyword(lexer, 'on'); const locations = parseDirectiveLocations(lexer); return { - kind: DIRECTIVE_DEFINITION, + kind: Kind.DIRECTIVE_DEFINITION, description, name, arguments: args, @@ -1469,7 +1426,7 @@ Loc.prototype.toJSON = Loc.prototype.inspect = function toJSON() { /** * Determines if the next token is of a given kind */ -function peek(lexer: Lexer<*>, kind: string): boolean { +function peek(lexer: Lexer<*>, kind: TokenKindEnum): boolean { return lexer.token.kind === kind; } @@ -1477,7 +1434,7 @@ function peek(lexer: Lexer<*>, kind: string): boolean { * If the next token is of the given kind, return true after advancing * the lexer. Otherwise, do not change the parser state and return false. 
*/ -function skip(lexer: Lexer<*>, kind: string): boolean { +function skip(lexer: Lexer<*>, kind: TokenKindEnum): boolean { const match = lexer.token.kind === kind; if (match) { lexer.advance(); @@ -1489,7 +1446,7 @@ function skip(lexer: Lexer<*>, kind: string): boolean { * If the next token is of the given kind, return that token after advancing * the lexer. Otherwise, do not change the parser state and throw an error. */ -function expect(lexer: Lexer<*>, kind: string): Token { +function expect(lexer: Lexer<*>, kind: TokenKindEnum): Token { const token = lexer.token; if (token.kind === kind) { lexer.advance(); @@ -1541,9 +1498,9 @@ function unexpected(lexer: Lexer<*>, atToken?: ?Token): GraphQLError { */ function any<T>( lexer: Lexer<*>, - openKind: string, + openKind: TokenKindEnum, parseFn: (lexer: Lexer<*>) => T, - closeKind: string, + closeKind: TokenKindEnum, ): Array<T> { expect(lexer, openKind); const nodes = []; @@ -1561,9 +1518,9 @@ function any<T>( */ function many<T>( lexer: Lexer<*>, - openKind: string, + openKind: TokenKindEnum, parseFn: (lexer: Lexer<*>) => T, - closeKind: string, + closeKind: TokenKindEnum, ): Array<T> { expect(lexer, openKind); const nodes = [parseFn(lexer)]; diff --git a/src/type/definition.js b/src/type/definition.js index 6b5f54bfa7..9e3dfaa0b9 100644 --- a/src/type/definition.js +++ b/src/type/definition.js @@ -11,7 +11,7 @@ import instanceOf from '../jsutils/instanceOf'; import invariant from '../jsutils/invariant'; import isInvalid from '../jsutils/isInvalid'; import type { ObjMap } from '../jsutils/ObjMap'; -import * as Kind from '../language/kinds'; +import { Kind } from '../language/kinds'; import { valueFromASTUntyped } from '../utilities/valueFromASTUntyped'; import type { ScalarTypeDefinitionNode, diff --git a/src/type/scalars.js b/src/type/scalars.js index eed96e2f58..a9e08dc866 100644 --- a/src/type/scalars.js +++ b/src/type/scalars.js @@ -8,7 +8,7 @@ */ import { GraphQLScalarType, isNamedType } from './definition'; -import * as Kind from '../language/kinds'; +import { Kind } from '../language/kinds'; // As per the GraphQL Spec, Integers are only treated as valid when a valid // 32-bit signed integer, providing the broadest support across platforms.
diff --git a/src/utilities/TypeInfo.js b/src/utilities/TypeInfo.js index 6d3ce7998a..5b99863521 100644 --- a/src/utilities/TypeInfo.js +++ b/src/utilities/TypeInfo.js @@ -7,7 +7,7 @@ * @flow */ -import * as Kind from '../language/kinds'; +import { Kind } from '../language/kinds'; import { isObjectType, isInterfaceType, diff --git a/src/utilities/astFromValue.js b/src/utilities/astFromValue.js index d3cfe59e02..4e9f2d257b 100644 --- a/src/utilities/astFromValue.js +++ b/src/utilities/astFromValue.js @@ -12,7 +12,7 @@ import { forEach, isCollection } from 'iterall'; import isNullish from '../jsutils/isNullish'; import isInvalid from '../jsutils/isInvalid'; import type { ValueNode } from '../language/ast'; -import * as Kind from '../language/kinds'; +import { Kind } from '../language/kinds'; import type { GraphQLInputType } from '../type/definition'; import { isScalarType, diff --git a/src/utilities/buildASTSchema.js b/src/utilities/buildASTSchema.js index 16ed9a99f4..8216a2e0f7 100644 --- a/src/utilities/buildASTSchema.js +++ b/src/utilities/buildASTSchema.js @@ -16,8 +16,7 @@ import { TokenKind } from '../language/lexer'; import { parse } from '../language/parser'; import type { Source } from '../language/source'; import { getDirectiveValues } from '../execution/values'; - -import * as Kind from '../language/kinds'; +import { Kind } from '../language/kinds'; import type { DocumentNode, diff --git a/src/utilities/extendSchema.js b/src/utilities/extendSchema.js index 673999001f..f064894c17 100644 --- a/src/utilities/extendSchema.js +++ b/src/utilities/extendSchema.js @@ -27,7 +27,7 @@ import { GraphQLList, GraphQLNonNull } from '../type/wrappers'; import { GraphQLDirective } from '../type/directives'; -import * as Kind from '../language/kinds'; +import { Kind } from '../language/kinds'; import type { GraphQLType, GraphQLNamedType } from '../type/definition'; diff --git a/src/utilities/getOperationAST.js b/src/utilities/getOperationAST.js index ca52e34a2b..ad3af6b870 100644 --- a/src/utilities/getOperationAST.js +++ b/src/utilities/getOperationAST.js @@ -7,7 +7,7 @@ * @flow */ -import { OPERATION_DEFINITION } from '../language/kinds'; +import { Kind } from '../language/kinds'; import type { DocumentNode, OperationDefinitionNode } from '../language/ast'; /** @@ -22,7 +22,7 @@ export function getOperationAST( let operation = null; for (let i = 0; i < documentAST.definitions.length; i++) { const definition = documentAST.definitions[i]; - if (definition.kind === OPERATION_DEFINITION) { + if (definition.kind === Kind.OPERATION_DEFINITION) { if (!operationName) { // If no operation name was provided, only return an Operation if there // is one defined in the document. 
Upon encountering the second, return diff --git a/src/utilities/isValidLiteralValue.js b/src/utilities/isValidLiteralValue.js index 43d45f14bf..5b5a8bea90 100644 --- a/src/utilities/isValidLiteralValue.js +++ b/src/utilities/isValidLiteralValue.js @@ -10,7 +10,7 @@ import { TypeInfo } from './TypeInfo'; import type { GraphQLError } from '../error/GraphQLError'; import type { ValueNode } from '../language/ast'; -import { DOCUMENT } from '../language/kinds'; +import { Kind } from '../language/kinds'; import { visit, visitWithTypeInfo } from '../language/visitor'; import type { GraphQLInputType } from '../type/definition'; import { GraphQLSchema } from '../type/schema'; @@ -27,7 +27,7 @@ export function isValidLiteralValue( valueNode: ValueNode, ): $ReadOnlyArray<GraphQLError> { const emptySchema = new GraphQLSchema({}); - const emptyDoc = { kind: DOCUMENT, definitions: [] }; + const emptyDoc = { kind: Kind.DOCUMENT, definitions: [] }; const typeInfo = new TypeInfo(emptySchema, undefined, type); const context = new ValidationContext(emptySchema, emptyDoc, typeInfo); const visitor = ValuesOfCorrectType(context); diff --git a/src/utilities/typeFromAST.js b/src/utilities/typeFromAST.js index f16c3165eb..247888a189 100644 --- a/src/utilities/typeFromAST.js +++ b/src/utilities/typeFromAST.js @@ -7,7 +7,7 @@ * @flow */ -import * as Kind from '../language/kinds'; +import { Kind } from '../language/kinds'; import type { NamedTypeNode, ListTypeNode, diff --git a/src/utilities/valueFromAST.js b/src/utilities/valueFromAST.js index 9cf859bd42..e391fccbd4 100644 --- a/src/utilities/valueFromAST.js +++ b/src/utilities/valueFromAST.js @@ -10,7 +10,7 @@ import keyMap from '../jsutils/keyMap'; import isInvalid from '../jsutils/isInvalid'; import type { ObjMap } from '../jsutils/ObjMap'; -import * as Kind from '../language/kinds'; +import { Kind } from '../language/kinds'; import { isScalarType, isEnumType, diff --git a/src/utilities/valueFromASTUntyped.js b/src/utilities/valueFromASTUntyped.js index 7d64ce3fcf..66536b09f6 100644 --- a/src/utilities/valueFromASTUntyped.js +++ b/src/utilities/valueFromASTUntyped.js @@ -11,7 +11,7 @@ import keyValMap from '../jsutils/keyValMap'; import isInvalid from '../jsutils/isInvalid'; import type { ObjMap } from '../jsutils/ObjMap'; -import * as Kind from '../language/kinds'; +import { Kind } from '../language/kinds'; import type { ValueNode } from '../language/ast'; /** diff --git a/src/validation/rules/ExecutableDefinitions.js b/src/validation/rules/ExecutableDefinitions.js index fc2d467646..865f15a0ab 100644 --- a/src/validation/rules/ExecutableDefinitions.js +++ b/src/validation/rules/ExecutableDefinitions.js @@ -9,11 +9,7 @@ import type { ValidationContext } from '../index'; import { GraphQLError } from '../../error'; -import { - FRAGMENT_DEFINITION, - OPERATION_DEFINITION, - SCHEMA_DEFINITION, -} from '../../language/kinds'; +import { Kind } from '../../language/kinds'; import type { ASTVisitor } from '../../language/visitor'; export function nonExecutableDefinitionMessage(defName: string): string { @@ -31,13 +27,13 @@ export function ExecutableDefinitions(context: ValidationContext): ASTVisitor { Document(node) { node.definitions.forEach(definition => { if ( - definition.kind !== OPERATION_DEFINITION && - definition.kind !== FRAGMENT_DEFINITION + definition.kind !== Kind.OPERATION_DEFINITION && + definition.kind !== Kind.FRAGMENT_DEFINITION ) { context.reportError( new GraphQLError( nonExecutableDefinitionMessage( - definition.kind === SCHEMA_DEFINITION + definition.kind === 
Kind.SCHEMA_DEFINITION ? 'schema' : definition.name.value, ), diff --git a/src/validation/rules/KnownArgumentNames.js b/src/validation/rules/KnownArgumentNames.js index 65531d0a7f..5c3e99013b 100644 --- a/src/validation/rules/KnownArgumentNames.js +++ b/src/validation/rules/KnownArgumentNames.js @@ -12,7 +12,7 @@ import { GraphQLError } from '../../error'; import type { ASTVisitor } from '../../language/visitor'; import suggestionList from '../../jsutils/suggestionList'; import quotedOrList from '../../jsutils/quotedOrList'; -import { FIELD, DIRECTIVE } from '../../language/kinds'; +import { Kind } from '../../language/kinds'; export function unknownArgMessage( argName: string, @@ -53,7 +53,7 @@ export function KnownArgumentNames(context: ValidationContext): ASTVisitor { const argDef = context.getArgument(); if (!argDef) { const argumentOf = ancestors[ancestors.length - 1]; - if (argumentOf.kind === FIELD) { + if (argumentOf.kind === Kind.FIELD) { const fieldDef = context.getFieldDef(); const parentType = context.getParentType(); if (fieldDef && parentType) { @@ -72,7 +72,7 @@ export function KnownArgumentNames(context: ValidationContext): ASTVisitor { ), ); } - } else if (argumentOf.kind === DIRECTIVE) { + } else if (argumentOf.kind === Kind.DIRECTIVE) { const directive = context.getDirective(); if (directive) { context.reportError( diff --git a/src/validation/rules/KnownDirectives.js b/src/validation/rules/KnownDirectives.js index a513fd1e0f..dd17831985 100644 --- a/src/validation/rules/KnownDirectives.js +++ b/src/validation/rules/KnownDirectives.js @@ -10,7 +10,7 @@ import type { ValidationContext } from '../index'; import { GraphQLError } from '../../error'; import find from '../../jsutils/find'; -import * as Kind from '../../language/kinds'; +import { Kind } from '../../language/kinds'; import { DirectiveLocation } from '../../language/directiveLocation'; import type { ASTVisitor } from '../../language/visitor'; diff --git a/src/validation/rules/LoneAnonymousOperation.js b/src/validation/rules/LoneAnonymousOperation.js index 31c84bd679..5b32bf5da8 100644 --- a/src/validation/rules/LoneAnonymousOperation.js +++ b/src/validation/rules/LoneAnonymousOperation.js @@ -9,7 +9,7 @@ import type { ValidationContext } from '../index'; import { GraphQLError } from '../../error'; -import { OPERATION_DEFINITION } from '../../language/kinds'; +import { Kind } from '../../language/kinds'; import type { ASTVisitor } from '../../language/visitor'; export function anonOperationNotAloneMessage(): string { @@ -27,7 +27,7 @@ export function LoneAnonymousOperation(context: ValidationContext): ASTVisitor { return { Document(node) { operationCount = node.definitions.filter( - definition => definition.kind === OPERATION_DEFINITION, + definition => definition.kind === Kind.OPERATION_DEFINITION, ).length; }, OperationDefinition(node) { diff --git a/src/validation/rules/OverlappingFieldsCanBeMerged.js b/src/validation/rules/OverlappingFieldsCanBeMerged.js index 8ce6298cdb..b99ca8b839 100644 --- a/src/validation/rules/OverlappingFieldsCanBeMerged.js +++ b/src/validation/rules/OverlappingFieldsCanBeMerged.js @@ -17,7 +17,7 @@ import type { ArgumentNode, FragmentDefinitionNode, } from '../../language/ast'; -import * as Kind from '../../language/kinds'; +import { Kind } from '../../language/kinds'; import { print } from '../../language/printer'; import type { ASTVisitor } from '../../language/visitor'; import { diff --git a/src/validation/validate.js b/src/validation/validate.js index bcc7dbd5fb..1a5d425fca 100644 
--- a/src/validation/validate.js +++ b/src/validation/validate.js @@ -11,7 +11,7 @@ import invariant from '../jsutils/invariant'; import type { ObjMap } from '../jsutils/ObjMap'; import { GraphQLError } from '../error'; import { visit, visitInParallel, visitWithTypeInfo } from '../language/visitor'; -import * as Kind from '../language/kinds'; +import { Kind } from '../language/kinds'; import type { DocumentNode, OperationDefinitionNode,