Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 17 additions & 6 deletions graphql.go
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ func ParseSchema(schemaString string, resolver interface{}, opts ...SchemaOpt) (
opt(s)
}

if err := s.schema.Parse(schemaString); err != nil {
if err := s.schema.Parse(schemaString, s.useStringDescriptions); err != nil {
return nil, err
}

Expand Down Expand Up @@ -63,16 +63,27 @@ type Schema struct {
schema *schema.Schema
res *resolvable.Schema

maxDepth int
maxParallelism int
tracer trace.Tracer
validationTracer trace.ValidationTracer
logger log.Logger
maxDepth int
maxParallelism int
tracer trace.Tracer
validationTracer trace.ValidationTracer
logger log.Logger
useStringDescriptions bool
}

// SchemaOpt is an option to pass to ParseSchema or MustParseSchema.
type SchemaOpt func(*Schema)

// UseStringDescriptions enables parsing of double-quoted and triple-quoted
// string literals as descriptions, per the June 2018 GraphQL specification
// (https://facebook.github.io/graphql/June2018/). When disabled, comments
// are parsed as descriptions instead.
func UseStringDescriptions() SchemaOpt {
	return func(s *Schema) { s.useStringDescriptions = true }
}

// MaxDepth specifies the maximum field nesting depth in a query. The default is 0 which disables max depth checking.
func MaxDepth(n int) SchemaOpt {
return func(s *Schema) {
Expand Down
100 changes: 83 additions & 17 deletions internal/common/lexer.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@ package common

import (
"fmt"
"strconv"
"strings"
"text/scanner"

Expand All @@ -11,23 +12,24 @@ import (
type syntaxError string

type Lexer struct {
sc *scanner.Scanner
next rune
descComment string
sc *scanner.Scanner
next rune
descComment string
useStringDescriptions bool
}

type Ident struct {
Name string
Loc errors.Location
}

func NewLexer(s string) *Lexer {
// NewLexer returns a Lexer over s. When useStringDescriptions is set,
// string literals (rather than comments) supply type and field descriptions.
func NewLexer(s string, useStringDescriptions bool) *Lexer {
	sc := new(scanner.Scanner)
	sc.Mode = scanner.ScanIdents | scanner.ScanInts | scanner.ScanFloats | scanner.ScanStrings
	sc.Init(strings.NewReader(s))
	return &Lexer{
		sc:                    sc,
		useStringDescriptions: useStringDescriptions,
	}
}

func (l *Lexer) CatchSyntaxError(f func()) (errRes *errors.QueryError) {
Expand All @@ -50,13 +52,15 @@ func (l *Lexer) Peek() rune {
return l.next
}

// Consume whitespace and tokens equivalent to whitespace (e.g. commas and comments).
// ConsumeWhitespace consumes whitespace and tokens equivalent to whitespace (e.g. commas and comments).
//
// Consumed comment characters will build the description for the next type or field encountered.
// The description is available from `DescComment()`, and will be reset every time `Consume()` is
// executed.
func (l *Lexer) Consume() {
l.descComment = ""
// The description is available from `DescComment()`, and will be reset every time `ConsumeWhitespace()` is
// executed unless l.useStringDescriptions is set.
func (l *Lexer) ConsumeWhitespace() {
if !l.useStringDescriptions {
l.descComment = ""
}
for {
l.next = l.sc.Scan()

Expand Down Expand Up @@ -84,6 +88,31 @@ func (l *Lexer) Consume() {
}
}

// consumeDescription optionally consumes a description per the June 2018
// GraphQL spec, if one is present, and reports whether it did so.
//
// Single-quoted strings are single line. Triple-quoted strings can be
// multi-line and are whitespace-trimmed on both ends.
//
// http://facebook.github.io/graphql/June2018/#sec-Descriptions
func (l *Lexer) consumeDescription() bool {
	// Only a string token can begin a description.
	if l.next != scanner.String {
		return false
	}
	l.descComment = ""
	tokenText := l.sc.TokenText()
	// A triple-quoted string scans as an empty "" token followed by another
	// quote, because the scanner treats each quoted string as one token.
	if l.sc.Peek() == '"' {
		l.next = l.sc.Next() // consume the third quote
		l.consumeTripleQuoteComment()
	} else {
		l.consumeStringComment(tokenText)
	}
	return true
}

func (l *Lexer) ConsumeIdent() string {
name := l.sc.TokenText()
l.ConsumeToken(scanner.Ident)
Expand All @@ -101,23 +130,28 @@ func (l *Lexer) ConsumeKeyword(keyword string) {
if l.next != scanner.Ident || l.sc.TokenText() != keyword {
l.SyntaxError(fmt.Sprintf("unexpected %q, expecting %q", l.sc.TokenText(), keyword))
}
l.Consume()
l.ConsumeWhitespace()
}

// ConsumeLiteral consumes the current token as a basic literal and advances
// past any following whitespace.
func (l *Lexer) ConsumeLiteral() *BasicLit {
	lit := BasicLit{Type: l.next, Text: l.sc.TokenText()}
	l.ConsumeWhitespace()
	return &lit
}

// ConsumeToken consumes the current token, raising a syntax error if it is
// not the expected rune, then advances past any following whitespace.
func (l *Lexer) ConsumeToken(expected rune) {
	if l.next != expected {
		msg := fmt.Sprintf("unexpected %q, expecting %s", l.sc.TokenText(), scanner.TokenString(expected))
		l.SyntaxError(msg)
	}
	l.ConsumeWhitespace()
}

// DescComment returns the description accumulated for the next type or
// field. In string-description mode it first consumes any string
// description (and trailing whitespace) sitting at the current position.
func (l *Lexer) DescComment() string {
	if l.useStringDescriptions && l.consumeDescription() {
		l.ConsumeWhitespace()
	}
	return l.descComment
}

Expand All @@ -132,19 +166,49 @@ func (l *Lexer) Location() errors.Location {
}
}

// consumeTripleQuoteComment reads characters until the closing `"""` (or
// EOF) and appends the whitespace-trimmed contents to l.descComment.
// It must be called with l.next positioned on the third opening quote.
func (l *Lexer) consumeTripleQuoteComment() {
	if l.next != '"' {
		panic("consumeTripleQuoteComment used in wrong context: no third quote?")
	}

	// Build the raw comment in a strings.Builder instead of repeated string
	// concatenation, which is quadratic in the comment length.
	var b strings.Builder
	numQuotes := 0
	for {
		l.next = l.sc.Next()
		if l.next == '"' {
			numQuotes++
		} else {
			numQuotes = 0
		}
		b.WriteRune(l.next)
		if numQuotes == 3 || l.next == scanner.EOF {
			break
		}
	}
	// Strip the closing quotes (if any) and surrounding whitespace.
	comment := b.String()
	l.descComment += strings.TrimSpace(comment[:len(comment)-numQuotes])
}

// consumeStringComment appends the unquoted value of the string token str
// to l.descComment.
func (l *Lexer) consumeStringComment(str string) {
	unquoted, err := strconv.Unquote(str)
	if err != nil {
		// The scanner only hands us well-formed string tokens, so a failed
		// unquote indicates a lexer bug rather than bad input.
		panic(err)
	}
	l.descComment += unquoted
}

// consumeComment consumes all characters from `#` to the first encountered line terminator.
// The characters are appended to `l.descComment`.
func (l *Lexer) consumeComment() {
if l.next != '#' {
return
panic("consumeComment used in wrong context")
}

// TODO: count and trim whitespace so we can dedent any following lines.
if l.sc.Peek() == ' ' {
l.sc.Next()
}

if l.descComment != "" {
if l.descComment != "" && !l.useStringDescriptions {
// TODO: use a bytes.Buffer or strings.Builder instead of this.
l.descComment += "\n"
}
Expand All @@ -155,7 +219,9 @@ func (l *Lexer) consumeComment() {
break
}

// TODO: use a bytes.Buffer or strings.Build instead of this.
l.descComment += string(next)
if !l.useStringDescriptions {
// TODO: use a bytes.Buffer or strings.Builder instead of this.
l.descComment += string(next)
}
}
}
72 changes: 62 additions & 10 deletions internal/common/lexer_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -7,32 +7,84 @@ import (
)

type consumeTestCase struct {
description string
definition string
expected string // expected description
description string
definition string
expected string // expected description
failureExpected bool
useStringDescriptions bool
}

// Note that these tests stop as soon as they parse the comments, so even though the rest of the file will fail to parse sometimes, the tests still pass
var consumeTests = []consumeTestCase{{
description: "initial test",
description: "no string descriptions allowed in old mode",
definition: `

# Comment line 1
# Comment line 2
#Comment line 2
,,,,,, # Commas are insignificant
"New style comments"
type Hello {
world: String!
}`,
expected: "Comment line 1\nComment line 2\nCommas are insignificant",
expected: "Comment line 1\nComment line 2\nCommas are insignificant",
useStringDescriptions: false,
}, {
description: "simple string descriptions allowed in new mode",
definition: `

# Comment line 1
#Comment line 2
,,,,,, # Commas are insignificant
"New style comments"
type Hello {
world: String!
}`,
expected: "New style comments",
useStringDescriptions: true,
}, {
description: "comment after description works",
definition: `

# Comment line 1
#Comment line 2
,,,,,, # Commas are insignificant
type Hello {
world: String!
}`,
expected: "",
useStringDescriptions: true,
}, {
description: "triple quote descriptions allowed in new mode",
definition: `

# Comment line 1
#Comment line 2
,,,,,, # Commas are insignificant
"""
New style comments
Another line
"""
type Hello {
world: String!
}`,
expected: "New style comments\nAnother line",
useStringDescriptions: true,
}}

func TestConsume(t *testing.T) {
for _, test := range consumeTests {
t.Run(test.description, func(t *testing.T) {
lex := common.NewLexer(test.definition)
lex := common.NewLexer(test.definition, test.useStringDescriptions)

err := lex.CatchSyntaxError(lex.Consume)
if err != nil {
t.Fatal(err)
err := lex.CatchSyntaxError(func() { lex.ConsumeWhitespace() })
if test.failureExpected {
if err == nil {
t.Fatalf("schema should have been invalid; comment: %s", lex.DescComment())
}
} else {
if err != nil {
t.Fatal(err)
}
}

if test.expected != lex.DescComment() {
Expand Down
4 changes: 2 additions & 2 deletions internal/query/query.go
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ func (InlineFragment) isSelection() {}
func (FragmentSpread) isSelection() {}

func Parse(queryString string) (*Document, *errors.QueryError) {
l := common.NewLexer(queryString)
l := common.NewLexer(queryString, false)

var doc *Document
err := l.CatchSyntaxError(func() { doc = parseDocument(l) })
Expand All @@ -107,7 +107,7 @@ func Parse(queryString string) (*Document, *errors.QueryError) {

func parseDocument(l *common.Lexer) *Document {
d := &Document{}
l.Consume()
l.ConsumeWhitespace()
for l.Peek() != scanner.EOF {
if l.Peek() == '{' {
op := &Operation{Type: Query, Loc: l.Location()}
Expand Down
4 changes: 2 additions & 2 deletions internal/schema/meta.go
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ var Meta *Schema
func init() {
Meta = &Schema{} // bootstrap
Meta = New()
if err := Meta.Parse(metaSrc); err != nil {
if err := Meta.Parse(metaSrc, false); err != nil {
panic(err)
}
}
Expand Down Expand Up @@ -167,7 +167,7 @@ var metaSrc = `
inputFields: [__InputValue!]
ofType: __Type
}

# An enum describing what kind of type a given ` + "`" + `__Type` + "`" + ` is.
enum __TypeKind {
# Indicates this type is a scalar.
Expand Down
6 changes: 3 additions & 3 deletions internal/schema/schema.go
Original file line number Diff line number Diff line change
Expand Up @@ -246,8 +246,8 @@ func New() *Schema {
}

// Parse the schema string.
func (s *Schema) Parse(schemaString string) error {
l := common.NewLexer(schemaString)
func (s *Schema) Parse(schemaString string, useStringDescriptions bool) error {
l := common.NewLexer(schemaString, useStringDescriptions)

err := l.CatchSyntaxError(func() { parseSchema(s, l) })
if err != nil {
Expand Down Expand Up @@ -389,7 +389,7 @@ func resolveInputObject(s *Schema, values common.InputValueList) error {
}

func parseSchema(s *Schema, l *common.Lexer) {
l.Consume()
l.ConsumeWhitespace()

for l.Peek() != scanner.EOF {
desc := l.DescComment()
Expand Down
6 changes: 3 additions & 3 deletions internal/schema/schema_internal_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ func TestParseInterfaceDef(t *testing.T) {
tests := []testCase{{
description: "Parses simple interface",
definition: "Greeting { field: String }",
expected: &Interface{Name: "Greeting", Fields: []*Field{&Field{Name: "field"}}},
expected: &Interface{Name: "Greeting", Fields: []*Field{{Name: "field"}}},
}}

for _, test := range tests {
Expand Down Expand Up @@ -158,8 +158,8 @@ func compareObjects(t *testing.T, expected, actual *Object) {
func setup(t *testing.T, def string) *common.Lexer {
t.Helper()

lex := common.NewLexer(def)
lex.Consume()
lex := common.NewLexer(def, false)
lex.ConsumeWhitespace()

return lex
}
Loading