
Commit 94178a2

internal/lsp: use source.Offset instead of tok.Offset
This isn't strictly necessary for some of the cases, but it's better to use it in all cases. Also added a test to ensure that we avoid (*token.File).Offset in all of gopls; the test was probably overkill, but it was quick to write.

Change-Id: I6dd0126e2211796d5de4e7a389386d7aa81014f0
Reviewed-on: https://go-review.googlesource.com/c/tools/+/353890
Run-TryBot: Rebecca Stambler <[email protected]>
Trust: Rebecca Stambler <[email protected]>
gopls-CI: kokoro <[email protected]>
TryBot-Result: Go Bot <[email protected]>
Reviewed-by: Robert Findley <[email protected]>
1 parent 0b930fb commit 94178a2

11 files changed: +259 -56 lines

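The diffs below replace direct calls to (*token.File).Offset, which panics when the given position is out of range for the file, with a source.Offset helper that returns an error instead, so each call site can bail out gracefully. The helper itself is not shown in this commit view. The following is a minimal sketch of what such a wrapper looks like, inferred from how the call sites use it; the body, the error text, and the InRange reconstruction are assumptions, not the verbatim gopls code:

package source

import (
	"fmt"
	"go/token"
)

// Offset is a bounds-checked version of (*token.File).Offset: it returns an
// error instead of panicking when pos does not lie within tok's range.
func Offset(tok *token.File, pos token.Pos) (int, error) {
	if !InRange(tok, pos) {
		return -1, fmt.Errorf("pos %v is not in range of file [%v:%v)", pos, tok.Base(), tok.Base()+tok.Size())
	}
	return tok.Offset(pos), nil
}

// InRange reports whether pos falls within the range covered by tok.
// (InRange already exists in the source package; the removed lines in the
// diffs below call it directly. This body is a plausible reconstruction.)
func InRange(tok *token.File, pos token.Pos) bool {
	return int(pos) >= tok.Base() && int(pos) <= tok.Base()+tok.Size()
}

Call sites that previously wrote offset := tok.Offset(pos) now write offset, err := source.Offset(tok, pos) (or plain Offset inside the source package itself) and either propagate the error or abandon the fix they were attempting, which is the pattern repeated in every hunk below.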

internal/lsp/cache/parse.go

Lines changed: 51 additions & 18 deletions
@@ -782,7 +782,10 @@ func fixMissingCurlies(f *ast.File, b *ast.BlockStmt, parent ast.Node, tok *toke
 	// If the "{" is already in the source code, there isn't anything to
 	// fix since we aren't missing curlies.
 	if b.Lbrace.IsValid() {
-		braceOffset := tok.Offset(b.Lbrace)
+		braceOffset, err := source.Offset(tok, b.Lbrace)
+		if err != nil {
+			return nil
+		}
 		if braceOffset < len(src) && src[braceOffset] == '{' {
 			return nil
 		}
@@ -834,7 +837,11 @@ func fixMissingCurlies(f *ast.File, b *ast.BlockStmt, parent ast.Node, tok *toke
 
 	var buf bytes.Buffer
 	buf.Grow(len(src) + 3)
-	buf.Write(src[:tok.Offset(insertPos)])
+	offset, err := source.Offset(tok, insertPos)
+	if err != nil {
+		return nil
+	}
+	buf.Write(src[:offset])
 
 	// Detect if we need to insert a semicolon to fix "for" loop situations like:
 	//
@@ -854,7 +861,7 @@ func fixMissingCurlies(f *ast.File, b *ast.BlockStmt, parent ast.Node, tok *toke
 	// Insert "{}" at insertPos.
 	buf.WriteByte('{')
 	buf.WriteByte('}')
-	buf.Write(src[tok.Offset(insertPos):])
+	buf.Write(src[offset:])
 	return buf.Bytes()
 }
 
@@ -888,7 +895,10 @@ func fixEmptySwitch(body *ast.BlockStmt, tok *token.File, src []byte) {
 
 	// If the right brace is actually in the source code at the
 	// specified position, don't mess with it.
-	braceOffset := tok.Offset(body.Rbrace)
+	braceOffset, err := source.Offset(tok, body.Rbrace)
+	if err != nil {
+		return
+	}
 	if braceOffset < len(src) && src[braceOffset] == '}' {
 		return
 	}
@@ -923,8 +933,12 @@ func fixDanglingSelector(s *ast.SelectorExpr, tok *token.File, src []byte) []byt
 		return nil
 	}
 
+	insertOffset, err := source.Offset(tok, s.X.End())
+	if err != nil {
+		return nil
+	}
 	// Insert directly after the selector's ".".
-	insertOffset := tok.Offset(s.X.End()) + 1
+	insertOffset++
 	if src[insertOffset-1] != '.' {
 		return nil
 	}
@@ -980,7 +994,10 @@ func isPhantomUnderscore(id *ast.Ident, tok *token.File, src []byte) bool {
 
 	// Phantom underscore means the underscore is not actually in the
 	// program text.
-	offset := tok.Offset(id.Pos())
+	offset, err := source.Offset(tok, id.Pos())
+	if err != nil {
+		return false
+	}
 	return len(src) <= offset || src[offset] != '_'
 }
 
@@ -995,11 +1012,15 @@ func fixInitStmt(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte)
 	}
 
 	// Try to extract a statement from the BadExpr.
-	// Make sure that the positions are in range first.
-	if !source.InRange(tok, bad.Pos()) || !source.InRange(tok, bad.End()-1) {
+	start, err := source.Offset(tok, bad.Pos())
+	if err != nil {
+		return
+	}
+	end, err := source.Offset(tok, bad.End()-1)
+	if err != nil {
 		return
 	}
-	stmtBytes := src[tok.Offset(bad.Pos()) : tok.Offset(bad.End()-1)+1]
+	stmtBytes := src[start : end+1]
 	stmt, err := parseStmt(bad.Pos(), stmtBytes)
 	if err != nil {
 		return
@@ -1039,7 +1060,11 @@ func fixInitStmt(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte)
 // readKeyword reads the keyword starting at pos, if any.
 func readKeyword(pos token.Pos, tok *token.File, src []byte) string {
 	var kwBytes []byte
-	for i := tok.Offset(pos); i < len(src); i++ {
+	offset, err := source.Offset(tok, pos)
+	if err != nil {
+		return ""
+	}
+	for i := offset; i < len(src); i++ {
 		// Use a simplified identifier check since keywords are always lowercase ASCII.
 		if src[i] < 'a' || src[i] > 'z' {
 			break
@@ -1076,15 +1101,15 @@ func fixArrayType(bad *ast.BadExpr, parent ast.Node, tok *token.File, src []byte
 	// Avoid doing tok.Offset(to) since that panics if badExpr ends at EOF.
 	// It also panics if the position is not in the range of the file, and
 	// badExprs may not necessarily have good positions, so check first.
-	if !source.InRange(tok, from) {
+	fromOffset, err := source.Offset(tok, from)
+	if err != nil {
 		return false
 	}
-	if !source.InRange(tok, to-1) {
+	toOffset, err := source.Offset(tok, to-1)
+	if err != nil {
 		return false
 	}
-	fromOffset := tok.Offset(from)
-	toOffset := tok.Offset(to-1) + 1
-	exprBytes = append(exprBytes, src[fromOffset:toOffset]...)
+	exprBytes = append(exprBytes, src[fromOffset:toOffset+1]...)
 	exprBytes = bytes.TrimSpace(exprBytes)
 
 	// If our expression ends in "]" (e.g. "[]"), add a phantom selector
@@ -1237,18 +1262,26 @@ FindTo:
 		}
 	}
 
-	if !from.IsValid() || tok.Offset(from) >= len(src) {
+	fromOffset, err := source.Offset(tok, from)
+	if err != nil {
+		return false
+	}
+	if !from.IsValid() || fromOffset >= len(src) {
 		return false
 	}
 
-	if !to.IsValid() || tok.Offset(to) >= len(src) {
+	toOffset, err := source.Offset(tok, to)
+	if err != nil {
+		return false
+	}
+	if !to.IsValid() || toOffset >= len(src) {
 		return false
 	}
 
 	// Insert any phantom selectors needed to prevent dangling "." from messing
 	// up the AST.
 	exprBytes := make([]byte, 0, int(to-from)+len(phantomSelectors))
-	for i, b := range src[tok.Offset(from):tok.Offset(to)] {
+	for i, b := range src[fromOffset:toOffset] {
 		if len(phantomSelectors) > 0 && from+token.Pos(i) == phantomSelectors[0] {
 			exprBytes = append(exprBytes, '_')
 			phantomSelectors = phantomSelectors[1:]

internal/lsp/semantic.go

Lines changed: 4 additions & 1 deletion
@@ -268,7 +268,10 @@ func locInRange(f *token.File, loc token.Pos) bool {
 func (e *encoded) srcLine(x ast.Node) string {
 	file := e.pgf.Tok
 	line := file.Line(x.Pos())
-	start := file.Offset(file.LineStart(line))
+	start, err := source.Offset(file, file.LineStart(line))
+	if err != nil {
+		return ""
+	}
 	end := start
 	for ; end < len(e.pgf.Src) && e.pgf.Src[end] != '\n'; end++ {

internal/lsp/source/completion/package.go

Lines changed: 5 additions & 2 deletions
@@ -80,12 +80,15 @@ func packageCompletionSurrounding(ctx context.Context, fset *token.FileSet, pgf
 		return nil, fmt.Errorf("unparseable file (%s)", pgf.URI)
 	}
 	tok := fset.File(expr.Pos())
-	offset := pgf.Tok.Offset(pos)
+	offset, err := source.Offset(pgf.Tok, pos)
+	if err != nil {
+		return nil, err
+	}
 	if offset > tok.Size() {
 		debug.Bug(ctx, "out of bounds cursor", "cursor offset (%d) out of bounds for %s (size: %d)", offset, pgf.URI, tok.Size())
 		return nil, fmt.Errorf("cursor out of bounds")
 	}
-	cursor := tok.Pos(pgf.Tok.Offset(pos))
+	cursor := tok.Pos(offset)
 	m := &protocol.ColumnMapper{
 		URI:     pgf.URI,
 		Content: pgf.Src,

internal/lsp/source/extract.go

Lines changed: 51 additions & 15 deletions
@@ -63,7 +63,11 @@ func extractVariable(fset *token.FileSet, rng span.Range, src []byte, file *ast.
 	if tok == nil {
 		return nil, fmt.Errorf("no file for pos %v", fset.Position(file.Pos()))
 	}
-	newLineIndent := "\n" + calculateIndentation(src, tok, insertBeforeStmt)
+	indent, err := calculateIndentation(src, tok, insertBeforeStmt)
+	if err != nil {
+		return nil, err
+	}
+	newLineIndent := "\n" + indent
 
 	lhs := strings.Join(lhsNames, ", ")
 	assignStmt := &ast.AssignStmt{
@@ -128,11 +132,17 @@ func CanExtractVariable(rng span.Range, file *ast.File) (ast.Expr, []ast.Node, b
 // When inserting lines of code, we must ensure that the lines have consistent
 // formatting (i.e. the proper indentation). To do so, we observe the indentation on the
 // line of code on which the insertion occurs.
-func calculateIndentation(content []byte, tok *token.File, insertBeforeStmt ast.Node) string {
+func calculateIndentation(content []byte, tok *token.File, insertBeforeStmt ast.Node) (string, error) {
 	line := tok.Line(insertBeforeStmt.Pos())
-	lineOffset := tok.Offset(tok.LineStart(line))
-	stmtOffset := tok.Offset(insertBeforeStmt.Pos())
-	return string(content[lineOffset:stmtOffset])
+	lineOffset, err := Offset(tok, tok.LineStart(line))
+	if err != nil {
+		return "", err
+	}
+	stmtOffset, err := Offset(tok, insertBeforeStmt.Pos())
+	if err != nil {
+		return "", err
+	}
+	return string(content[lineOffset:stmtOffset]), nil
 }
 
 // generateAvailableIdentifier adjusts the new function name until there are no collisons in scope.
@@ -390,8 +400,14 @@ func extractFunctionMethod(fset *token.FileSet, rng span.Range, src []byte, file
 
 	// We put the selection in a constructed file. We can then traverse and edit
 	// the extracted selection without modifying the original AST.
-	startOffset := tok.Offset(rng.Start)
-	endOffset := tok.Offset(rng.End)
+	startOffset, err := Offset(tok, rng.Start)
+	if err != nil {
+		return nil, err
+	}
+	endOffset, err := Offset(tok, rng.End)
+	if err != nil {
+		return nil, err
+	}
 	selection := src[startOffset:endOffset]
 	extractedBlock, err := parseBlockStmt(fset, selection)
 	if err != nil {
@@ -584,11 +600,21 @@ func extractFunctionMethod(fset *token.FileSet, rng span.Range, src []byte, file
 
 	// We're going to replace the whole enclosing function,
 	// so preserve the text before and after the selected block.
-	outerStart := tok.Offset(outer.Pos())
-	outerEnd := tok.Offset(outer.End())
+	outerStart, err := Offset(tok, outer.Pos())
+	if err != nil {
+		return nil, err
+	}
+	outerEnd, err := Offset(tok, outer.End())
+	if err != nil {
+		return nil, err
+	}
 	before := src[outerStart:startOffset]
 	after := src[endOffset:outerEnd]
-	newLineIndent := "\n" + calculateIndentation(src, tok, start)
+	indent, err := calculateIndentation(src, tok, start)
+	if err != nil {
+		return nil, err
+	}
+	newLineIndent := "\n" + indent
 
 	var fullReplacement strings.Builder
 	fullReplacement.Write(before)
@@ -634,8 +660,11 @@ func extractFunctionMethod(fset *token.FileSet, rng span.Range, src []byte, file
 // their cursors for whitespace. To support this use case, we must manually adjust the
 // ranges to match the correct AST node. In this particular example, we would adjust
 // rng.Start forward by one byte, and rng.End backwards by two bytes.
-func adjustRangeForWhitespace(rng span.Range, tok *token.File, content []byte) span.Range {
-	offset := tok.Offset(rng.Start)
+func adjustRangeForWhitespace(rng span.Range, tok *token.File, content []byte) (span.Range, error) {
+	offset, err := Offset(tok, rng.Start)
+	if err != nil {
+		return span.Range{}, err
+	}
 	for offset < len(content) {
 		if !unicode.IsSpace(rune(content[offset])) {
 			break
@@ -646,15 +675,18 @@ func adjustRangeForWhitespace(rng span.Range, tok *token.File, content []byte) s
 	rng.Start = tok.Pos(offset)
 
 	// Move backwards to find a non-whitespace character.
-	offset = tok.Offset(rng.End)
+	offset, err = Offset(tok, rng.End)
+	if err != nil {
+		return span.Range{}, err
+	}
 	for o := offset - 1; 0 <= o && o < len(content); o-- {
 		if !unicode.IsSpace(rune(content[o])) {
 			break
 		}
 		offset = o
 	}
 	rng.End = tok.Pos(offset)
-	return rng
+	return rng, nil
 }
 
 // findParent finds the parent AST node of the given target node, if the target is a
@@ -916,7 +948,11 @@ func CanExtractFunction(fset *token.FileSet, rng span.Range, src []byte, file *a
 	if tok == nil {
 		return nil, false, false, fmt.Errorf("no file for pos %v", fset.Position(file.Pos()))
 	}
-	rng = adjustRangeForWhitespace(rng, tok, src)
+	var err error
+	rng, err = adjustRangeForWhitespace(rng, tok, src)
+	if err != nil {
+		return nil, false, false, err
+	}
 	path, _ := astutil.PathEnclosingInterval(file, rng.Start, rng.End)
 	if len(path) == 0 {
 		return nil, false, false, fmt.Errorf("no path enclosing interval")

internal/lsp/source/format.go

Lines changed: 25 additions & 9 deletions
@@ -153,7 +153,10 @@ func ComputeOneImportFixEdits(snapshot Snapshot, pgf *ParsedGoFile, fix *imports
 
 func computeFixEdits(snapshot Snapshot, pgf *ParsedGoFile, options *imports.Options, fixes []*imports.ImportFix) ([]protocol.TextEdit, error) {
 	// trim the original data to match fixedData
-	left := importPrefix(pgf.Src)
+	left, err := importPrefix(pgf.Src)
+	if err != nil {
+		return nil, err
+	}
 	extra := !strings.Contains(left, "\n") // one line may have more than imports
 	if extra {
 		left = string(pgf.Src)
@@ -185,25 +188,30 @@ func computeFixEdits(snapshot Snapshot, pgf *ParsedGoFile, options *imports.Opti
 // importPrefix returns the prefix of the given file content through the final
 // import statement. If there are no imports, the prefix is the package
 // statement and any comment groups below it.
-func importPrefix(src []byte) string {
+func importPrefix(src []byte) (string, error) {
 	fset := token.NewFileSet()
 	// do as little parsing as possible
 	f, err := parser.ParseFile(fset, "", src, parser.ImportsOnly|parser.ParseComments)
 	if err != nil { // This can happen if 'package' is misspelled
-		return ""
+		return "", fmt.Errorf("importPrefix: failed to parse: %s", err)
	}
 	tok := fset.File(f.Pos())
 	var importEnd int
 	for _, d := range f.Decls {
 		if x, ok := d.(*ast.GenDecl); ok && x.Tok == token.IMPORT {
-			if e := tok.Offset(d.End()); e > importEnd {
+			if e, err := Offset(tok, d.End()); err != nil {
+				return "", fmt.Errorf("importPrefix: %s", err)
+			} else if e > importEnd {
 				importEnd = e
 			}
 		}
 	}
 
 	maybeAdjustToLineEnd := func(pos token.Pos, isCommentNode bool) int {
-		offset := tok.Offset(pos)
+		offset, err := Offset(tok, pos)
+		if err != nil {
+			return -1
+		}
 
 		// Don't go past the end of the file.
 		if offset > len(src) {
@@ -215,7 +223,10 @@ func importPrefix(src []byte) string {
 		// return a position on the next line whenever possible.
 		switch line := tok.Line(tok.Pos(offset)); {
 		case line < tok.LineCount():
-			nextLineOffset := tok.Offset(tok.LineStart(line + 1))
+			nextLineOffset, err := Offset(tok, tok.LineStart(line+1))
+			if err != nil {
+				return -1
+			}
 			// If we found a position that is at the end of a line, move the
 			// offset to the start of the next line.
 			if offset+1 == nextLineOffset {
@@ -234,14 +245,19 @@ func importPrefix(src []byte) string {
 	}
 	for _, cgroup := range f.Comments {
 		for _, c := range cgroup.List {
-			if end := tok.Offset(c.End()); end > importEnd {
+			if end, err := Offset(tok, c.End()); err != nil {
+				return "", err
+			} else if end > importEnd {
 				startLine := tok.Position(c.Pos()).Line
 				endLine := tok.Position(c.End()).Line
 
 				// Work around golang/go#41197 by checking if the comment might
 				// contain "\r", and if so, find the actual end position of the
 				// comment by scanning the content of the file.
-				startOffset := tok.Offset(c.Pos())
+				startOffset, err := Offset(tok, c.Pos())
+				if err != nil {
+					return "", err
+				}
 				if startLine != endLine && bytes.Contains(src[startOffset:], []byte("\r")) {
 					if commentEnd := scanForCommentEnd(src[startOffset:]); commentEnd > 0 {
 						end = startOffset + commentEnd
@@ -254,7 +270,7 @@ func importPrefix(src []byte) string {
 	if importEnd > len(src) {
 		importEnd = len(src)
 	}
-	return string(src[:importEnd])
+	return string(src[:importEnd]), nil
 }
 
 // scanForCommentEnd returns the offset of the end of the multi-line comment
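The commit message also mentions a test that keeps (*token.File).Offset out of gopls entirely; that test is among the 11 changed files but is not shown in the hunks above. Purely as an illustration of how such a check can be written (the package, test name, and load pattern below are hypothetical, not the actual test), one approach is to type-check the gopls packages and flag any Offset selector whose receiver is a *token.File:

package lsp_test

import (
	"go/ast"
	"go/types"
	"testing"

	"golang.org/x/tools/go/packages"
)

// TestNoTokenFileOffset is a hypothetical sketch: it fails if any gopls
// package calls (*token.File).Offset directly instead of source.Offset.
// A real version would exempt the source.Offset wrapper itself.
func TestNoTokenFileOffset(t *testing.T) {
	cfg := &packages.Config{
		Mode: packages.NeedName | packages.NeedSyntax | packages.NeedTypes | packages.NeedTypesInfo,
	}
	pkgs, err := packages.Load(cfg, "golang.org/x/tools/internal/lsp/...")
	if err != nil {
		t.Fatal(err)
	}
	for _, pkg := range pkgs {
		for _, file := range pkg.Syntax {
			ast.Inspect(file, func(n ast.Node) bool {
				sel, ok := n.(*ast.SelectorExpr)
				if !ok || sel.Sel.Name != "Offset" {
					return true
				}
				// Resolve the receiver's type and look for *go/token.File.
				tv, ok := pkg.TypesInfo.Types[sel.X]
				if !ok {
					return true
				}
				ptr, ok := tv.Type.(*types.Pointer)
				if !ok {
					return true
				}
				named, ok := ptr.Elem().(*types.Named)
				if !ok || named.Obj().Pkg() == nil {
					return true
				}
				if named.Obj().Pkg().Path() == "go/token" && named.Obj().Name() == "File" {
					t.Errorf("%s: use source.Offset instead of (*token.File).Offset",
						pkg.Fset.Position(sel.Pos()))
				}
				return true
			})
		}
	}
}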
