internal/lsp: separate refactorings out of memoization CL
This change separates the minor changes made in the course of the memoization CL into their own change, which will keep the diffs in the memoization CL clean.

Change-Id: I7d59e05ba6472af5f1bf516b1e5b879a5815b9a5
Reviewed-on: https://go-review.googlesource.com/c/tools/+/183250
Run-TryBot: Rebecca Stambler <rstambler@golang.org>
Reviewed-by: Ian Cottrell <iancottrell@google.com>
parent a101b041de
commit 252024b829
internal/lsp/cache/cache.go (vendored): 4 changes

@@ -97,6 +97,10 @@ func (h *fileHandle) Identity() source.FileIdentity {
return h.underlying.Identity()
}

func (h *fileHandle) Kind() source.FileKind {
return h.underlying.Kind()
}

func (h *fileHandle) Read(ctx context.Context) ([]byte, string, error) {
v := h.handle.Get(ctx)
if v == nil {
internal/lsp/cache/check.go (vendored): 87 changes

@@ -11,9 +11,11 @@ import (
"go/scanner"
"go/token"
"go/types"
"sync"

"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/packages"
"golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/span"
)

@@ -92,7 +94,6 @@ func (imp *importer) typeCheck(id packageID) (*pkg, error) {
pkg := &pkg{
id: meta.id,
pkgPath: meta.pkgPath,
files: meta.files,
imports: make(map[packagePath]*pkg),
typesSizes: meta.typesSizes,
typesInfo: &types.Info{

@@ -105,14 +106,50 @@ func (imp *importer) typeCheck(id packageID) (*pkg, error) {
},
analyses: make(map[*analysis.Analyzer]*analysisEntry),
}

// Ignore function bodies for any dependency packages.
ignoreFuncBodies := imp.topLevelPkgID != pkg.id
files, parseErrs, err := imp.parseFiles(meta.files, ignoreFuncBodies)
if err != nil {
return nil, err
mode := source.ParseFull
if imp.topLevelPkgID != pkg.id {
mode = source.ParseExported
}
for _, err := range parseErrs {
imp.view.appendPkgError(pkg, err)
var (
files []*astFile
phs []source.ParseGoHandle
wg sync.WaitGroup
)
for _, filename := range meta.files {
uri := span.FileURI(filename)
f, err := imp.view.getFile(uri)
if err != nil {
continue
}
fh := f.Handle(imp.ctx)
if fh.Kind() != source.Go {
continue
}
phs = append(phs, imp.view.session.cache.ParseGoHandle(fh, mode))
files = append(files, &astFile{
uri: fh.Identity().URI,
isTrimmed: mode == source.ParseExported,
})
}
for i, ph := range phs {
wg.Add(1)
go func(i int, ph source.ParseGoHandle) {
defer wg.Done()

files[i].file, files[i].err = ph.Parse(imp.ctx)
}(i, ph)
}
wg.Wait()

for _, f := range files {
if f != nil {
pkg.files = append(pkg.files, f)
}
if f.err != nil {
imp.view.session.cache.appendPkgError(pkg, f.err)
}
}

// Use the default type information for the unsafe package.

@@ -124,8 +161,6 @@ func (imp *importer) typeCheck(id packageID) (*pkg, error) {
pkg.types = types.NewPackage(string(meta.pkgPath), meta.name)
}

pkg.syntax = files

// Handle circular imports by copying previously seen imports.
seen := make(map[packageID]struct{})
for k, v := range imp.seen {

@@ -135,9 +170,9 @@ func (imp *importer) typeCheck(id packageID) (*pkg, error) {

cfg := &types.Config{
Error: func(err error) {
imp.view.appendPkgError(pkg, err)
imp.view.session.cache.appendPkgError(pkg, err)
},
IgnoreFuncBodies: ignoreFuncBodies,
IgnoreFuncBodies: mode == source.ParseExported,
Importer: &importer{
view: imp.view,
ctx: imp.ctx,

@@ -147,47 +182,51 @@ func (imp *importer) typeCheck(id packageID) (*pkg, error) {
},
}
check := types.NewChecker(cfg, imp.fset, pkg.types, pkg.typesInfo)

// Ignore type-checking errors.
check.Files(pkg.GetSyntax())

// Add every file in this package to our cache.
imp.cachePackage(imp.ctx, pkg, meta)
imp.cachePackage(imp.ctx, pkg, meta, mode)

return pkg, nil
}

func (imp *importer) cachePackage(ctx context.Context, pkg *pkg, meta *metadata) {
for _, filename := range pkg.files {
f, err := imp.view.getFile(span.FileURI(filename))
func (imp *importer) cachePackage(ctx context.Context, pkg *pkg, meta *metadata, mode source.ParseMode) {
for _, file := range pkg.files {
f, err := imp.view.getFile(file.uri)
if err != nil {
imp.view.session.log.Errorf(ctx, "no file: %v", err)
continue
}
gof, ok := f.(*goFile)
if !ok {
imp.view.session.log.Errorf(ctx, "%v is not a Go file", f.URI())
imp.view.session.log.Errorf(ctx, "%v is not a Go file", file.uri)
continue
}

// Set the package even if we failed to parse the file.
gof.pkg = pkg

// Get the *token.File directly from the AST.
gof.ast = pkg.syntax[filename]
// Get the AST for the file.
gof.ast = file
if gof.ast == nil {
imp.view.session.log.Errorf(ctx, "no AST information for %s", filename)
imp.view.session.log.Errorf(ctx, "no AST information for %s", file.uri)
continue
}
if gof.ast.file == nil {
imp.view.session.log.Errorf(ctx, "no AST for %s", filename)
imp.view.session.log.Errorf(ctx, "no AST for %s", file.uri)
continue
}
// Get the *token.File directly from the AST.
pos := gof.ast.file.Pos()
if !pos.IsValid() {
imp.view.session.log.Errorf(ctx, "AST for %s has an invalid position", filename)
imp.view.session.log.Errorf(ctx, "AST for %s has an invalid position", file.uri)
continue
}
tok := imp.view.session.cache.FileSet().File(pos)
if tok == nil {
imp.view.session.log.Errorf(ctx, "no *token.File for %s", filename)
imp.view.session.log.Errorf(ctx, "no *token.File for %s", file.uri)
continue
}
gof.token = tok

@@ -206,7 +245,7 @@ func (imp *importer) cachePackage(ctx context.Context, pkg *pkg, meta *metadata)
}
}

func (v *view) appendPkgError(pkg *pkg, err error) {
func (c *cache) appendPkgError(pkg *pkg, err error) {
if err == nil {
return
}

@@ -229,7 +268,7 @@ func (v *view) appendPkgError(pkg *pkg, err error) {
}
case types.Error:
errs = append(errs, packages.Error{
Pos: v.Session().Cache().FileSet().Position(err.Pos).String(),
Pos: c.FileSet().Position(err.Pos).String(),
Msg: err.Msg,
Kind: packages.TypeError,
})
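Note on the new flow above: typeCheck now builds one source.ParseGoHandle per compiled Go file and parses them in parallel before type checking. A minimal sketch of that fan-out, assuming it lives inside internal/lsp/cache and imports "context", "go/ast", "sync", and "golang.org/x/tools/internal/lsp/source" (parseAll is a hypothetical helper name, not part of this CL):

    // parseAll parses each handle on its own goroutine and collects the
    // (possibly partial) ASTs and per-file errors by index.
    func parseAll(ctx context.Context, phs []source.ParseGoHandle) ([]*ast.File, []error) {
    	files := make([]*ast.File, len(phs))
    	errs := make([]error, len(phs))
    	var wg sync.WaitGroup
    	for i, ph := range phs {
    		wg.Add(1)
    		go func(i int, ph source.ParseGoHandle) {
    			defer wg.Done()
    			files[i], errs[i] = ph.Parse(ctx)
    		}(i, ph)
    	}
    	wg.Wait()
    	return files, errs
    }

This mirrors the pattern the hunk itself uses with its files []*astFile slice, with results indexed so each goroutine writes only its own slot.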
internal/lsp/cache/external.go (vendored): 17 changes

@@ -7,6 +7,7 @@ package cache
import (
"context"
"io/ioutil"
"os"

"golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/span"

@@ -22,13 +23,16 @@ type nativeFileHandle struct {
}

func (fs *nativeFileSystem) GetFile(uri span.URI) source.FileHandle {
fi, err := os.Stat(uri.Filename())
version := fi.ModTime().String()
if err != nil {
version = "DOES NOT EXIST"
}
return &nativeFileHandle{
fs: fs,
identity: source.FileIdentity{
URI: uri,
// TODO: decide what the version string is for a native file system
// could be the mtime?
Version: "",
URI: uri,
Version: version,
},
}
}

@@ -41,6 +45,11 @@ func (h *nativeFileHandle) Identity() source.FileIdentity {
return h.identity
}

func (h *nativeFileHandle) Kind() source.FileKind {
// TODO: How should we determine the file kind?
return source.Go
}

func (h *nativeFileHandle) Read(ctx context.Context) ([]byte, string, error) {
//TODO: this should fail if the version is not the same as the handle
data, err := ioutil.ReadFile(h.identity.URI.Filename())
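The hunk above starts deriving the native file handle's version from the file's modification time, with "DOES NOT EXIST" as the sentinel when the file is missing. A hedged sketch of the same idea, with the os.Stat error checked before the FileInfo is touched (fileVersion is an illustrative helper, not code from this CL):

    // fileVersion returns a version string for an on-disk file based on its
    // mtime, or the same "DOES NOT EXIST" sentinel used above when Stat fails.
    func fileVersion(path string) string {
    	fi, err := os.Stat(path)
    	if err != nil {
    		return "DOES NOT EXIST"
    	}
    	return fi.ModTime().String()
    }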
internal/lsp/cache/file.go (vendored): 1 change

@@ -28,6 +28,7 @@ type viewFile interface {
type fileBase struct {
uris []span.URI
fname string
kind source.FileKind

view *view
internal/lsp/cache/gofile.go (vendored): 5 changes

@@ -1,3 +1,7 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package cache

import (

@@ -21,6 +25,7 @@ type goFile struct {
}

type astFile struct {
uri span.URI
file *ast.File
err error // parse errors
isTrimmed bool
internal/lsp/cache/load.go (vendored): 6 changes

@@ -1,3 +1,7 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package cache

import (

@@ -157,7 +161,7 @@ func (v *view) link(ctx context.Context, pkgPath packagePath, pkg *packages.Pack
// Reset any field that could have changed across calls to packages.Load.
m.name = pkg.Name
m.files = pkg.CompiledGoFiles
for _, filename := range m.files {
for _, filename := range pkg.CompiledGoFiles {
if f, _ := v.getFile(span.FileURI(filename)); f != nil {
if gof, ok := f.(*goFile); ok {
gof.meta = m
internal/lsp/cache/modfile.go (vendored): 4 changes

@@ -1,3 +1,7 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package cache

import (
internal/lsp/cache/parse.go (vendored): 67 changes

@@ -14,11 +14,9 @@ import (
"os"
"path/filepath"
"strings"
"sync"

"golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/memoize"
"golang.org/x/tools/internal/span"
)

// Limits the number of parallel parser calls per process.

@@ -55,6 +53,8 @@ func (c *cache) ParseGoHandle(fh source.FileHandle, mode source.ParseMode) sourc
})
return &parseGoHandle{
handle: h,
file: fh,
mode: mode,
}
}

@@ -94,73 +94,12 @@ func parseGo(ctx context.Context, c *cache, fh source.FileHandle, mode source.Pa
// Fix any badly parsed parts of the AST.
tok := c.fset.File(ast.Pos())
if err := fix(ctx, ast, tok, buf); err != nil {
//TODO: we should do something with the error, but we have no access to a logger in here
// TODO: Do something with the error (need access to a logger in here).
}
}
return ast, err
}

// parseFiles reads and parses the Go source files and returns the ASTs
// of the ones that could be at least partially parsed, along with a list
// parse errors encountered, and a fatal error that prevented parsing.
//
// Because files are scanned in parallel, the token.Pos
// positions of the resulting ast.Files are not ordered.
func (imp *importer) parseFiles(filenames []string, ignoreFuncBodies bool) (map[string]*astFile, []error, error) {
var (
wg sync.WaitGroup
n = len(filenames)
parsed = make([]*astFile, n)
errors = make([]error, n)
)
// TODO: change this function to return the handles
for i, filename := range filenames {
if err := imp.ctx.Err(); err != nil {
return nil, nil, err
}
// get a file handle
fh := imp.view.session.GetFile(span.FileURI(filename))
// now get a parser
mode := source.ParseFull
if ignoreFuncBodies {
mode = source.ParseExported
}
ph := imp.view.session.cache.ParseGoHandle(fh, mode)
// now read and parse in parallel
wg.Add(1)
go func(i int, filename string) {
defer wg.Done()
// ParseFile may return a partial AST and an error.
f, err := ph.Parse(imp.ctx)
parsed[i], errors[i] = &astFile{
file: f,
err: err,
isTrimmed: ignoreFuncBodies,
}, err
}(i, filename)
}
wg.Wait()

parsedByFilename := make(map[string]*astFile)

for i, f := range parsed {
if f.file != nil {
parsedByFilename[filenames[i]] = f
}
}

var o int
for _, err := range errors {
if err != nil {
errors[o] = err
o++
}
}
errors = errors[:o]

return parsedByFilename, errors, nil
}

// sameFile returns true if x and y have the same basename and denote
// the same file.
//
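With parseFiles deleted, callers request a memoized parse through the cache's ParseGoHandle, keyed on the file handle and parse mode. A small sketch, assuming it sits inside internal/lsp/cache next to the code above and imports "context", "go/ast", and the internal source package (parseOne is a hypothetical helper):

    // parseOne requests a full-mode parse of a single file; repeated requests
    // for the same file and mode share the memoized result, and Parse may
    // return a partial AST together with an error.
    func parseOne(ctx context.Context, c *cache, fh source.FileHandle) (*ast.File, error) {
    	ph := c.ParseGoHandle(fh, source.ParseFull)
    	return ph.Parse(ctx)
    }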
internal/lsp/cache/pkg.go (vendored): 21 changes

@@ -22,8 +22,7 @@ type pkg struct {
id packageID
pkgPath packagePath

files []string
syntax map[string]*astFile
files []*astFile
errors []packages.Error
imports map[packagePath]*pkg
types *types.Package

@@ -124,8 +123,8 @@ func (pkg *pkg) GetActionGraph(ctx context.Context, a *analysis.Analyzer) (*sour
}
sort.Strings(importPaths) // for determinism
for _, importPath := range importPaths {
dep, ok := pkg.imports[packagePath(importPath)]
if !ok {
dep := pkg.GetImport(importPath)
if dep == nil {
continue
}
act, err := dep.GetActionGraph(ctx, a)

@@ -149,13 +148,19 @@ func (pkg *pkg) PkgPath() string {
}

func (pkg *pkg) GetFilenames() []string {
return pkg.files
filenames := make([]string, 0, len(pkg.files))
for _, f := range pkg.files {
filenames = append(filenames, f.uri.Filename())
}
return filenames
}

func (pkg *pkg) GetSyntax() []*ast.File {
syntax := make([]*ast.File, 0, len(pkg.syntax))
for _, astFile := range pkg.syntax {
syntax = append(syntax, astFile.file)
var syntax []*ast.File
for _, f := range pkg.files {
if f.file != nil {
syntax = append(syntax, f.file)
}
}
return syntax
}
internal/lsp/cache/session.go (vendored): 6 changes

@@ -42,6 +42,7 @@ type overlay struct {
uri span.URI
data []byte
hash string
kind source.FileKind

// onDisk is true if a file has been saved on disk,
// and therefore does not need to be part of the overlay sent to go/packages.

@@ -266,6 +267,11 @@ func (o *overlay) Identity() source.FileIdentity {
}
}

func (o *overlay) Kind() source.FileKind {
// TODO: Determine the file kind using textDocument.languageId.
return source.Go
}

func (o *overlay) Read(ctx context.Context) ([]byte, string, error) {
return o.data, o.hash, nil
}
internal/lsp/cache/sumfile.go (vendored): 4 changes

@@ -1,3 +1,7 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package cache

import (
internal/lsp/cache/token.go (vendored): 5 changes

@@ -1,3 +1,7 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package cache

import (

@@ -36,6 +40,7 @@ func (c *cache) TokenHandle(fh source.FileHandle) source.TokenHandle {
})
return &tokenHandle{
handle: h,
file: fh,
}
}
internal/lsp/cache/view.go (vendored): 3 changes

@@ -327,6 +327,7 @@ func (v *view) getFile(uri span.URI) (viewFile, error) {
fileBase: fileBase{
view: v,
fname: filename,
kind: source.Mod,
},
}
case ".sum":

@@ -334,6 +335,7 @@ func (v *view) getFile(uri span.URI) (viewFile, error) {
fileBase: fileBase{
view: v,
fname: filename,
kind: source.Sum,
},
}
default:

@@ -342,6 +344,7 @@ func (v *view) getFile(uri span.URI) (viewFile, error) {
fileBase: fileBase{
view: v,
fname: filename,
kind: source.Go,
},
}
v.session.filesWatchMap.Watch(uri, func() {
@@ -6,7 +6,6 @@ package lsp

import (
"context"
"fmt"
"strings"

"golang.org/x/tools/internal/lsp/protocol"

@@ -62,23 +61,11 @@ func (s *Server) codeAction(ctx context.Context, params *protocol.CodeActionPara
return codeActions, nil
}

func organizeImports(ctx context.Context, v source.View, s span.Span) ([]protocol.TextEdit, error) {
f, m, err := getGoFile(ctx, v, s.URI())
func organizeImports(ctx context.Context, view source.View, s span.Span) ([]protocol.TextEdit, error) {
f, m, rng, err := spanToRange(ctx, view, s)
if err != nil {
return nil, err
}
rng, err := s.Range(m.Converter)
if err != nil {
return nil, err
}
if rng.Start == rng.End {
// If we have a single point, assume we want the whole file.
tok := f.GetToken(ctx)
if tok == nil {
return nil, fmt.Errorf("no file information for %s", f.URI())
}
rng.End = tok.Pos(tok.Size())
}
edits, err := source.Imports(ctx, f, rng)
if err != nil {
return nil, err
@@ -30,7 +30,7 @@ func (s *Server) completion(ctx context.Context, params *protocol.CompletionPara
if err != nil {
return nil, err
}
items, surrounding, err := source.Completion(ctx, f, rng.Start)
items, surrounding, err := source.Completion(ctx, view, f, rng.Start)
if err != nil {
s.session.Logger().Infof(ctx, "no completions found for %s:%v:%v: %v", uri, int(params.Position.Line), int(params.Position.Character), err)
}
@@ -13,12 +13,12 @@ import (
"golang.org/x/tools/internal/span"
)

func (s *Server) Diagnostics(ctx context.Context, v source.View, uri span.URI) {
func (s *Server) Diagnostics(ctx context.Context, view source.View, uri span.URI) {
if ctx.Err() != nil {
s.session.Logger().Errorf(ctx, "canceling diagnostics for %s: %v", uri, ctx.Err())
return
}
f, err := v.GetFile(ctx, uri)
f, err := view.GetFile(ctx, uri)
if err != nil {
s.session.Logger().Errorf(ctx, "no file for %s: %v", uri, err)
return

@@ -28,7 +28,7 @@ func (s *Server) Diagnostics(ctx context.Context, v source.View, uri span.URI) {
if !ok {
return
}
reports, err := source.Diagnostics(ctx, v, gof, s.disabledAnalyses)
reports, err := source.Diagnostics(ctx, view, gof, s.disabledAnalyses)
if err != nil {
s.session.Logger().Errorf(ctx, "failed to compute diagnostics for %s: %v", gof.URI(), err)
return

@@ -38,7 +38,7 @@ func (s *Server) Diagnostics(ctx context.Context, v source.View, uri span.URI) {
defer s.undeliveredMu.Unlock()

for uri, diagnostics := range reports {
if err := s.publishDiagnostics(ctx, v, uri, diagnostics); err != nil {
if err := s.publishDiagnostics(ctx, view, uri, diagnostics); err != nil {
if s.undelivered == nil {
s.undelivered = make(map[span.URI][]source.Diagnostic)
}

@@ -52,7 +52,7 @@ func (s *Server) Diagnostics(ctx context.Context, v source.View, uri span.URI) {
// Anytime we compute diagnostics, make sure to also send along any
// undelivered ones (only for remaining URIs).
for uri, diagnostics := range s.undelivered {
if err := s.publishDiagnostics(ctx, v, uri, diagnostics); err != nil {
if err := s.publishDiagnostics(ctx, view, uri, diagnostics); err != nil {
s.session.Logger().Errorf(ctx, "failed to deliver diagnostic for %s (will not retry): %v", uri, err)
}
// If we fail to deliver the same diagnostics twice, just give up.
@@ -21,23 +21,8 @@ func (s *Server) formatting(ctx context.Context, params *protocol.DocumentFormat
}

// formatRange formats a document with a given range.
func formatRange(ctx context.Context, v source.View, s span.Span) ([]protocol.TextEdit, error) {
f, m, err := getGoFile(ctx, v, s.URI())
if err != nil {
return nil, err
}
rng, err := s.Range(m.Converter)
if err != nil {
return nil, err
}
if rng.Start == rng.End {
// If we have a single point, assume we want the whole file.
tok := f.GetToken(ctx)
if tok == nil {
return nil, fmt.Errorf("no file information for %s", f.URI())
}
rng.End = tok.Pos(tok.Size())
}
func formatRange(ctx context.Context, view source.View, s span.Span) ([]protocol.TextEdit, error) {
f, m, rng, err := spanToRange(ctx, view, s)
edits, err := source.Format(ctx, f, rng)
if err != nil {
return nil, err

@@ -45,6 +30,26 @@ func formatRange(ctx context.Context, v source.View, s span.Span) ([]protocol.Te
return ToProtocolEdits(m, edits)
}

func spanToRange(ctx context.Context, view source.View, s span.Span) (source.GoFile, *protocol.ColumnMapper, span.Range, error) {
f, m, err := getGoFile(ctx, view, s.URI())
if err != nil {
return nil, nil, span.Range{}, err
}
rng, err := s.Range(m.Converter)
if err != nil {
return nil, nil, span.Range{}, err
}
if rng.Start == rng.End {
// If we have a single point, assume we want the whole file.
tok := f.GetToken(ctx)
if tok == nil {
return nil, nil, span.Range{}, fmt.Errorf("no file information for %s", f.URI())
}
rng.End = tok.Pos(tok.Size())
}
return f, m, rng, nil
}

func ToProtocolEdits(m *protocol.ColumnMapper, edits []source.TextEdit) ([]protocol.TextEdit, error) {
if edits == nil {
return nil, nil
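Both organizeImports and formatRange now funnel through the new spanToRange helper. A hedged sketch of a caller, mirroring formatRange above (formatSpan is an illustrative name; spanToRange, source.Format, and ToProtocolEdits are the calls from the diff):

    func formatSpan(ctx context.Context, view source.View, s span.Span) ([]protocol.TextEdit, error) {
    	// Resolve the file, column mapper, and range for the span; a zero-width
    	// span is widened to the whole file inside spanToRange.
    	f, m, rng, err := spanToRange(ctx, view, s)
    	if err != nil {
    		return nil, err
    	}
    	edits, err := source.Format(ctx, f, rng)
    	if err != nil {
    		return nil, err
    	}
    	return ToProtocolEdits(m, edits)
    }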
@@ -27,7 +27,10 @@ func (s *Server) documentHighlight(ctx context.Context, params *protocol.TextDoc
if err != nil {
return nil, err
}
spans := source.Highlight(ctx, f, rng.Start)
spans, err := source.Highlight(ctx, f, rng.Start)
if err != nil {
view.Session().Logger().Errorf(ctx, "no highlight for %s: %v", spn, err)
}
return toProtocolHighlight(m, spans), nil
}
@@ -27,7 +27,7 @@ func (s *Server) documentLink(ctx context.Context, params *protocol.DocumentLink
// Add a Godoc link for each imported package.
var result []protocol.DocumentLink
for _, imp := range file.Imports {
spn, err := span.NewRange(f.FileSet(), imp.Pos(), imp.End()).Span()
spn, err := span.NewRange(view.Session().Cache().FileSet(), imp.Pos(), imp.End()).Span()
if err != nil {
return nil, err
}
@@ -57,6 +57,7 @@ func testLSP(t *testing.T, exporter packagestest.Exporter) {
tests.Run(t, r, data)
}

// TODO: Actually test the LSP diagnostics function in this test.
func (r *runner) Diagnostics(t *testing.T, data tests.Diagnostics) {
v := r.server.session.View(viewName)
for uri, want := range data {
@@ -27,12 +27,14 @@ func (s *Server) rename(ctx context.Context, params *protocol.RenameParams) (*pr
if err != nil {
return nil, err
}

edits, err := source.Rename(ctx, view, f, rng.Start, params.NewName)
ident, err := source.Identifier(ctx, view, f, rng.Start)
if err != nil {
return nil, err
}
edits, err := ident.Rename(ctx, params.NewName)
if err != nil {
return nil, err
}

changes := make(map[string][]protocol.TextEdit)
for uri, textEdits := range edits {
_, m, err := getGoFile(ctx, view, uri)
@@ -237,7 +237,7 @@ type candidate struct {
// The selection is computed based on the preceding identifier and can be used by
// the client to score the quality of the completion. For instance, some clients
// may tolerate imperfect matches as valid completion results, since users may make typos.
func Completion(ctx context.Context, f GoFile, pos token.Pos) ([]CompletionItem, *Selection, error) {
func Completion(ctx context.Context, view View, f GoFile, pos token.Pos) ([]CompletionItem, *Selection, error) {
file := f.GetAST(ctx)
if file == nil {
return nil, nil, fmt.Errorf("no AST for %s", f.URI())

@@ -269,7 +269,7 @@ func Completion(ctx context.Context, f GoFile, pos token.Pos) ([]CompletionItem,
types: pkg.GetTypes(),
info: pkg.GetTypesInfo(),
qf: qualifier(file, pkg.GetTypes(), pkg.GetTypesInfo()),
view: f.View(),
view: view,
ctx: ctx,
path: path,
pos: pos,
@@ -51,7 +51,7 @@ const (
SeverityError
)

func Diagnostics(ctx context.Context, v View, f GoFile, disabledAnalyses map[string]struct{}) (map[span.URI][]Diagnostic, error) {
func Diagnostics(ctx context.Context, view View, f GoFile, disabledAnalyses map[string]struct{}) (map[span.URI][]Diagnostic, error) {
pkg := f.GetPackage(ctx)
if pkg == nil {
return singleDiagnostic(f.URI(), "%s is not part of a package", f.URI()), nil

@@ -59,7 +59,7 @@ func Diagnostics(ctx context.Context, v View, f GoFile, disabledAnalyses map[str
// Prepare the reports we will send for the files in this package.
reports := make(map[span.URI][]Diagnostic)
for _, filename := range pkg.GetFilenames() {
addReport(v, reports, span.FileURI(filename), nil)
addReport(view, reports, span.FileURI(filename), nil)
}

// Prepare any additional reports for the errors in this package.

@@ -67,26 +67,27 @@ func Diagnostics(ctx context.Context, v View, f GoFile, disabledAnalyses map[str
if err.Kind != packages.ListError {
continue
}
addReport(v, reports, packagesErrorSpan(err).URI(), nil)
addReport(view, reports, packagesErrorSpan(err).URI(), nil)
}

// Run diagnostics for the package that this URI belongs to.
if !diagnostics(ctx, v, pkg, reports) {
if !diagnostics(ctx, view, pkg, reports) {
// If we don't have any list, parse, or type errors, run analyses.
if err := analyses(ctx, v, pkg, disabledAnalyses, reports); err != nil {
v.Session().Logger().Errorf(ctx, "failed to run analyses for %s: %v", f.URI(), err)
if err := analyses(ctx, view, pkg, disabledAnalyses, reports); err != nil {
view.Session().Logger().Errorf(ctx, "failed to run analyses for %s: %v", f.URI(), err)
}
}
// Updates to the diagnostics for this package may need to be propagated.
for _, f := range f.GetActiveReverseDeps(ctx) {
revDeps := f.GetActiveReverseDeps(ctx)
for _, f := range revDeps {
pkg := f.GetPackage(ctx)
if pkg == nil {
continue
}
for _, filename := range pkg.GetFilenames() {
addReport(v, reports, span.FileURI(filename), nil)
addReport(view, reports, span.FileURI(filename), nil)
}
diagnostics(ctx, v, pkg, reports)
diagnostics(ctx, view, pkg, reports)
}
return reports, nil
}

@@ -203,32 +204,32 @@ func parseDiagnosticMessage(input string) span.Span {
return span.Parse(input[:msgIndex])
}

func pointToSpan(ctx context.Context, v View, spn span.Span) span.Span {
f, err := v.GetFile(ctx, spn.URI())
func pointToSpan(ctx context.Context, view View, spn span.Span) span.Span {
f, err := view.GetFile(ctx, spn.URI())
if err != nil {
v.Session().Logger().Errorf(ctx, "Could find file for diagnostic: %v", spn.URI())
view.Session().Logger().Errorf(ctx, "could not find file for diagnostic: %v", spn.URI())
return spn
}
diagFile, ok := f.(GoFile)
if !ok {
v.Session().Logger().Errorf(ctx, "Not a go file: %v", spn.URI())
view.Session().Logger().Errorf(ctx, "%s is not a Go file", spn.URI())
return spn
}
tok := diagFile.GetToken(ctx)
if tok == nil {
v.Session().Logger().Errorf(ctx, "Could not find tokens for diagnostic: %v", spn.URI())
view.Session().Logger().Errorf(ctx, "could not find token.File for diagnostic: %v", spn.URI())
return spn
}
data, _, err := diagFile.Handle(ctx).Read(ctx)
if err != nil {
v.Session().Logger().Errorf(ctx, "Could not find content for diagnostic: %v", spn.URI())
view.Session().Logger().Errorf(ctx, "could not find content for diagnostic: %v", spn.URI())
return spn
}
c := span.NewTokenConverter(diagFile.FileSet(), tok)
s, err := spn.WithOffset(c)
//we just don't bother producing an error if this failed
if err != nil {
v.Session().Logger().Errorf(ctx, "invalid span for diagnostic: %v: %v", spn.URI(), err)
view.Session().Logger().Errorf(ctx, "invalid span for diagnostic: %v: %v", spn.URI(), err)
return spn
}
start := s.Start()
@@ -6,6 +6,7 @@ package source

import (
"context"
"fmt"
"go/ast"
"go/token"

@@ -13,22 +14,20 @@ import (
"golang.org/x/tools/internal/span"
)

func Highlight(ctx context.Context, f GoFile, pos token.Pos) []span.Span {
func Highlight(ctx context.Context, f GoFile, pos token.Pos) ([]span.Span, error) {
file := f.GetAST(ctx)
if file == nil {
return nil
return nil, fmt.Errorf("no AST for %s", f.URI())
}
fset := f.FileSet()
path, _ := astutil.PathEnclosingInterval(file, pos, pos)
if len(path) == 0 {
return nil
return nil, fmt.Errorf("no enclosing position found for %s", fset.Position(pos))
}

id, ok := path[0].(*ast.Ident)
if !ok {
return nil
return nil, fmt.Errorf("%s is not an identifier", fset.Position(pos))
}

var result []span.Span
if id.Obj != nil {
ast.Inspect(path[len(path)-1], func(n ast.Node) bool {

@@ -41,5 +40,5 @@ func Highlight(ctx context.Context, f GoFile, pos token.Pos) []span.Span {
return true
})
}
return result
return result, nil
}
@@ -45,14 +45,14 @@ func (i *IdentifierInfo) DeclarationRange() span.Range {

// Identifier returns identifier information for a position
// in a file, accounting for a potentially incomplete selector.
func Identifier(ctx context.Context, v View, f GoFile, pos token.Pos) (*IdentifierInfo, error) {
if result, err := identifier(ctx, v, f, pos); err != nil || result != nil {
func Identifier(ctx context.Context, view View, f GoFile, pos token.Pos) (*IdentifierInfo, error) {
if result, err := identifier(ctx, view, f, pos); err != nil || result != nil {
return result, err
}
// If the position is not an identifier but immediately follows
// an identifier or selector period (as is common when
// requesting a completion), use the path to the preceding node.
result, err := identifier(ctx, v, f, pos-1)
result, err := identifier(ctx, view, f, pos-1)
if result == nil && err == nil {
err = fmt.Errorf("no identifier found")
}

@@ -60,7 +60,7 @@ func Identifier(ctx context.Context, v View, f GoFile, pos token.Pos) (*Identifi
}

// identifier checks a single position for a potential identifier.
func identifier(ctx context.Context, v View, f GoFile, pos token.Pos) (*IdentifierInfo, error) {
func identifier(ctx context.Context, view View, f GoFile, pos token.Pos) (*IdentifierInfo, error) {
file := f.GetAST(ctx)
if file == nil {
return nil, fmt.Errorf("no AST for %s", f.URI())

@@ -76,7 +76,7 @@ func identifier(ctx context.Context, v View, f GoFile, pos token.Pos) (*Identifi
}

// Handle import specs separately, as there is no formal position for a package declaration.
if result, err := importSpec(f, file, pkg, pos); result != nil || err != nil {
if result, err := importSpec(ctx, f, file, pkg, pos); result != nil || err != nil {
return result, err
}

@@ -156,7 +156,7 @@ func identifier(ctx context.Context, v View, f GoFile, pos token.Pos) (*Identifi
if result.decl.rng, err = objToRange(ctx, f.FileSet(), result.decl.obj); err != nil {
return nil, err
}
if result.decl.node, err = objToNode(ctx, v, pkg.GetTypes(), result.decl.obj, result.decl.rng); err != nil {
if result.decl.node, err = objToNode(ctx, view, pkg.GetTypes(), result.decl.obj, result.decl.rng); err != nil {
return nil, err
}
typ := pkg.GetTypesInfo().TypeOf(result.ident)

@@ -218,12 +218,12 @@ func posToRange(ctx context.Context, fset *token.FileSet, name string, pos token
return span.NewRange(fset, pos, pos+token.Pos(len(name))), nil
}

func objToNode(ctx context.Context, v View, originPkg *types.Package, obj types.Object, rng span.Range) (ast.Decl, error) {
func objToNode(ctx context.Context, view View, originPkg *types.Package, obj types.Object, rng span.Range) (ast.Decl, error) {
s, err := rng.Span()
if err != nil {
return nil, err
}
f, err := v.GetFile(ctx, s.URI())
f, err := view.GetFile(ctx, s.URI())
if err != nil {
return nil, err
}

@@ -262,7 +262,7 @@ func objToNode(ctx context.Context, v View, originPkg *types.Package, obj types.
}

// importSpec handles positions inside of an *ast.ImportSpec.
func importSpec(f GoFile, fAST *ast.File, pkg Package, pos token.Pos) (*IdentifierInfo, error) {
func importSpec(ctx context.Context, f GoFile, fAST *ast.File, pkg Package, pos token.Pos) (*IdentifierInfo, error) {
for _, imp := range fAST.Imports {
if !(imp.Pos() <= pos && pos < imp.End()) {
continue
@@ -18,6 +18,7 @@ import (
)

type renamer struct {
ctx context.Context
fset *token.FileSet
pkg Package // the package containing the declaration of the ident
refs []*ReferenceInfo

@@ -32,45 +33,39 @@ type renamer struct {
}

// Rename returns a map of TextEdits for each file modified when renaming a given identifier within a package.
func Rename(ctx context.Context, view View, f GoFile, pos token.Pos, newName string) (map[span.URI][]TextEdit, error) {
pkg := f.GetPackage(ctx)
if pkg == nil || pkg.IsIllTyped() {
return nil, fmt.Errorf("package for %s is ill typed", f.URI())
}

// Get the identifier to rename.
ident, err := Identifier(ctx, view, f, pos)
if err != nil {
return nil, err
}
if ident.Name == newName {
func (i *IdentifierInfo) Rename(ctx context.Context, newName string) (map[span.URI][]TextEdit, error) {
if i.Name == newName {
return nil, fmt.Errorf("old and new names are the same: %s", newName)
}
if !isValidIdentifier(ident.Name) {
return nil, fmt.Errorf("invalid identifier to rename: %q", ident.Name)
if !isValidIdentifier(i.Name) {
return nil, fmt.Errorf("invalid identifier to rename: %q", i.Name)
}

// Do not rename identifiers declared in another package.
if pkg.GetTypes() != ident.decl.obj.Pkg() {
return nil, fmt.Errorf("failed to rename because %q is declared in package %q", ident.Name, ident.decl.obj.Pkg().Name())
pkg := i.File.GetPackage(ctx)
if pkg == nil || pkg.IsIllTyped() {
return nil, fmt.Errorf("package for %s is ill typed", i.File.URI())
}
if pkg.GetTypes() != i.decl.obj.Pkg() {
return nil, fmt.Errorf("failed to rename because %q is declared in package %q", i.Name, i.decl.obj.Pkg().Name())
}

// TODO(suzmue): Support renaming of imported packages.
if _, ok := ident.decl.obj.(*types.PkgName); ok {
return nil, fmt.Errorf("renaming imported package %s not supported", ident.Name)
if _, ok := i.decl.obj.(*types.PkgName); ok {
return nil, fmt.Errorf("renaming imported package %s not supported", i.Name)
}

refs, err := ident.References(ctx)
refs, err := i.References(ctx)
if err != nil {
return nil, err
}

r := renamer{
fset: f.FileSet(),
fset: i.File.FileSet(),
pkg: pkg,
refs: refs,
objsToUpdate: make(map[types.Object]bool),
from: ident.Name,
from: i.Name,
to: newName,
packages: make(map[*types.Package]Package),
}

@@ -84,11 +79,11 @@ func Rename(ctx context.Context, view View, f GoFile, pos token.Pos, newName str
return nil, fmt.Errorf(r.errors)
}

return r.update(ctx, view)
return r.update(ctx)
}

// Rename all references to the identifier.
func (r *renamer) update(ctx context.Context, view View) (map[span.URI][]TextEdit, error) {
func (r *renamer) update(ctx context.Context) (map[span.URI][]TextEdit, error) {
result := make(map[span.URI][]TextEdit)

docRegexp := regexp.MustCompile(`\b` + r.from + `\b`)

@@ -131,7 +126,7 @@ func (r *renamer) update(ctx context.Context, view View) (map[span.URI][]TextEdi

// docComment returns the doc for an identifier.
func (r *renamer) docComment(pkg Package, id *ast.Ident) *ast.CommentGroup {
_, nodes, _ := pathEnclosingInterval(r.fset, pkg, id.Pos(), id.End())
_, nodes, _ := pathEnclosingInterval(r.ctx, r.fset, pkg, id.Pos(), id.End())
for _, node := range nodes {
switch decl := node.(type) {
case *ast.FuncDecl:
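Rename is now a method on *IdentifierInfo, so callers resolve the identifier first and then rename it, as the lsp rename handler earlier in this change does. A hedged end-to-end sketch from a caller outside package source (renameAt is an illustrative wrapper, not part of the CL):

    func renameAt(ctx context.Context, view source.View, f source.GoFile, pos token.Pos, newName string) (map[span.URI][]source.TextEdit, error) {
    	// First resolve the identifier at the position...
    	ident, err := source.Identifier(ctx, view, f, pos)
    	if err != nil {
    		return nil, err
    	}
    	// ...then ask it to compute the per-file edits for the new name.
    	return ident.Rename(ctx, newName)
    }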
@@ -7,6 +7,7 @@

package source

import (
"context"
"fmt"
"go/ast"
"go/token"

@@ -188,7 +189,7 @@ func (r *renamer) checkInLexicalScope(from types.Object, pkg Package) {
// Check for super-block conflict.
// The name r.to is defined in a superblock.
// Is that name referenced from within this block?
forEachLexicalRef(pkg, to, func(id *ast.Ident, block *types.Scope) bool {
forEachLexicalRef(r.ctx, pkg, to, func(id *ast.Ident, block *types.Scope) bool {
_, obj := lexicalLookup(block, from.Name(), id.Pos())
if obj == from {
// super-block conflict

@@ -207,7 +208,7 @@ func (r *renamer) checkInLexicalScope(from types.Object, pkg Package) {
// Check for sub-block conflict.
// Is there an intervening definition of r.to between
// the block defining 'from' and some reference to it?
forEachLexicalRef(pkg, from, func(id *ast.Ident, block *types.Scope) bool {
forEachLexicalRef(r.ctx, pkg, from, func(id *ast.Ident, block *types.Scope) bool {
// Find the block that defines the found reference.
// It may be an ancestor.
fromBlock, _ := lexicalLookup(block, from.Name(), id.Pos())

@@ -276,7 +277,7 @@ func deeper(x, y *types.Scope) bool {
// pkg that is a reference to obj in lexical scope. block is the
// lexical block enclosing the reference. If fn returns false the
// iteration is terminated and findLexicalRefs returns false.
func forEachLexicalRef(pkg Package, obj types.Object, fn func(id *ast.Ident, block *types.Scope) bool) bool {
func forEachLexicalRef(ctx context.Context, pkg Package, obj types.Object, fn func(id *ast.Ident, block *types.Scope) bool) bool {
ok := true
var stack []ast.Node

@@ -379,7 +380,7 @@ func (r *renamer) checkStructField(from *types.Var) {
// go/types offers no easy way to get from a field (or interface
// method) to its declaring struct (or interface), so we must
// ascend the AST.
pkg, path, _ := pathEnclosingInterval(r.fset, r.packages[from.Pkg()], from.Pos(), from.Pos())
pkg, path, _ := pathEnclosingInterval(r.ctx, r.fset, r.packages[from.Pkg()], from.Pos(), from.Pos())
// path matches this pattern:
// [Ident SelectorExpr? StarExpr? Field FieldList StructType ParenExpr* ... File]

@@ -819,7 +820,7 @@ func someUse(info *types.Info, obj types.Object) *ast.Ident {
//
// The zero value is returned if not found.
//
func pathEnclosingInterval(fset *token.FileSet, pkg Package, start, end token.Pos) (resPkg Package, path []ast.Node, exact bool) {
func pathEnclosingInterval(ctx context.Context, fset *token.FileSet, pkg Package, start, end token.Pos) (resPkg Package, path []ast.Node, exact bool) {
var pkgs = []Package{pkg}
for _, f := range pkg.GetSyntax() {
for _, imp := range f.Imports {
@@ -146,7 +146,7 @@ func (r *runner) Completion(t *testing.T, data tests.Completions, snippets tests
t.Fatalf("failed to get token for %v", src)
}
pos := tok.Pos(src.Start().Offset())
list, surrounding, err := source.Completion(ctx, f.(source.GoFile), pos)
list, surrounding, err := source.Completion(ctx, r.view, f.(source.GoFile), pos)
if err != nil {
t.Fatalf("failed for %v: %v", src, err)
}

@@ -179,7 +179,7 @@ func (r *runner) Completion(t *testing.T, data tests.Completions, snippets tests
}
tok := f.GetToken(ctx)
pos := tok.Pos(src.Start().Offset())
list, _, err := source.Completion(ctx, f.(source.GoFile), pos)
list, _, err := source.Completion(ctx, r.view, f.(source.GoFile), pos)
if err != nil {
t.Fatalf("failed for %v: %v", src, err)
}

@@ -395,9 +395,12 @@ func (r *runner) Highlight(t *testing.T, data tests.Highlights) {
}
tok := f.GetToken(ctx)
pos := tok.Pos(src.Start().Offset())
highlights := source.Highlight(ctx, f.(source.GoFile), pos)
highlights, err := source.Highlight(ctx, f.(source.GoFile), pos)
if err != nil {
t.Errorf("highlight failed for %s: %v", src.URI(), err)
}
if len(highlights) != len(locations) {
t.Fatalf("got %d highlights for %s, expected %d", len(highlights), name, len(locations))
t.Errorf("got %d highlights for %s, expected %d", len(highlights), name, len(locations))
}
for i, h := range highlights {
if h != locations[i] {

@@ -450,18 +453,18 @@ func (r *runner) Reference(t *testing.T, data tests.References) {
func (r *runner) Rename(t *testing.T, data tests.Renames) {
ctx := context.Background()
for spn, newText := range data {
uri := spn.URI()
filename := uri.Filename()

f, err := r.view.GetFile(ctx, spn.URI())
if err != nil {
t.Fatalf("failed for %v: %v", spn, err)
}

tok := f.GetToken(ctx)
pos := tok.Pos(spn.Start().Offset())

changes, err := source.Rename(context.Background(), r.view, f.(source.GoFile), pos, newText)
ident, err := source.Identifier(context.Background(), r.view, f.(source.GoFile), pos)
if err != nil {
t.Error(err)
}
changes, err := ident.Rename(context.Background(), newText)
if err != nil {
t.Error(err)
continue

@@ -472,9 +475,9 @@ func (r *runner) Rename(t *testing.T, data tests.Renames) {
continue
}

edits := changes[uri]
edits := changes[spn.URI()]
if edits == nil {
t.Errorf("rename failed for %s, did not edit %s", newText, filename)
t.Errorf("rename failed for %s, did not edit %s", newText, spn.URI())
continue
}
data, _, err := f.Handle(ctx).Read(ctx)

@@ -485,7 +488,7 @@ func (r *runner) Rename(t *testing.T, data tests.Renames) {

got := applyEdits(string(data), edits)
tag := fmt.Sprintf("%s-rename", newText)
gorenamed := string(r.data.Golden(tag, filename, func() ([]byte, error) {
gorenamed := string(r.data.Golden(tag, spn.URI().Filename(), func() ([]byte, error) {
return []byte(got), nil
}))

@@ -527,8 +530,10 @@ func (r *runner) Symbol(t *testing.T, data tests.Symbols) {
if err != nil {
t.Fatalf("failed for %v: %v", uri, err)
}
symbols := source.DocumentSymbols(ctx, f.(source.GoFile))

symbols, err := source.DocumentSymbols(ctx, f.(source.GoFile))
if err != nil {
t.Errorf("symbols failed for %s: %v", uri, err)
}
if len(symbols) != len(expectedSymbols) {
t.Errorf("want %d top-level symbols in %v, got %d", len(expectedSymbols), uri, len(symbols))
continue
@@ -40,15 +40,15 @@ type Symbol struct {
Children []Symbol
}

func DocumentSymbols(ctx context.Context, f GoFile) []Symbol {
func DocumentSymbols(ctx context.Context, f GoFile) ([]Symbol, error) {
fset := f.FileSet()
file := f.GetAST(ctx)
if file == nil {
return nil
return nil, fmt.Errorf("no AST for %s", f.URI())
}
pkg := f.GetPackage(ctx)
if pkg == nil || pkg.IsIllTyped() {
return nil
return nil, fmt.Errorf("no package for %s", f.URI())
}
info := pkg.GetTypesInfo()
q := qualifier(file, pkg.GetTypes(), info)

@@ -102,8 +102,7 @@ func DocumentSymbols(ctx context.Context, f GoFile) []Symbol {
symbols = append(symbols, methods...)
}
}

return symbols
return symbols, nil
}

func funcSymbol(decl *ast.FuncDecl, obj types.Object, fset *token.FileSet, q types.Qualifier) Symbol {
@@ -30,9 +30,12 @@ type FileHandle interface {
// FileSystem returns the file system this handle was acquired from.
FileSystem() FileSystem

// Return the Identity for the file.
// Identity returns the FileIdentity for the file.
Identity() FileIdentity

// Kind returns the FileKind for the file.
Kind() FileKind

// Read reads the contents of a file and returns it along with its hash value.
// If the file is not available, returns a nil slice and an error.
Read(ctx context.Context) ([]byte, string, error)

@@ -44,6 +47,16 @@ type FileSystem interface {
GetFile(uri span.URI) FileHandle
}

// FileKind describes the kind of the file in question.
// It can be one of Go, mod, or sum.
type FileKind int

const (
Go = FileKind(iota)
Mod
Sum
)

// TokenHandle represents a handle to the *token.File for a file.
type TokenHandle interface {
// File returns a file handle for which to get the *token.File.
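FileKind is new in this change; the concrete mapping from a path to a kind lives in the internal/lsp/cache/view.go hunks above, which switch on the file extension. A hedged sketch of that mapping as a standalone helper (fileKindFor is an illustrative name, and the ".mod" case is inferred from the kind: source.Mod assignment above):

    // fileKindFor maps a filename to a source.FileKind by extension:
    // .mod -> Mod, .sum -> Sum, anything else -> Go.
    func fileKindFor(filename string) source.FileKind {
    	switch filepath.Ext(filename) {
    	case ".mod":
    		return source.Mod
    	case ".sum":
    		return source.Sum
    	default:
    		return source.Go
    	}
    }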
@@ -19,7 +19,10 @@ func (s *Server) documentSymbol(ctx context.Context, params *protocol.DocumentSy
if err != nil {
return nil, err
}
symbols := source.DocumentSymbols(ctx, f)
symbols, err := source.DocumentSymbols(ctx, f)
if err != nil {
return nil, err
}
return toProtocolDocumentSymbols(m, symbols), nil
}