internal/lsp: use memoize package to cache source.Packages

This change eliminates the need for the package cache map, and instead
stores package type information in the store. We still have to maintain
invalidation logic because the key is not computed correctly.

Change-Id: I1c2a7502b99491ef0ff68d68c9f439503d531ff1
Reviewed-on: https://go-review.googlesource.com/c/tools/+/185438
Run-TryBot: Rebecca Stambler <rstambler@golang.org>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Ian Cottrell <iancottrell@google.com>
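The caching pattern this change introduces, distilled here as a short sketch from the check.go diff below. The memoize API used (Store.Bind, Handle.Get, Handle.Cached) is internal to golang.org/x/tools, and checkPackageKey/checkPackageData are types added in this change; the surrounding variables (view, imp, cph, m, key) stand in for fields visible in the diff and are shown only to illustrate the flow, not as a standalone program.

// Bind a computation to a key; the store runs it at most once per key.
handle := view.session.cache.store.Bind(key, func(ctx context.Context) interface{} {
	data := &checkPackageData{}
	data.pkg, data.err = imp.typeCheck(cph, m) // type-check once, then reuse
	return data
})

// Force (or wait for) the computation and read the memoized result.
v := handle.Get(ctx) // returns nil only if ctx was cancelled
if v == nil {
	return nil, ctx.Err()
}
data := v.(*checkPackageData)
return data.pkg, data.err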
This commit is contained in:
parent 41f3357785
commit 62ee03427e
333 internal/lsp/cache/check.go (vendored)

@@ -5,10 +5,10 @@
package cache

import (
"bytes"
"context"
"go/ast"
"go/scanner"
"go/token"
"go/types"
"sync"

@@ -18,95 +18,201 @@ import (
"golang.org/x/tools/internal/lsp/telemetry"
"golang.org/x/tools/internal/lsp/telemetry/log"
"golang.org/x/tools/internal/lsp/telemetry/trace"
"golang.org/x/tools/internal/span"
"golang.org/x/tools/internal/memoize"
errors "golang.org/x/xerrors"
)

type importer struct {
view *view
view *view
ctx context.Context
config *packages.Config

// seen maintains the set of previously imported packages.
// If we have seen a package that is already in this map, we have a circular import.
seen map[packageID]struct{}

// topLevelPkgID is the ID of the package from which type-checking began.
topLevelPkgID packageID
// topLevelPackageID is the ID of the package from which type-checking began.
topLevelPackageID packageID

ctx context.Context
fset *token.FileSet
// parentPkg is the package that imports the current package.
parentPkg *pkg

// parentCheckPackageHandle is the check package handle that imports the current package.
parentCheckPackageHandle *checkPackageHandle
}

func (imp *importer) Import(pkgPath string) (*types.Package, error) {
ctx := imp.ctx
id, ok := imp.view.mcache.ids[packagePath(pkgPath)]
if !ok {
return nil, errors.Errorf("no known ID for %s", pkgPath)
// checkPackageKey uniquely identifies a package and its config.
type checkPackageKey struct {
files string
config string

// TODO: For now, we don't include dependencies in the key.
// This will be necessary when we change the cache invalidation logic.
}

// checkPackageHandle implements source.CheckPackageHandle.
type checkPackageHandle struct {
handle *memoize.Handle

files []source.ParseGoHandle
imports map[packagePath]*checkPackageHandle

m *metadata
config *packages.Config
}

// checkPackageData contains the data produced by type-checking a package.
type checkPackageData struct {
memoize.NoCopy

pkg *pkg
err error
}

func (pkg *pkg) GetImport(ctx context.Context, pkgPath string) (source.Package, error) {
if imp := pkg.imports[packagePath(pkgPath)]; imp != nil {
return imp, nil
}
pkg, err := imp.getPkg(ctx, id)
// Don't return a nil pointer because that still satisfies the interface.
return nil, errors.Errorf("no imported package for %s", pkgPath)
}

// checkPackageHandle returns a source.CheckPackageHandle for a given package and config.
func (imp *importer) checkPackageHandle(m *metadata) (*checkPackageHandle, error) {
phs, err := imp.parseGoHandles(m)
if err != nil {
return nil, err
}
return pkg.types, nil
key := checkPackageKey{
files: hashParseKeys(phs),
config: hashConfig(imp.config),
}
cph := &checkPackageHandle{
m: m,
files: phs,
config: imp.config,
imports: make(map[packagePath]*checkPackageHandle),
}
h := imp.view.session.cache.store.Bind(key, func(ctx context.Context) interface{} {
data := &checkPackageData{}
data.pkg, data.err = func() (*pkg, error) {
return imp.typeCheck(cph, m)
}()
return data
})
cph.handle = h
return cph, nil
}

func (imp *importer) getPkg(ctx context.Context, id packageID) (*pkg, error) {
if _, ok := imp.seen[id]; ok {
return nil, errors.Errorf("circular import detected")
// hashConfig returns the hash for the *packages.Config.
func hashConfig(config *packages.Config) string {
b := bytes.NewBuffer(nil)

// Dir, Mode, Env, BuildFlags are the parts of the config that can change.
b.WriteString(config.Dir)
b.WriteString(string(config.Mode))

for _, e := range config.Env {
b.WriteString(e)
}
imp.view.pcache.mu.Lock()
e, ok := imp.view.pcache.packages[id]

if ok {
// cache hit
imp.view.pcache.mu.Unlock()
// wait for entry to become ready
<-e.ready
} else {
// cache miss
e = &entry{ready: make(chan struct{})}
imp.view.pcache.packages[id] = e
imp.view.pcache.mu.Unlock()

// This goroutine becomes responsible for populating
// the entry and broadcasting its readiness.
e.pkg, e.err = imp.typeCheck(ctx, id)
if e.err != nil {
// Don't cache failed packages. If we didn't successfully cache the package
// in each file, then this pcache entry won't get invalidated as those files
// change.
imp.view.pcache.mu.Lock()
if imp.view.pcache.packages[id] == e {
delete(imp.view.pcache.packages, id)
}
imp.view.pcache.mu.Unlock()
}
close(e.ready)
for _, f := range config.BuildFlags {
b.WriteString(f)
}

if e.err != nil {
// If the import had been previously canceled, and that error cached, try again.
if e.err == context.Canceled && ctx.Err() == nil {
return imp.getPkg(ctx, id)
}
return nil, e.err
}

return e.pkg, nil
return hashContents(b.Bytes())
}

func (imp *importer) typeCheck(ctx context.Context, id packageID) (*pkg, error) {
ctx, done := trace.StartSpan(ctx, "cache.importer.typeCheck", telemetry.Package.Of(id))
defer done()
meta, ok := imp.view.mcache.packages[id]
func (cph *checkPackageHandle) Check(ctx context.Context) (source.Package, error) {
return cph.check(ctx)
}

func (cph *checkPackageHandle) check(ctx context.Context) (*pkg, error) {
v := cph.handle.Get(ctx)
if v == nil {
return nil, ctx.Err()
}
data := v.(*checkPackageData)
return data.pkg, data.err
}

func (cph *checkPackageHandle) Config() *packages.Config {
return cph.config
}

func (cph *checkPackageHandle) Files() []source.ParseGoHandle {
return cph.files
}

func (cph *checkPackageHandle) Cached(ctx context.Context) (source.Package, error) {
v := cph.handle.Cached()
if v == nil {
return nil, errors.Errorf("no cached value for %s", cph.m.pkgPath)
}
data := v.(*checkPackageData)
return data.pkg, data.err
}

func (imp *importer) parseGoHandles(m *metadata) ([]source.ParseGoHandle, error) {
phs := make([]source.ParseGoHandle, 0, len(m.files))
for _, uri := range m.files {
// Call the unlocked version of getFile since we are holding the view's mutex.
f, err := imp.view.GetFile(imp.ctx, uri)
if err != nil {
return nil, err
}
gof, ok := f.(*goFile)
if !ok {
return nil, errors.Errorf("%s is not a Go file", f.URI())
}
fh := gof.Handle(imp.ctx)
mode := source.ParseExported
if imp.topLevelPackageID == m.id {
mode = source.ParseFull
} else if imp.view.session.cache.store.Cached(parseKey{
file: fh.Identity(),
mode: source.ParseFull,
}) != nil {
// If we have the full AST cached, don't bother getting the trimmed version.
mode = source.ParseFull
}
phs = append(phs, imp.view.session.cache.ParseGoHandle(fh, mode))
}
return phs, nil
}

func (imp *importer) Import(pkgPath string) (*types.Package, error) {
// We need to set the parent package's imports, so there should always be one.
if imp.parentPkg == nil {
return nil, errors.Errorf("no parent package for import %s", pkgPath)
}
// Get the package metadata from the importing package.
cph, ok := imp.parentCheckPackageHandle.imports[packagePath(pkgPath)]
if !ok {
return nil, errors.Errorf("no metadata for %v", id)
return nil, errors.Errorf("no package data for import path %s", pkgPath)
}
// Create a check package handle to get the type information for this package.
pkg, err := cph.check(imp.ctx)
if err != nil {
return nil, err
}
imp.parentPkg.imports[packagePath(pkgPath)] = pkg
// Add every file in this package to our cache.
if err := imp.cachePackage(cph, pkg, cph.m); err != nil {
return nil, err
}
return pkg.GetTypes(), nil
}

func (imp *importer) typeCheck(cph *checkPackageHandle, m *metadata) (*pkg, error) {
ctx, done := trace.StartSpan(imp.ctx, "cache.importer.typeCheck")
defer done()

pkg := &pkg{
view: imp.view,
id: meta.id,
pkgPath: meta.pkgPath,
id: m.id,
pkgPath: m.pkgPath,
files: cph.Files(),
imports: make(map[packagePath]*pkg),
typesSizes: meta.typesSizes,
typesSizes: m.typesSizes,
typesInfo: &types.Info{
Types: make(map[ast.Expr]types.TypeAndValue),
Defs: make(map[*ast.Ident]types.Object),
@@ -117,26 +223,26 @@ func (imp *importer) typeCheck(ctx context.Context, id packageID) (*pkg, error)
},
analyses: make(map[*analysis.Analyzer]*analysisEntry),
}

// Ignore function bodies for any dependency packages.
mode := source.ParseFull
if imp.topLevelPkgID != pkg.id {
mode = source.ParseExported
// If the package comes back with errors from `go list`,
// don't bother type-checking it.
for _, err := range m.errors {
pkg.errors = append(m.errors, err)
}
var (
files = make([]*ast.File, len(meta.files))
parseErrors = make([]error, len(meta.files))
wg sync.WaitGroup
)
for _, filename := range meta.files {
uri := span.FileURI(filename)
f, err := imp.view.getFile(ctx, uri)
// Set imports of package to correspond to cached packages.
cimp := imp.child(pkg, cph)
for _, child := range m.children {
childHandle, err := cimp.checkPackageHandle(child)
if err != nil {
log.Error(ctx, "unable to get file", err, telemetry.File.Of(f.URI()))
log.Error(imp.ctx, "no check package handle", err, telemetry.Package.Of(child.id))
continue
}
pkg.files = append(pkg.files, imp.view.session.cache.ParseGoHandle(f.Handle(ctx), mode))
cph.imports[child.pkgPath] = childHandle
}
var (
files = make([]*ast.File, len(pkg.files))
parseErrors = make([]error, len(pkg.files))
wg sync.WaitGroup
)
for i, ph := range pkg.files {
wg.Add(1)
go func(i int, ph source.ParseGoHandle) {
@@ -166,86 +272,73 @@ func (imp *importer) typeCheck(ctx context.Context, id packageID) (*pkg, error)
files = files[:i]

// Use the default type information for the unsafe package.
if meta.pkgPath == "unsafe" {
if m.pkgPath == "unsafe" {
pkg.types = types.Unsafe
} else if len(files) == 0 { // not the unsafe package, no parsed files
return nil, errors.Errorf("no parsed files for package %s", pkg.pkgPath)
} else {
pkg.types = types.NewPackage(string(meta.pkgPath), meta.name)
pkg.types = types.NewPackage(string(m.pkgPath), m.name)
}

// Handle circular imports by copying previously seen imports.
seen := make(map[packageID]struct{})
for k, v := range imp.seen {
seen[k] = v
}
seen[id] = struct{}{}

cfg := &types.Config{
Error: func(err error) {
imp.view.session.cache.appendPkgError(pkg, err)
},
IgnoreFuncBodies: mode == source.ParseExported,
Importer: &importer{
view: imp.view,
ctx: ctx,
fset: imp.fset,
topLevelPkgID: imp.topLevelPkgID,
seen: seen,
},
Importer: cimp,
}
check := types.NewChecker(cfg, imp.fset, pkg.types, pkg.typesInfo)
check := types.NewChecker(cfg, imp.view.session.cache.FileSet(), pkg.types, pkg.typesInfo)

// Ignore type-checking errors.
check.Files(files)

// Add every file in this package to our cache.
if err := imp.cachePackage(ctx, pkg, meta, mode); err != nil {
return nil, err
}

return pkg, nil
}

func (imp *importer) cachePackage(ctx context.Context, pkg *pkg, meta *metadata, mode source.ParseMode) error {
func (imp *importer) child(pkg *pkg, cph *checkPackageHandle) *importer {
// Handle circular imports by copying previously seen imports.
seen := make(map[packageID]struct{})
for k, v := range imp.seen {
seen[k] = v
}
seen[pkg.id] = struct{}{}
return &importer{
view: imp.view,
ctx: imp.ctx,
config: imp.config,
seen: seen,
topLevelPackageID: imp.topLevelPackageID,
parentPkg: pkg,
parentCheckPackageHandle: cph,
}
}

func (imp *importer) cachePackage(cph *checkPackageHandle, pkg *pkg, m *metadata) error {
for _, ph := range pkg.files {
uri := ph.File().Identity().URI
f, err := imp.view.getFile(ctx, uri)
f, err := imp.view.GetFile(imp.ctx, uri)
if err != nil {
return errors.Errorf("no such file %s: %v", uri, err)
}
gof, ok := f.(*goFile)
if !ok {
return errors.Errorf("non Go file %s", uri)
return errors.Errorf("%s is not a Go file", uri)
}
if err := imp.cachePerFile(gof, ph, pkg); err != nil {
if err := imp.cachePerFile(gof, ph, cph, m); err != nil {
return errors.Errorf("failed to cache file %s: %v", gof.URI(), err)
}
}

// Set imports of package to correspond to cached packages.
// We lock the package cache, but we shouldn't get any inconsistencies
// because we are still holding the lock on the view.
for importPath := range meta.children {
importPkg, err := imp.getPkg(ctx, importPath)
if err != nil {
continue
}
pkg.imports[importPkg.pkgPath] = importPkg
}

return nil
}

func (imp *importer) cachePerFile(gof *goFile, ph source.ParseGoHandle, p *pkg) error {
func (imp *importer) cachePerFile(gof *goFile, ph source.ParseGoHandle, cph source.CheckPackageHandle, m *metadata) error {
gof.mu.Lock()
defer gof.mu.Unlock()

// Set the package even if we failed to parse the file.
if gof.pkgs == nil {
gof.pkgs = make(map[packageID]*pkg)
gof.pkgs = make(map[packageID]source.CheckPackageHandle)
}
gof.pkgs[p.id] = p
gof.pkgs[m.id] = cph

file, err := ph.Parse(imp.ctx)
if file == nil {
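A hedged aside on the consequence of the key design above: checkPackageKey hashes only the file identities and the *packages.Config, and the TODO in check.go notes that dependencies are deliberately left out of the key, so stale entries are not invalidated automatically when a dependency changes. The view.go hunk later in this commit therefore recomputes the same key and removes the entry by hand, roughly like this (v is the *view, cph the handle being invalidated):

// Recompute the key from the handle and drop the memoized entry.
v.session.cache.store.Delete(checkPackageKey{
	files:  hashParseKeys(cph.Files()),
	config: hashConfig(cph.Config()),
})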
169 internal/lsp/cache/gofile.go (vendored)

@@ -36,7 +36,7 @@ type goFile struct {

imports []*ast.ImportSpec

pkgs map[packageID]*pkg
pkgs map[packageID]source.CheckPackageHandle
meta map[packageID]*metadata
}

@@ -61,14 +61,11 @@ func (f *goFile) GetToken(ctx context.Context) (*token.File, error) {
}

func (f *goFile) GetAST(ctx context.Context, mode source.ParseMode) (*ast.File, error) {
f.view.mu.Lock()
defer f.view.mu.Unlock()

ctx = telemetry.File.With(ctx, f.URI())

if f.isDirty(ctx) || f.wrongParseMode(ctx, mode) {
if _, err := f.view.loadParseTypecheck(ctx, f); err != nil {
return nil, errors.Errorf("GetAST: unable to check package for %s: %v", f.URI(), err)
if err := f.view.loadParseTypecheck(ctx, f); err != nil {
return nil, err
}
}
// Check for a cached AST first, in case getting a trimmed version would actually cause a re-parse.
@@ -100,66 +97,92 @@ func (cache *cache) cachedAST(fh source.FileHandle, mode source.ParseMode) (*ast
return nil, nil
}

func (f *goFile) GetPackages(ctx context.Context) []source.Package {
f.view.mu.Lock()
defer f.view.mu.Unlock()
func (f *goFile) GetPackages(ctx context.Context) ([]source.Package, error) {
cphs, err := f.GetCheckPackageHandles(ctx)
if err != nil {
return nil, err
}
var pkgs []source.Package
for _, cph := range cphs {
pkg, err := cph.Check(ctx)
if err != nil {
log.Error(ctx, "failed to check package", err)
}
pkgs = append(pkgs, pkg)
}
if len(pkgs) == 0 {
return nil, errors.Errorf("no packages for %s", f.URI())
}
return pkgs, nil
}

func (f *goFile) GetPackage(ctx context.Context) (source.Package, error) {
cph, err := f.GetCheckPackageHandle(ctx)
if err != nil {
return nil, err
}
return cph.Check(ctx)
}

func (f *goFile) GetCheckPackageHandles(ctx context.Context) ([]source.CheckPackageHandle, error) {
ctx = telemetry.File.With(ctx, f.URI())

if f.isDirty(ctx) || f.wrongParseMode(ctx, source.ParseFull) {
if errs, err := f.view.loadParseTypecheck(ctx, f); err != nil {
log.Error(ctx, "unable to check package", err, telemetry.File)

// Create diagnostics for errors if we are able to.
if len(errs) > 0 {
return []source.Package{&pkg{errors: errs}}
}
return nil
if err := f.view.loadParseTypecheck(ctx, f); err != nil {
return nil, err
}
}

f.mu.Lock()
defer f.mu.Unlock()

var pkgs []source.Package
for _, pkg := range f.pkgs {
pkgs = append(pkgs, pkg)
var cphs []source.CheckPackageHandle
for _, cph := range f.pkgs {
cphs = append(cphs, cph)
}
return pkgs
if len(cphs) == 0 {
return nil, errors.Errorf("no CheckPackageHandles for %s", f.URI())
}
return cphs, nil
}

func (f *goFile) GetPackage(ctx context.Context) source.Package {
pkgs := f.GetPackages(ctx)
var result source.Package

// Pick the "narrowest" package, i.e. the package with the fewest number of files.
// This solves the problem of test variants,
// as the test will have more files than the non-test package.
for _, pkg := range pkgs {
if result == nil || len(pkg.GetHandles()) < len(result.GetHandles()) {
result = pkg
}
func (f *goFile) GetCheckPackageHandle(ctx context.Context) (source.CheckPackageHandle, error) {
cphs, err := f.GetCheckPackageHandles(ctx)
if err != nil {
return nil, err
}
return result
return bestCheckPackageHandle(f.URI(), cphs)
}

func (f *goFile) GetCachedPackage(ctx context.Context) (source.Package, error) {
f.view.mu.Lock()
defer f.view.mu.Unlock()

f.mu.Lock()
defer f.mu.Unlock()
var cphs []source.CheckPackageHandle
for _, cph := range f.pkgs {
cphs = append(cphs, cph)
}
f.mu.Unlock()

var result source.Package
// Pick the "narrowest" package, i.e. the package with the fewest number of files.
// This solves the problem of test variants,
// as the test will have more files than the non-test package.
for _, pkg := range f.pkgs {
if result == nil || len(pkg.GetHandles()) < len(result.GetHandles()) {
result = pkg
cph, err := bestCheckPackageHandle(f.URI(), cphs)
if err != nil {
return nil, err
}
return cph.Cached(ctx)
}

// bestCheckPackageHandle picks the "narrowest" package for a given file.
//
// By "narrowest" package, we mean the package with the fewest number of files
// that includes the given file. This solves the problem of test variants,
// as the test will have more files than the non-test package.
func bestCheckPackageHandle(uri span.URI, cphs []source.CheckPackageHandle) (source.CheckPackageHandle, error) {
var result source.CheckPackageHandle
for _, cph := range cphs {
if result == nil || len(cph.Files()) < len(result.Files()) {
result = cph
}
}
if result == nil {
return nil, errors.Errorf("no cached package for %s", f.URI())
return nil, errors.Errorf("no CheckPackageHandle for %s", uri)
}
return result, nil
}
@@ -169,14 +192,14 @@ func (f *goFile) wrongParseMode(ctx context.Context, mode source.ParseMode) bool
defer f.mu.Unlock()

fh := f.Handle(ctx)
for _, pkg := range f.pkgs {
for _, ph := range pkg.files {
for _, cph := range f.pkgs {
for _, ph := range cph.Files() {
if fh.Identity() == ph.File().Identity() {
return ph.Mode() < mode
}
}
}
return false
return true
}

// isDirty is true if the file needs to be type-checked.
@@ -191,7 +214,7 @@ func (f *goFile) isDirty(ctx context.Context) bool {
// Note: This must be the first case, otherwise we may not reset the value of f.justOpened.
if f.justOpened {
f.meta = make(map[packageID]*metadata)
f.pkgs = make(map[packageID]*pkg)
f.pkgs = make(map[packageID]source.CheckPackageHandle)
f.justOpened = false
return true
}
@@ -202,8 +225,8 @@ func (f *goFile) isDirty(ctx context.Context) bool {
return true
}
fh := f.Handle(ctx)
for _, pkg := range f.pkgs {
for _, file := range pkg.files {
for _, cph := range f.pkgs {
for _, file := range cph.Files() {
// There is a type-checked package for the current file handle.
if file.File().Identity() == fh.Identity() {
return false
@@ -213,34 +236,40 @@ func (f *goFile) isDirty(ctx context.Context) bool {
return true
}

func (f *goFile) GetActiveReverseDeps(ctx context.Context) []source.GoFile {
pkg := f.GetPackage(ctx)
if pkg == nil {
func (f *goFile) GetActiveReverseDeps(ctx context.Context) (files []source.GoFile) {
f.mu.Lock()
meta := f.meta
f.mu.Unlock()

if meta == nil {
return nil
}

seen := make(map[packageID]struct{}) // visited packages
results := make(map[*goFile]struct{})

f.view.mu.Lock()
defer f.view.mu.Unlock()

f.view.mcache.mu.Lock()
defer f.view.mcache.mu.Unlock()

id := packageID(pkg.ID())
for _, m := range meta {
f.view.reverseDeps(ctx, seen, results, m.id)
for f := range results {
if f == nil {
continue
}
// Don't return any of the active files in this package.
f.mu.Lock()
_, ok := f.meta[m.id]
f.mu.Unlock()
if ok {
continue
}

seen := make(map[packageID]struct{}) // visited packages
results := make(map[*goFile]struct{})
f.view.reverseDeps(ctx, seen, results, id)

var files []source.GoFile
for rd := range results {
if rd == nil {
continue
files = append(files, f)
}
// Don't return any of the active files in this package.
if _, ok := rd.pkgs[id]; ok {
continue
}
files = append(files, rd)
}
return files
}
@@ -254,8 +283,8 @@ func (v *view) reverseDeps(ctx context.Context, seen map[packageID]struct{}, res
if !ok {
return
}
for _, filename := range m.files {
uri := span.FileURI(filename)
for _, uri := range m.files {
// Call unlocked version of getFile since we hold the lock on the view.
if f, err := v.getFile(ctx, uri); err == nil && v.session.IsOpen(uri) {
results[f.(*goFile)] = struct{}{}
}
254 internal/lsp/cache/load.go (vendored)

@@ -18,75 +18,69 @@ import (
errors "golang.org/x/xerrors"
)

func (v *view) loadParseTypecheck(ctx context.Context, f *goFile) ([]packages.Error, error) {
v.mcache.mu.Lock()
defer v.mcache.mu.Unlock()
func (view *view) loadParseTypecheck(ctx context.Context, f *goFile) error {
pkgs, err := view.load(ctx, f)
if err != nil {
return err
}
for _, m := range pkgs {
imp := &importer{
view: view,
ctx: ctx,
config: view.Config(ctx),
seen: make(map[packageID]struct{}),
topLevelPackageID: m.id,
}
cph, err := imp.checkPackageHandle(m)
if err != nil {
log.Error(ctx, "failed to get CheckPackgeHandle", err)
continue
}
pkg, err := cph.check(ctx)
if err != nil {
log.Error(ctx, "failed to check package", err)
continue
}
// Cache this package on the file object, since all dependencies are cached in the Import function.
imp.cachePackage(cph, pkg, m)
}
return nil
}

func (view *view) load(ctx context.Context, f *goFile) (map[packageID]*metadata, error) {
view.mu.Lock()
defer view.mu.Unlock()

view.mcache.mu.Lock()
defer view.mcache.mu.Unlock()

// If the AST for this file is trimmed, and we are explicitly type-checking it,
// don't ignore function bodies.
if f.wrongParseMode(ctx, source.ParseFull) {
v.pcache.mu.Lock()
f.invalidateAST(ctx)
v.pcache.mu.Unlock()
}

// Get the metadata for the file.
meta, errs, err := v.checkMetadata(ctx, f)
meta, err := view.checkMetadata(ctx, f)
if err != nil {
return errs, err
return nil, err
}
if meta == nil {
return nil, nil
if len(f.meta) == 0 {
return nil, fmt.Errorf("no package metadata found for %s", f.URI())
}
for id, m := range meta {
imp := &importer{
view: v,
seen: make(map[packageID]struct{}),
ctx: ctx,
fset: v.session.cache.FileSet(),
topLevelPkgID: id,
}
// Start prefetching direct imports.
for importID := range m.children {
go imp.getPkg(ctx, importID)
}
// Type-check package.
pkg, err := imp.getPkg(ctx, imp.topLevelPkgID)
if err != nil {
return nil, err
}
if pkg == nil || pkg.IsIllTyped() {
return nil, errors.Errorf("loadParseTypecheck: %s is ill typed", m.pkgPath)
}
}
if len(f.pkgs) == 0 {
return nil, errors.Errorf("no packages found for %s", f.URI())
}
return nil, nil
}

func sameSet(x, y map[packagePath]struct{}) bool {
if len(x) != len(y) {
return false
}
for k := range x {
if _, ok := y[k]; !ok {
return false
}
}
return true
return meta, nil
}

// checkMetadata determines if we should run go/packages.Load for this file.
// If yes, update the metadata for the file and its package.
func (v *view) checkMetadata(ctx context.Context, f *goFile) (map[packageID]*metadata, []packages.Error, error) {
if !v.runGopackages(ctx, f) {
return f.meta, nil, nil
func (v *view) checkMetadata(ctx context.Context, f *goFile) (map[packageID]*metadata, error) {
if !v.shouldRunGopackages(ctx, f) {
return f.meta, nil
}

// Check if the context has been canceled before calling packages.Load.
if ctx.Err() != nil {
return nil, nil, ctx.Err()
return nil, ctx.Err()
}

ctx, done := trace.StartSpan(ctx, "packages.Load", telemetry.File.Of(f.filename()))
@@ -97,12 +91,7 @@ func (v *view) checkMetadata(ctx context.Context, f *goFile) (map[packageID]*met
err = errors.Errorf("go/packages.Load: no packages found for %s", f.filename())
}
// Return this error as a diagnostic to the user.
return nil, []packages.Error{
{
Msg: err.Error(),
Kind: packages.UnknownError,
},
}, err
return nil, err
}
// Track missing imports as we look at the package's errors.
missingImports := make(map[packagePath]struct{})
@@ -110,21 +99,21 @@ func (v *view) checkMetadata(ctx context.Context, f *goFile) (map[packageID]*met
log.Print(ctx, "go/packages.Load", tag.Of("packages", len(pkgs)))
for _, pkg := range pkgs {
log.Print(ctx, "go/packages.Load", tag.Of("package", pkg.PkgPath), tag.Of("files", pkg.CompiledGoFiles))
// If the package comes back with errors from `go list`,
// don't bother type-checking it.
if len(pkg.Errors) > 0 {
return nil, pkg.Errors, errors.Errorf("package %s has errors, skipping type-checking", pkg.PkgPath)
}
// Build the import graph for this package.
if err := v.link(ctx, packagePath(pkg.PkgPath), pkg, nil, missingImports); err != nil {
return nil, nil, err
if err := v.link(ctx, &importGraph{
pkgPath: packagePath(pkg.PkgPath),
pkg: pkg,
parent: nil,
missingImports: make(map[packagePath]struct{}),
}); err != nil {
return nil, err
}
}
m, err := validateMetadata(ctx, missingImports, f)
if err != nil {
return nil, nil, err
return nil, err
}
return m, nil, nil
return m, nil
}

func validateMetadata(ctx context.Context, missingImports map[packagePath]struct{}, f *goFile) (map[packageID]*metadata, error) {
@@ -146,9 +135,21 @@ func validateMetadata(ctx context.Context, missingImports map[packagePath]struct
return f.meta, nil
}

func sameSet(x, y map[packagePath]struct{}) bool {
if len(x) != len(y) {
return false
}
for k := range x {
if _, ok := y[k]; !ok {
return false
}
}
return true
}

// reparseImports reparses a file's package and import declarations to
// determine if they have changed.
func (v *view) runGopackages(ctx context.Context, f *goFile) (result bool) {
func (v *view) shouldRunGopackages(ctx context.Context, f *goFile) (result bool) {
f.mu.Lock()
defer func() {
// Clear metadata if we are intending to re-run go/packages.
@@ -161,14 +162,12 @@ func (v *view) runGopackages(ctx context.Context, f *goFile) (result bool) {
delete(f.pkgs, k)
}
}

f.mu.Unlock()
}()

if len(f.meta) == 0 || len(f.missingImports) > 0 {
return true
}

// Get file content in case we don't already have it.
parsed, err := v.session.cache.ParseGoHandle(f.Handle(ctx), source.ParseHeader).Parse(ctx)
if err == context.Canceled {
@@ -177,61 +176,49 @@ func (v *view) runGopackages(ctx context.Context, f *goFile) (result bool) {
if parsed == nil {
return true
}

// Check if the package's name has changed, by checking if this is a filename
// we already know about, and if so, check if its package name has changed.
for _, m := range f.meta {
for _, filename := range m.files {
if filename == f.URI().Filename() {
for _, uri := range m.files {
if span.CompareURI(uri, f.URI()) == 0 {
if m.name != parsed.Name.Name {
return true
}
}
}
}

// If the package's imports have changed, re-run `go list`.
if len(f.imports) != len(parsed.Imports) {
return true
}

for i, importSpec := range f.imports {
if importSpec.Path.Value != parsed.Imports[i].Path.Value {
return true
}
}

return false
}

func (v *view) link(ctx context.Context, pkgPath packagePath, pkg *packages.Package, parent *metadata, missingImports map[packagePath]struct{}) error {
id := packageID(pkg.ID)
m, ok := v.mcache.packages[id]
type importGraph struct {
pkgPath packagePath
pkg *packages.Package
parent *metadata
missingImports map[packagePath]struct{}
}

// If a file was added or deleted we need to invalidate the package cache
// so relevant packages get parsed and type-checked again.
if ok && !filenamesIdentical(m.files, pkg.CompiledGoFiles) {
v.pcache.mu.Lock()
v.remove(ctx, id, make(map[packageID]struct{}))
v.pcache.mu.Unlock()
func (v *view) link(ctx context.Context, g *importGraph) error {
// Recreate the metadata rather than reusing it to avoid locking.
m := &metadata{
id: packageID(g.pkg.ID),
pkgPath: g.pkgPath,
name: g.pkg.Name,
typesSizes: g.pkg.TypesSizes,
errors: g.pkg.Errors,
}
for _, filename := range g.pkg.CompiledGoFiles {
m.files = append(m.files, span.FileURI(filename))

// If we haven't seen this package before.
if !ok {
m = &metadata{
pkgPath: pkgPath,
id: id,
typesSizes: pkg.TypesSizes,
parents: make(map[packageID]bool),
children: make(map[packageID]bool),
}
v.mcache.packages[id] = m
v.mcache.ids[pkgPath] = id
}
// Reset any field that could have changed across calls to packages.Load.
m.name = pkg.Name
m.files = pkg.CompiledGoFiles
for _, filename := range m.files {
// Call the unlocked version of getFile since we are holding the view's mutex.
f, err := v.getFile(ctx, span.FileURI(filename))
if err != nil {
log.Error(ctx, "no file", err, telemetry.File.Of(filename))
@@ -247,57 +234,78 @@ func (v *view) link(ctx context.Context, pkgPath packagePath, pkg *packages.Pack
}
gof.meta[m.id] = m
}
// Connect the import graph.
if parent != nil {
m.parents[parent.id] = true
parent.children[id] = true

// Preserve the import graph.
if original, ok := v.mcache.packages[m.id]; ok {
m.children = original.children
m.parents = original.parents
}
for importPath, importPkg := range pkg.Imports {
if m.children == nil {
m.children = make(map[packageID]*metadata)
}
if m.parents == nil {
m.parents = make(map[packageID]bool)
}

// Add the metadata to the cache.
v.mcache.packages[m.id] = m
v.mcache.ids[g.pkgPath] = m.id

// Connect the import graph.
if g.parent != nil {
m.parents[g.parent.id] = true
g.parent.children[m.id] = m
}
for importPath, importPkg := range g.pkg.Imports {
importPkgPath := packagePath(importPath)
if importPkgPath == pkgPath {
return errors.Errorf("cycle detected in %s", importPath)
if importPkgPath == g.pkgPath {
return fmt.Errorf("cycle detected in %s", importPath)
}
// Don't remember any imports with significant errors.
if importPkgPath != "unsafe" && len(pkg.CompiledGoFiles) == 0 {
missingImports[importPkgPath] = struct{}{}
if importPkgPath != "unsafe" && len(importPkg.CompiledGoFiles) == 0 {
g.missingImports[importPkgPath] = struct{}{}
continue
}
if _, ok := m.children[packageID(importPkg.ID)]; !ok {
if err := v.link(ctx, importPkgPath, importPkg, m, missingImports); err != nil {
log.Error(ctx, "error in dependency", err, telemetry.Package.Of(importPkgPath))
if err := v.link(ctx, &importGraph{
pkgPath: importPkgPath,
pkg: importPkg,
parent: m,
missingImports: g.missingImports,
}); err != nil {
log.Error(ctx, "error in dependecny", err)
}
}
}
// Clear out any imports that have been removed.
// Clear out any imports that have been removed since the package was last loaded.
for importID := range m.children {
child, ok := v.mcache.packages[importID]
if !ok {
continue
}
importPath := string(child.pkgPath)
if _, ok := pkg.Imports[importPath]; ok {
if _, ok := g.pkg.Imports[importPath]; ok {
continue
}
delete(m.children, importID)
delete(child.parents, id)
delete(child.parents, m.id)
}
return nil
}

// filenamesIdentical reports whether two sets of file names are identical.
func filenamesIdentical(oldFiles, newFiles []string) bool {
if len(oldFiles) != len(newFiles) {
func identicalFileHandles(old, new []source.ParseGoHandle) bool {
if len(old) != len(new) {
return false
}
oldByName := make(map[string]struct{}, len(oldFiles))
for _, filename := range oldFiles {
oldByName[filename] = struct{}{}
oldByIdentity := make(map[string]struct{}, len(old))
for _, ph := range old {
oldByIdentity[hashParseKey(ph)] = struct{}{}
}
for _, newFilename := range newFiles {
if _, found := oldByName[newFilename]; !found {
for _, ph := range new {
if _, found := oldByIdentity[hashParseKey(ph)]; !found {
return false
}
delete(oldByName, newFilename)
delete(oldByIdentity, hashParseKey(ph))
}
return len(oldByName) == 0
return len(oldByIdentity) == 0
}
16 internal/lsp/cache/parse.go (vendored)

@@ -5,6 +5,7 @@
package cache

import (
"bytes"
"context"
"go/ast"
"go/parser"
@@ -83,6 +84,21 @@ func (h *parseGoHandle) Cached(ctx context.Context) (*ast.File, error) {
return data.ast, data.err
}

func hashParseKey(ph source.ParseGoHandle) string {
b := bytes.NewBuffer(nil)
b.WriteString(ph.File().Identity().String())
b.WriteString(string(ph.Mode()))
return hashContents(b.Bytes())
}

func hashParseKeys(phs []source.ParseGoHandle) string {
b := bytes.NewBuffer(nil)
for _, ph := range phs {
b.WriteString(hashParseKey(ph))
}
return hashContents(b.Bytes())
}

func parseGo(ctx context.Context, c *cache, fh source.FileHandle, mode source.ParseMode) (*ast.File, error) {
ctx, done := trace.StartSpan(ctx, "cache.parseGo", telemetry.File.Of(fh.Identity().URI.Filename()))
defer done()
14 internal/lsp/cache/pkg.go (vendored)

@@ -125,9 +125,9 @@ func (pkg *pkg) GetActionGraph(ctx context.Context, a *analysis.Analyzer) (*sour
}
sort.Strings(importPaths) // for determinism
for _, importPath := range importPaths {
dep := pkg.GetImport(importPath)
if dep == nil {
continue
dep, err := pkg.GetImport(ctx, importPath)
if err != nil {
return nil, err
}
act, err := dep.GetActionGraph(ctx, a)
if err != nil {
@@ -184,14 +184,6 @@ func (pkg *pkg) IsIllTyped() bool {
return pkg.types == nil && pkg.typesInfo == nil
}

func (pkg *pkg) GetImport(pkgPath string) source.Package {
if imp := pkg.imports[packagePath(pkgPath)]; imp != nil {
return imp
}
// Don't return a nil pointer because that still satisfies the interface.
return nil
}

func (pkg *pkg) SetDiagnostics(diags []source.Diagnostic) {
pkg.diagMu.Lock()
defer pkg.diagMu.Unlock()
3 internal/lsp/cache/session.go (vendored)

@@ -89,9 +89,6 @@ func (s *session) NewView(ctx context.Context, name string, folder span.URI) sou
packages: make(map[packageID]*metadata),
ids: make(map[packagePath]packageID),
},
pcache: &packageCache{
packages: make(map[packageID]*entry),
},
ignoredURIs: make(map[span.URI]struct{}),
}
// Preemptively build the builtin package,
56 internal/lsp/cache/view.go (vendored)

@@ -77,9 +77,6 @@ type view struct {
// mcache caches metadata for the packages of the opened files in a view.
mcache *metadataCache

// pcache caches type information for the packages of the opened files in a view.
pcache *packageCache

// builtinPkg is the AST package used to resolve builtin types.
builtinPkg *ast.Package

@@ -95,23 +92,15 @@ type metadataCache struct {
}

type metadata struct {
id packageID
pkgPath packagePath
name string
files []string
typesSizes types.Sizes
parents, children map[packageID]bool
}

type packageCache struct {
mu sync.Mutex
packages map[packageID]*entry
}

type entry struct {
pkg *pkg
err error
ready chan struct{} // closed to broadcast ready condition
id packageID
pkgPath packagePath
name string
files []span.URI
key string
typesSizes types.Sizes
parents map[packageID]bool
children map[packageID]*metadata
errors []packages.Error
}

func (v *view) Session() source.Session {
@@ -363,9 +352,6 @@ func (f *goFile) invalidateContent(ctx context.Context) {
f.view.mcache.mu.Lock()
defer f.view.mcache.mu.Unlock()

f.view.pcache.mu.Lock()
defer f.view.pcache.mu.Unlock()

f.handleMu.Lock()
defer f.handleMu.Unlock()

@@ -377,12 +363,12 @@ func (f *goFile) invalidateContent(ctx context.Context) {
// including any position and type information that depends on it.
func (f *goFile) invalidateAST(ctx context.Context) {
f.mu.Lock()
pkgs := f.pkgs
cphs := f.pkgs
f.mu.Unlock()

// Remove the package and all of its reverse dependencies from the cache.
for id, pkg := range pkgs {
if pkg != nil {
for id, cph := range cphs {
if cph != nil {
f.view.remove(ctx, id, map[packageID]struct{}{})
}
}
@@ -405,8 +391,8 @@ func (v *view) remove(ctx context.Context, id packageID, seen map[packageID]stru
}
// All of the files in the package may also be holding a pointer to the
// invalidated package.
for _, filename := range m.files {
f, err := v.findFile(span.FileURI(filename))
for _, uri := range m.files {
f, err := v.findFile(uri)
if err != nil {
log.Error(ctx, "cannot find file", err, telemetry.File.Of(f.URI()))
continue
@@ -417,10 +403,15 @@ func (v *view) remove(ctx context.Context, id packageID, seen map[packageID]stru
continue
}
gof.mu.Lock()
if pkg, ok := gof.pkgs[id]; ok {
// TODO: Ultimately, we shouldn't need this.
// Preemptively delete all of the cached keys if we are invalidating a package.
for _, ph := range pkg.files {
// TODO: Ultimately, we shouldn't need this.
if cph, ok := gof.pkgs[id]; ok {
// Delete the package handle from the store.
v.session.cache.store.Delete(checkPackageKey{
files: hashParseKeys(cph.Files()),
config: hashConfig(cph.Config()),
})
// Also, delete all of the cached ParseGoHandles.
for _, ph := range cph.Files() {
v.session.cache.store.Delete(parseKey{
file: ph.File().Identity(),
mode: ph.Mode(),
@@ -430,7 +421,6 @@ func (v *view) remove(ctx context.Context, id packageID, seen map[packageID]stru
delete(gof.pkgs, id)
gof.mu.Unlock()
}
delete(v.pcache.packages, id)
return
}
@ -219,8 +219,11 @@ func quickFixes(ctx context.Context, view source.View, gof source.GoFile) ([]pro
|
||||
// TODO: This is technically racy because the diagnostics provided by the code action
|
||||
// may not be the same as the ones that gopls is aware of.
|
||||
// We need to figure out some way to solve this problem.
|
||||
diags := gof.GetPackage(ctx).GetDiagnostics()
|
||||
for _, diag := range diags {
|
||||
pkg, err := gof.GetPackage(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, diag := range pkg.GetDiagnostics() {
|
||||
pdiag, err := toProtocolDiagnostic(ctx, view, diag)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
@ -23,7 +23,7 @@ import (
|
||||
errors "golang.org/x/xerrors"
|
||||
)
|
||||
|
||||
func analyze(ctx context.Context, v View, pkgs []Package, analyzers []*analysis.Analyzer) ([]*Action, error) {
|
||||
func analyze(ctx context.Context, v View, cphs []CheckPackageHandle, analyzers []*analysis.Analyzer) ([]*Action, error) {
|
||||
ctx, done := trace.StartSpan(ctx, "source.analyze")
|
||||
defer done()
|
||||
if ctx.Err() != nil {
|
||||
@ -33,7 +33,11 @@ func analyze(ctx context.Context, v View, pkgs []Package, analyzers []*analysis.
|
||||
// Build nodes for initial packages.
|
||||
var roots []*Action
|
||||
for _, a := range analyzers {
|
||||
for _, pkg := range pkgs {
|
||||
for _, cph := range cphs {
|
||||
pkg, err := cph.Check(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
root, err := pkg.GetActionGraph(ctx, a)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
@ -285,11 +285,10 @@ func Completion(ctx context.Context, view View, f GoFile, pos token.Pos, opts Co
|
||||
if file == nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
pkg := f.GetPackage(ctx)
|
||||
if pkg == nil || pkg.IsIllTyped() {
|
||||
return nil, nil, errors.Errorf("package for %s is ill typed", f.URI())
|
||||
pkg, err := f.GetPackage(ctx)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
// Completion is based on what precedes the cursor.
|
||||
// Find the path to the position before pos.
|
||||
path, _ := astutil.PathEnclosingInterval(file, pos-1, pos-1)
|
||||
|
@ -240,6 +240,7 @@ func qualifier(f *ast.File, pkg *types.Package, info *types.Info) types.Qualifie
|
||||
if imp.Name != nil {
|
||||
obj = info.Defs[imp.Name]
|
||||
} else {
|
||||
|
||||
obj = info.Implicits[imp]
|
||||
}
|
||||
if pkgname, ok := obj.(*types.PkgName); ok {
|
||||
|
@ -64,8 +64,14 @@ const (
|
||||
func Diagnostics(ctx context.Context, view View, f GoFile, disabledAnalyses map[string]struct{}) (map[span.URI][]Diagnostic, error) {
|
||||
ctx, done := trace.StartSpan(ctx, "source.Diagnostics", telemetry.File.Of(f.URI()))
|
||||
defer done()
|
||||
pkg := f.GetPackage(ctx)
|
||||
if pkg == nil {
|
||||
|
||||
cph, err := f.GetCheckPackageHandle(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
pkg, err := cph.Check(ctx)
|
||||
if err != nil {
|
||||
log.Error(ctx, "no package for file", err)
|
||||
return singleDiagnostic(f.URI(), "%s is not part of a package", f.URI()), nil
|
||||
}
|
||||
// Prepare the reports we will send for the files in this package.
|
||||
@ -85,16 +91,16 @@ func Diagnostics(ctx context.Context, view View, f GoFile, disabledAnalyses map[
|
||||
// Run diagnostics for the package that this URI belongs to.
|
||||
if !diagnostics(ctx, view, pkg, reports) {
|
||||
// If we don't have any list, parse, or type errors, run analyses.
|
||||
if err := analyses(ctx, view, pkg, disabledAnalyses, reports); err != nil {
|
||||
log.Error(ctx, "failed to run analyses", err, telemetry.File)
|
||||
if err := analyses(ctx, view, cph, disabledAnalyses, reports); err != nil {
|
||||
log.Error(ctx, "failed to run analyses", err, telemetry.File.Of(f.URI()))
|
||||
}
|
||||
}
|
||||
// Updates to the diagnostics for this package may need to be propagated.
|
||||
revDeps := f.GetActiveReverseDeps(ctx)
|
||||
for _, f := range revDeps {
|
||||
pkg := f.GetPackage(ctx)
|
||||
if pkg == nil {
|
||||
continue
|
||||
pkg, err := f.GetPackage(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
for _, fh := range pkg.GetHandles() {
|
||||
clearReports(view, reports, fh.File().Identity().URI)
|
||||
@ -158,9 +164,9 @@ func diagnostics(ctx context.Context, view View, pkg Package, reports map[span.U
|
||||
return nonEmptyDiagnostics
|
||||
}
|
||||
|
||||
func analyses(ctx context.Context, v View, pkg Package, disabledAnalyses map[string]struct{}, reports map[span.URI][]Diagnostic) error {
|
||||
func analyses(ctx context.Context, v View, cph CheckPackageHandle, disabledAnalyses map[string]struct{}, reports map[span.URI][]Diagnostic) error {
|
||||
// Type checking and parsing succeeded. Run analyses.
|
||||
if err := runAnalyses(ctx, v, pkg, disabledAnalyses, func(a *analysis.Analyzer, diag analysis.Diagnostic) error {
|
||||
if err := runAnalyses(ctx, v, cph, disabledAnalyses, func(a *analysis.Analyzer, diag analysis.Diagnostic) error {
|
||||
diagnostic, err := toDiagnostic(a, v, diag)
|
||||
if err != nil {
|
||||
return err
|
||||
@ -310,7 +316,7 @@ var Analyzers = []*analysis.Analyzer{
|
||||
unusedresult.Analyzer,
|
||||
}
|
||||
|
||||
func runAnalyses(ctx context.Context, v View, pkg Package, disabledAnalyses map[string]struct{}, report func(a *analysis.Analyzer, diag analysis.Diagnostic) error) error {
|
||||
func runAnalyses(ctx context.Context, v View, cph CheckPackageHandle, disabledAnalyses map[string]struct{}, report func(a *analysis.Analyzer, diag analysis.Diagnostic) error) error {
|
||||
var analyzers []*analysis.Analyzer
|
||||
for _, a := range Analyzers {
|
||||
if _, ok := disabledAnalyses[a.Name]; ok {
|
||||
@ -319,7 +325,7 @@ func runAnalyses(ctx context.Context, v View, pkg Package, disabledAnalyses map[
|
||||
analyzers = append(analyzers, a)
|
||||
}
|
||||
|
||||
roots, err := analyze(ctx, v, []Package{pkg}, analyzers)
|
||||
roots, err := analyze(ctx, v, []CheckPackageHandle{cph}, analyzers)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -342,6 +348,10 @@ func runAnalyses(ctx context.Context, v View, pkg Package, disabledAnalyses map[
|
||||
}
|
||||
sdiags = append(sdiags, sdiag)
|
||||
}
|
||||
pkg, err := cph.Check(ctx)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
pkg.SetDiagnostics(sdiags)
|
||||
}
|
||||
return nil
|
||||
|
@ -29,7 +29,10 @@ func Format(ctx context.Context, f GoFile, rng span.Range) ([]TextEdit, error) {
|
||||
if file == nil {
|
||||
return nil, err
|
||||
}
|
||||
pkg := f.GetPackage(ctx)
|
||||
pkg, err := f.GetPackage(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if hasListErrors(pkg.GetErrors()) || hasParseErrors(pkg, f.URI()) {
|
||||
// Even if this package has list or parse errors, this file may not
|
||||
// have any parse errors and can still be formatted. Using format.Node
|
||||
@ -78,9 +81,9 @@ func Imports(ctx context.Context, view View, f GoFile, rng span.Range) ([]TextEd
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
pkg := f.GetPackage(ctx)
|
||||
if pkg == nil || pkg.IsIllTyped() {
|
||||
return nil, errors.Errorf("no package for file %s", f.URI())
|
||||
pkg, err := f.GetPackage(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
if hasListErrors(pkg.GetErrors()) {
|
||||
return nil, errors.Errorf("%s has list errors, not running goimports", f.URI())
|
||||
@ -123,14 +126,13 @@ func AllImportsFixes(ctx context.Context, view View, f GoFile, rng span.Range) (
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
pkg := f.GetPackage(ctx)
|
||||
if pkg == nil || pkg.IsIllTyped() {
|
||||
return nil, nil, errors.Errorf("no package for file %s", f.URI())
|
||||
pkg, err := f.GetPackage(ctx)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
if hasListErrors(pkg.GetErrors()) {
|
||||
return nil, nil, errors.Errorf("%s has list errors, not running goimports", f.URI())
|
||||
}
|
||||
|
||||
options := &imports.Options{
|
||||
// Defaults.
|
||||
AllErrors: true,
|
||||
|
@ -48,14 +48,11 @@ func (i *IdentifierInfo) DeclarationRange() span.Range {
|
||||
// Identifier returns identifier information for a position
|
||||
// in a file, accounting for a potentially incomplete selector.
|
||||
func Identifier(ctx context.Context, f GoFile, pos token.Pos) (*IdentifierInfo, error) {
|
||||
pkg := f.GetPackage(ctx)
|
||||
if pkg == nil || pkg.IsIllTyped() {
|
||||
return nil, errors.Errorf("pkg for %s is ill-typed", f.URI())
|
||||
pkg, err := f.GetPackage(ctx)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
var (
|
||||
file *ast.File
|
||||
err error
|
||||
)
|
||||
var file *ast.File
|
||||
for _, ph := range pkg.GetHandles() {
|
||||
if ph.File().Identity().URI == f.URI() {
|
||||
file, err = ph.Cached(ctx)
|
||||
@ -303,9 +300,9 @@ func importSpec(ctx context.Context, f GoFile, fAST *ast.File, pkg Package, pos
pkg: pkg,
}
// Consider the "declaration" of an import spec to be the imported package.
importedPkg := pkg.GetImport(importPath)
if importedPkg == nil {
return nil, errors.Errorf("no import for %q", importPath)
importedPkg, err := pkg.GetImport(ctx, importPath)
if err != nil {
return nil, err
}
if importedPkg.GetSyntax(ctx) == nil {
return nil, errors.Errorf("no syntax for for %q", importPath)

@ -36,11 +36,11 @@ func (i *IdentifierInfo) References(ctx context.Context) ([]*ReferenceInfo, erro
return nil, errors.Errorf("no references for an import spec")
}

pkgs := i.File.GetPackages(ctx)
pkgs, err := i.File.GetPackages(ctx)
if err != nil {
return nil, err
}
for _, pkg := range pkgs {
if pkg == nil || pkg.IsIllTyped() {
return nil, errors.Errorf("package for %s is ill typed", i.File.URI())
}
info := pkg.GetTypesInfo()
if info == nil {
return nil, errors.Errorf("package %s has no types info", pkg.PkgPath())

@ -46,14 +46,13 @@ func (i *IdentifierInfo) Rename(ctx context.Context, newName string) (map[span.U
if !isValidIdentifier(i.Name) {
return nil, errors.Errorf("invalid identifier to rename: %q", i.Name)
}

if i.pkg == nil || i.pkg.IsIllTyped() {
return nil, errors.Errorf("package for %s is ill typed", i.File.URI())
}
// Do not rename builtin identifiers.
if i.decl.obj.Parent() == types.Universe {
return nil, errors.Errorf("cannot rename builtin %q", i.Name)
}
if i.pkg == nil || i.pkg.IsIllTyped() {
return nil, errors.Errorf("package for %s is ill typed", i.File.URI())
}
// Do not rename identifiers declared in another package.
if i.pkg.GetTypes() != i.decl.obj.Pkg() {
return nil, errors.Errorf("failed to rename because %q is declared in package %q", i.Name, i.decl.obj.Pkg().Name())
@ -75,7 +74,7 @@ func (i *IdentifierInfo) Rename(ctx context.Context, newName string) (map[span.U
packages: make(map[*types.Package]Package),
}
for _, from := range refs {
r.packages[from.pkg.GetTypes()] = from.pkg
r.packages[i.pkg.GetTypes()] = from.pkg
}

// Check that the renaming of the identifier is ok.
@ -198,9 +197,8 @@ func (r *renamer) updatePkgName(pkgName *types.PkgName) (*TextEdit, error) {
// Modify ImportSpec syntax to add or remove the Name as needed.
pkg := r.packages[pkgName.Pkg()]
_, path, _ := pathEnclosingInterval(r.ctx, r.fset, pkg, pkgName.Pos(), pkgName.Pos())

if len(path) < 2 {
return nil, errors.Errorf("failed to update PkgName for %s", pkgName.Name())
return nil, errors.Errorf("no path enclosing interval for %s", pkgName.Name())
}
spec, ok := path[1].(*ast.ImportSpec)
if !ok {

@ -13,6 +13,7 @@ import (
"go/token"
"go/types"
"reflect"
"strconv"
"strings"
"unicode"

@ -91,6 +92,9 @@ func (r *renamer) checkInPackageBlock(from types.Object) {
}

pkg := r.packages[from.Pkg()]
if pkg == nil {
return
}

// Check that in the package block, "init" is a function, and never referenced.
if r.to == "init" {
@ -204,7 +208,6 @@ func (r *renamer) checkInLexicalScope(from types.Object, pkg Package) {
})
}
}

// Check for sub-block conflict.
// Is there an intervening definition of r.to between
// the block defining 'from' and some reference to it?
@ -381,6 +384,9 @@ func (r *renamer) checkStructField(from *types.Var) {
// method) to its declaring struct (or interface), so we must
// ascend the AST.
pkg, path, _ := pathEnclosingInterval(r.ctx, r.fset, r.packages[from.Pkg()], from.Pos(), from.Pos())
if pkg == nil || path == nil {
return
}
// path matches this pattern:
// [Ident SelectorExpr? StarExpr? Field FieldList StructType ParenExpr* ... File]

@ -840,11 +846,15 @@ func pathEnclosingInterval(ctx context.Context, fset *token.FileSet, pkg Package
if imp == nil {
continue
}
impPkg := pkg.GetImport(imp.Path.Value)
if impPkg == nil {
importPath, err := strconv.Unquote(imp.Path.Value)
if err != nil {
continue
}
pkgs = append(pkgs, impPkg)
importPkg, err := pkg.GetImport(ctx, importPath)
if err != nil {
return nil, nil, false
}
pkgs = append(pkgs, importPkg)
}
}
for _, p := range pkgs {
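Again not from the commit, only a sketch of the import-resolution step the pathEnclosingInterval hunk above now performs, under the assumption that it lives in package source with strconv and go/ast imported; importedPackages is a hypothetical name.

func importedPackages(ctx context.Context, pkg Package, specs []*ast.ImportSpec) ([]Package, bool) {
	var pkgs []Package
	for _, imp := range specs {
		if imp == nil {
			continue
		}
		// Import paths are quoted in the AST; unquote before the lookup.
		importPath, err := strconv.Unquote(imp.Path.Value)
		if err != nil {
			continue
		}
		// GetImport now takes a context and reports a missing import as an error.
		importPkg, err := pkg.GetImport(ctx, importPath)
		if err != nil {
			return nil, false
		}
		pkgs = append(pkgs, importPkg)
	}
	return pkgs, true
}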
@ -34,11 +34,10 @@ func SignatureHelp(ctx context.Context, f GoFile, pos token.Pos) (*SignatureInfo
if file == nil {
return nil, err
}
pkg := f.GetPackage(ctx)
if pkg == nil || pkg.IsIllTyped() {
return nil, errors.Errorf("package for %s is ill typed", f.URI())
pkg, err := f.GetPackage(ctx)
if err != nil {
return nil, err
}

// Find a call expression surrounding the query position.
var callExpr *ast.CallExpr
path, _ := astutil.PathEnclosingInterval(file, pos, pos)

@ -532,7 +532,6 @@ func (r *runner) Rename(t *testing.T, data tests.Renames) {
t.Fatalf("failed to get token for %s: %v", spn.URI(), err)
}
pos := tok.Pos(spn.Start().Offset())

ident, err := source.Identifier(r.ctx, f.(source.GoFile), pos)
if err != nil {
t.Error(err)

@ -50,9 +50,9 @@ func DocumentSymbols(ctx context.Context, f GoFile) ([]Symbol, error) {
if file == nil {
return nil, err
}
pkg := f.GetPackage(ctx)
if pkg == nil || pkg.IsIllTyped() {
return nil, errors.Errorf("no package for %s", f.URI())
pkg, err := f.GetPackage(ctx)
if err != nil {
return nil, err
}
info := pkg.GetTypesInfo()
q := qualifier(file, pkg.GetTypes(), info)

@ -6,6 +6,7 @@ package source

import (
"context"
"fmt"
"go/ast"
"go/token"
"go/types"
@ -25,6 +26,10 @@ type FileIdentity struct {
Version string
}

func (identity FileIdentity) String() string {
return fmt.Sprintf("%s%s", identity.URI, identity.Version)
}

// FileHandle represents a handle to a specific version of a single file from
// a specific file system.
type FileHandle interface {
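A tiny illustrative example, not in the diff, of the new String method, assuming FileIdentity's URI field accepts a string-typed value; the values are made up. The concatenated URI+Version string is the kind of stable per-version key a memoize store can hash on.

func ExampleFileIdentity_String() {
	// Hypothetical identity; String concatenates URI and Version with no separator.
	id := FileIdentity{URI: "file:///tmp/main.go", Version: "v1"}
	fmt.Println(id.String())
	// Output: file:///tmp/main.gov1
}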
@ -103,6 +108,22 @@ const (
ParseFull
)

// CheckPackageHandle represents a handle to a specific version of a package.
// It is uniquely defined by the file handles that make up the package.
type CheckPackageHandle interface {
// ParseGoHandle returns a ParseGoHandle for which to get the package.
Files() []ParseGoHandle

// Config is the *packages.Config that the package metadata was loaded with.
Config() *packages.Config

// Check returns the type-checked Package for the CheckPackageHandle.
Check(ctx context.Context) (Package, error)

// Cached returns the Package for the CheckPackageHandle if it has already been stored.
Cached(ctx context.Context) (Package, error)
}
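To make the intended use of the interface above concrete, here is a hedged sketch, not part of the commit, of a caller that prefers an already-stored package and only re-runs type checking on a miss; typeCheck is a hypothetical name.

func typeCheck(ctx context.Context, cph CheckPackageHandle) (Package, error) {
	// Cached returns the Package only if a previous Check already stored it.
	if pkg, err := cph.Cached(ctx); err == nil {
		return pkg, nil
	}
	// Otherwise type-check the files in cph.Files() using cph.Config().
	return cph.Check(ctx)
}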
// Cache abstracts the core logic of dealing with the environment from the
// higher level logic that processes the information to produce results.
// The cache provides access to files and their contents, so the source
@ -120,10 +141,10 @@ type Cache interface {
// FileSet returns the shared fileset used by all files in the system.
FileSet() *token.FileSet

// Token returns a TokenHandle for the given file handle.
// TokenHandle returns a TokenHandle for the given file handle.
TokenHandle(fh FileHandle) TokenHandle

// ParseGo returns a ParseGoHandle for the given file handle.
// ParseGoHandle returns a ParseGoHandle for the given file handle.
ParseGoHandle(fh FileHandle, mode ParseMode) ParseGoHandle
}

@ -237,11 +258,17 @@ type GoFile interface {
// GetCachedPackage returns the cached package for the file, if any.
GetCachedPackage(ctx context.Context) (Package, error)

// GetPackage returns the package that this file belongs to.
GetPackage(ctx context.Context) Package
// GetPackage returns the CheckPackageHandle for the package that this file belongs to.
GetCheckPackageHandle(ctx context.Context) (CheckPackageHandle, error)

// GetPackages returns all of the packages that this file belongs to.
GetPackages(ctx context.Context) []Package
// GetPackages returns the CheckPackageHandles of the packages that this file belongs to.
GetCheckPackageHandles(ctx context.Context) ([]CheckPackageHandle, error)

// GetPackage returns the CheckPackageHandle for the package that this file belongs to.
GetPackage(ctx context.Context) (Package, error)

// GetPackages returns the CheckPackageHandles of the packages that this file belongs to.
GetPackages(ctx context.Context) ([]Package, error)

// GetActiveReverseDeps returns the active files belonging to the reverse
// dependencies of this file's package.
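One more hedged sketch, not from the diff, of how the handle-based GoFile accessors compose, assuming a helper in package source; packagesFor is a hypothetical name.

func packagesFor(ctx context.Context, f GoFile) ([]Package, error) {
	// Each handle corresponds to one package this file belongs to.
	cphs, err := f.GetCheckPackageHandles(ctx)
	if err != nil {
		return nil, err
	}
	var pkgs []Package
	for _, cph := range cphs {
		pkg, err := cph.Check(ctx)
		if err != nil {
			return nil, err
		}
		pkgs = append(pkgs, pkg)
	}
	return pkgs, nil
}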
@ -268,10 +295,14 @@ type Package interface {
GetTypesInfo() *types.Info
GetTypesSizes() types.Sizes
IsIllTyped() bool
GetActionGraph(ctx context.Context, a *analysis.Analyzer) (*Action, error)
GetImport(pkgPath string) Package
GetDiagnostics() []Diagnostic
SetDiagnostics(diags []Diagnostic)

// GetImport returns the CheckPackageHandle for a package imported by this package.
GetImport(ctx context.Context, pkgPath string) (Package, error)

// GetActionGraph returns the action graph for the given package.
GetActionGraph(ctx context.Context, a *analysis.Analyzer) (*Action, error)
}

// TextEdit represents a change to a section of a document.

@ -164,10 +164,9 @@ func (s *Server) didClose(ctx context.Context, params *protocol.DidCloseTextDocu
log.Error(ctx, "closing a non-Go file, no diagnostics to clear", nil, telemetry.File)
return nil
}
pkg := gof.GetPackage(ctx)
if pkg == nil {
log.Error(ctx, "no package available", nil, telemetry.File)
return nil
pkg, err := gof.GetPackage(ctx)
if err != nil {
return err
}
for _, ph := range pkg.GetHandles() {
// If other files from this package are open, don't clear.