internal/lsp: cache analysis using memoize package

This change moves to the approach of caching the analysis using the
memoize package. This means that we will do less work, as we no longer
need to recompute results that are unchanged. The cache key for an
analysis is simply the key of the CheckPackageHandle, along with the
name of the analyzer.

Change-Id: I0e589ccf088ff1de5670401b7207ffa77a254ceb
Reviewed-on: https://go-review.googlesource.com/c/tools/+/200817
Run-TryBot: Rebecca Stambler <rstambler@golang.org>
Reviewed-by: Ian Cottrell <iancottrell@google.com>

Parent: 5fa5b1782b
Commit: b53505e708
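To make the caching scheme described above concrete, here is a minimal, self-contained Go sketch of the idea: each analysis result is memoized under a key derived from the CheckPackageHandle's key plus the analyzer's name, so an unchanged package/analyzer pair is never re-analyzed. The store type, helper names, and example values below are illustrative assumptions for this sketch, not the actual internal/memoize API used by the change.

// Sketch only: a simplified stand-in for the memoize-based caching described
// above, not the real golang.org/x/tools/internal/memoize API.
package main

import (
	"crypto/sha1"
	"fmt"
	"sync"
)

// actionKey mirrors the keying idea in the CL: hash the CheckPackageHandle's
// key together with the analyzer's name.
func actionKey(analyzerName string, cphKey []byte) string {
	sum := sha1.Sum([]byte(fmt.Sprintf("%s %s", analyzerName, cphKey)))
	return fmt.Sprintf("%x", sum)
}

// store memoizes one computed value per key.
type store struct {
	mu      sync.Mutex
	results map[string]interface{}
}

func (s *store) bind(key string, compute func() interface{}) interface{} {
	s.mu.Lock()
	defer s.mu.Unlock()
	if v, ok := s.results[key]; ok {
		return v // cache hit: reuse the earlier result
	}
	v := compute()
	s.results[key] = v
	return v
}

func main() {
	s := &store{results: make(map[string]interface{})}
	// Hypothetical analyzer name and package-handle key, for illustration only.
	key := actionKey("unusedparams", []byte("checkPackageHandle-key-for-pkg"))

	first := s.bind(key, func() interface{} { return "diagnostics computed once" })
	// Same key again: the memoized value is returned, compute does not run.
	second := s.bind(key, func() interface{} { return "should not be recomputed" })
	fmt.Println(first, second)
}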
@@ -1,10 +1,4 @@
-// Copyright 2019 The Go Authors. All rights reserved.
-// Use of this source code is governed by a BSD-style
-// license that can be found in the LICENSE file.
-
-// This file is largely based on go/analysis/internal/checker/checker.go.
-
-package source
-
+package cache
+
 import (
 	"context"
@@ -13,69 +7,65 @@ import (
 	"go/types"
 	"reflect"
 	"sort"
-	"strings"
 	"sync"
-	"time"
 
 	"golang.org/x/sync/errgroup"
 	"golang.org/x/tools/go/analysis"
-	"golang.org/x/tools/internal/telemetry/trace"
+	"golang.org/x/tools/internal/lsp/source"
+	"golang.org/x/tools/internal/memoize"
+	"golang.org/x/tools/internal/telemetry/log"
 	errors "golang.org/x/xerrors"
 )
 
-func analyze(ctx context.Context, v View, cphs []CheckPackageHandle, analyzers []*analysis.Analyzer) ([]*Action, error) {
-	ctx, done := trace.StartSpan(ctx, "source.analyze")
-	defer done()
+func (s *snapshot) Analyze(ctx context.Context, id string, analyzers []*analysis.Analyzer) (map[*analysis.Analyzer][]*analysis.Diagnostic, error) {
+	var roots []*actionHandle
 
+	for _, a := range analyzers {
+		ah, err := s.actionHandle(ctx, packageID(id), source.ParseFull, a)
+		if err != nil {
+			return nil, err
+		}
+		ah.isroot = true
+		roots = append(roots, ah)
+	}
+
+	// Check if the context has been canceled before running the analyses.
 	if ctx.Err() != nil {
 		return nil, ctx.Err()
 	}
 
-	// Build nodes for initial packages.
-	var roots []*Action
-	for _, a := range analyzers {
-		for _, cph := range cphs {
-			pkg, err := cph.Check(ctx)
-			if err != nil {
-				return nil, err
-			}
-			root, err := pkg.GetActionGraph(ctx, a)
-			if err != nil {
-				return nil, err
-			}
-			root.isroot = true
-			root.view = v
-			roots = append(roots, root)
+	results := make(map[*analysis.Analyzer][]*analysis.Diagnostic)
+	for _, ah := range roots {
+		diagnostics, _, err := ah.analyze(ctx)
+		if err != nil {
+			log.Error(ctx, "no results", err)
+			continue
 		}
+		results[ah.analyzer] = diagnostics
 	}
-
-	// Execute the graph in parallel.
-	if err := execAll(ctx, v.Session().Cache().FileSet(), roots); err != nil {
-		return nil, err
-	}
-	return roots, nil
+	return results, nil
 }
 
 // An action represents one unit of analysis work: the application of
 // one analysis to one package. Actions form a DAG, both within a
 // package (as different analyzers are applied, either in sequence or
 // parallel), and across packages (as dependencies are analyzed).
-type Action struct {
-	once sync.Once
-	Analyzer *analysis.Analyzer
-	Pkg Package
-	Deps []*Action
-	diagnostics []analysis.Diagnostic
+type actionHandle struct {
+	handle *memoize.Handle
 
+	analyzer *analysis.Analyzer
+	deps []*actionHandle
+	pkg *pkg
 	pass *analysis.Pass
 	isroot bool
 	objectFacts map[objectFactKey]analysis.Fact
 	packageFacts map[packageFactKey]analysis.Fact
-	inputs map[*analysis.Analyzer]interface{}
-	result interface{}
-	err error
-	duration time.Duration
-	view View
+}
+
+type actionData struct {
+	diagnostics []*analysis.Diagnostic
+	result interface{}
+	err error
 }
 
 type objectFactKey struct {
@@ -88,46 +78,105 @@ type packageFactKey struct {
 	typ reflect.Type
 }
 
-func (act *Action) String() string {
-	return fmt.Sprintf("%s@%s", act.Analyzer, act.Pkg.PkgPath())
+func (s *snapshot) actionHandle(ctx context.Context, id packageID, mode source.ParseMode, a *analysis.Analyzer) (*actionHandle, error) {
+	cph := s.getPackage(id, mode)
+	if cph == nil {
+		return nil, errors.Errorf("no CheckPackageHandle for %s:%v", id, mode == source.ParseExported)
+	}
+	if len(cph.key) == 0 {
+		return nil, errors.Errorf("no key for CheckPackageHandle %s", id)
+	}
+	pkg, err := cph.check(ctx)
+	if err != nil {
+		return nil, err
+	}
+	ah := &actionHandle{
+		analyzer: a,
+		pkg: pkg,
+	}
+	// Add a dependency on each required analyzers.
+	for _, req := range a.Requires {
+		reqActionHandle, err := s.actionHandle(ctx, id, mode, req)
+		if err != nil {
+			return nil, err
+		}
+		ah.deps = append(ah.deps, reqActionHandle)
+	}
+	// An analysis that consumes/produces facts
+	// must run on the package's dependencies too.
+	if len(a.FactTypes) > 0 {
+		importIDs := make([]string, 0, len(cph.m.deps))
+		for _, importID := range cph.m.deps {
+			importIDs = append(importIDs, string(importID))
+		}
+		sort.Strings(importIDs) // for determinism
+		for _, importID := range importIDs {
+			depActionHandle, err := s.actionHandle(ctx, packageID(importID), source.ParseExported, a)
+			if err != nil {
+				return nil, err
+			}
+			ah.deps = append(ah.deps, depActionHandle)
+		}
+	}
+	h := s.view.session.cache.store.Bind(actionKey(a, cph), func(ctx context.Context) interface{} {
+		data := &actionData{}
+		data.diagnostics, data.result, data.err = ah.exec(ctx, s.view.session.cache.fset)
+		return data
+	})
+	ah.handle = h
+	return ah, nil
 }
 
-func execAll(ctx context.Context, fset *token.FileSet, actions []*Action) error {
+func (ah *actionHandle) analyze(ctx context.Context) ([]*analysis.Diagnostic, interface{}, error) {
+	v := ah.handle.Get(ctx)
+	if v == nil {
+		return nil, nil, errors.Errorf("no analyses for %s", ah.pkg.ID())
+	}
+	data := v.(*actionData)
+	return data.diagnostics, data.result, data.err
+}
+
+func actionKey(a *analysis.Analyzer, cph *checkPackageHandle) string {
+	return hashContents([]byte(fmt.Sprintf("%s %s", a, string(cph.key))))
+}
+
+func (act *actionHandle) String() string {
+	return fmt.Sprintf("%s@%s", act.analyzer, act.pkg.PkgPath())
+}
+
+func execAll(ctx context.Context, fset *token.FileSet, actions []*actionHandle) (map[*actionHandle][]*analysis.Diagnostic, map[*actionHandle]interface{}, error) {
+	var (
+		mu sync.Mutex
+		diagnostics = make(map[*actionHandle][]*analysis.Diagnostic)
+		results = make(map[*actionHandle]interface{})
+	)
+
 	g, ctx := errgroup.WithContext(ctx)
 	for _, act := range actions {
 		act := act
 		g.Go(func() error {
-			return act.exec(ctx, fset)
+			d, r, err := act.analyze(ctx)
+			if err != nil {
+				return err
+			}

+			mu.Lock()
+			defer mu.Unlock()
+
+			diagnostics[act] = d
+			results[act] = r
+
+			return nil
 		})
 	}
-	return g.Wait()
+	return diagnostics, results, g.Wait()
 }
 
-func (act *Action) exec(ctx context.Context, fset *token.FileSet) error {
-	var err error
-	act.once.Do(func() {
-		err = act.execOnce(ctx, fset)
-	})
-	return err
-}
-
-func (act *Action) execOnce(ctx context.Context, fset *token.FileSet) error {
+func (act *actionHandle) exec(ctx context.Context, fset *token.FileSet) (diagnostics []*analysis.Diagnostic, result interface{}, err error) {
 	// Analyze dependencies.
-	if err := execAll(ctx, fset, act.Deps); err != nil {
-		return err
-	}
-
-	// Report an error if any dependency failed.
-	var failed []string
-	for _, dep := range act.Deps {
-		if dep.err != nil {
-			failed = append(failed, fmt.Sprintf("%s: %v", dep.String(), dep.err))
-		}
-	}
-	if failed != nil {
-		sort.Strings(failed)
-		act.err = errors.Errorf("failed prerequisites: %s", strings.Join(failed, ", "))
-		return act.err
+	_, depResults, err := execAll(ctx, fset, act.deps)
+	if err != nil {
+		return nil, nil, err
 	}
 
 	// Plumb the output values of the dependencies
@@ -135,14 +184,13 @@ func (act *Action) execOnce(ctx context.Context, fset *token.FileSet) error {
 	inputs := make(map[*analysis.Analyzer]interface{})
 	act.objectFacts = make(map[objectFactKey]analysis.Fact)
 	act.packageFacts = make(map[packageFactKey]analysis.Fact)
-	for _, dep := range act.Deps {
-		if dep.Pkg == act.Pkg {
+	for _, dep := range act.deps {
+		if dep.pkg == act.pkg {
 			// Same package, different analysis (horizontal edge):
 			// in-memory outputs of prerequisite analyzers
 			// become inputs to this analysis pass.
-			inputs[dep.Analyzer] = dep.result
-		} else if dep.Analyzer == act.Analyzer { // (always true)
+			inputs[dep.analyzer] = depResults[dep]
+		} else if dep.analyzer == act.analyzer { // (always true)
 			// Same analysis, different package (vertical edge):
 			// serialized facts produced by prerequisite analysis
 			// become available to this analysis pass.
@@ -152,14 +200,14 @@ func (act *Action) execOnce(ctx context.Context, fset *token.FileSet) error {
 
 	// Run the analysis.
 	pass := &analysis.Pass{
-		Analyzer: act.Analyzer,
+		Analyzer: act.analyzer,
 		Fset: fset,
-		Files: act.Pkg.GetSyntax(ctx),
-		Pkg: act.Pkg.GetTypes(),
-		TypesInfo: act.Pkg.GetTypesInfo(),
-		TypesSizes: act.Pkg.GetTypesSizes(),
+		Files: act.pkg.GetSyntax(ctx),
+		Pkg: act.pkg.GetTypes(),
+		TypesInfo: act.pkg.GetTypesInfo(),
+		TypesSizes: act.pkg.GetTypesSizes(),
 		ResultOf: inputs,
-		Report: func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) },
+		Report: func(d analysis.Diagnostic) { diagnostics = append(diagnostics, &d) },
 		ImportObjectFact: act.importObjectFact,
 		ExportObjectFact: act.exportObjectFact,
 		ImportPackageFact: act.importPackageFact,
@@ -169,13 +217,13 @@ func (act *Action) execOnce(ctx context.Context, fset *token.FileSet) error {
 	}
 	act.pass = pass
 
-	if act.Pkg.IsIllTyped() {
-		act.err = errors.Errorf("analysis skipped due to errors in package: %v", act.Pkg.GetErrors())
+	if act.pkg.IsIllTyped() {
+		return nil, nil, errors.Errorf("analysis skipped due to errors in package: %v", act.pkg.GetErrors())
 	} else {
-		act.result, act.err = pass.Analyzer.Run(pass)
-		if act.err == nil {
-			if got, want := reflect.TypeOf(act.result), pass.Analyzer.ResultType; got != want {
-				act.err = errors.Errorf(
+		result, err = pass.Analyzer.Run(pass)
+		if err == nil {
+			if got, want := reflect.TypeOf(result), pass.Analyzer.ResultType; got != want {
+				err = errors.Errorf(
 					"internal error: on package %s, analyzer %s returned a result of type %v, but declared ResultType %v",
 					pass.Pkg.Path(), pass.Analyzer, got, want)
 			}
@@ -186,17 +234,17 @@ func (act *Action) execOnce(ctx context.Context, fset *token.FileSet) error {
 	pass.ExportObjectFact = nil
 	pass.ExportPackageFact = nil
 
-	return act.err
+	return diagnostics, result, err
 }
 
 // inheritFacts populates act.facts with
 // those it obtains from its dependency, dep.
-func inheritFacts(act, dep *Action) {
+func inheritFacts(act, dep *actionHandle) {
 	for key, fact := range dep.objectFacts {
 		// Filter out facts related to objects
 		// that are irrelevant downstream
 		// (equivalently: not in the compiler export data).
-		if !exportedFrom(key.obj, dep.Pkg.GetTypes()) {
+		if !exportedFrom(key.obj, dep.pkg.types) {
 			continue
 		}
 		act.objectFacts[key] = fact
@@ -236,7 +284,7 @@ func exportedFrom(obj types.Object, pkg *types.Package) bool {
 // importObjectFact implements Pass.ImportObjectFact.
 // Given a non-nil pointer ptr of type *T, where *T satisfies Fact,
 // importObjectFact copies the fact value to *ptr.
-func (act *Action) importObjectFact(obj types.Object, ptr analysis.Fact) bool {
+func (act *actionHandle) importObjectFact(obj types.Object, ptr analysis.Fact) bool {
 	if obj == nil {
 		panic("nil object")
 	}
@@ -249,14 +297,14 @@ func (act *Action) importObjectFact(obj types.Object, ptr analysis.Fact) bool {
 }
 
 // exportObjectFact implements Pass.ExportObjectFact.
-func (act *Action) exportObjectFact(obj types.Object, fact analysis.Fact) {
+func (act *actionHandle) exportObjectFact(obj types.Object, fact analysis.Fact) {
 	if act.pass.ExportObjectFact == nil {
 		panic(fmt.Sprintf("%s: Pass.ExportObjectFact(%s, %T) called after Run", act, obj, fact))
 	}
 
-	if obj.Pkg() != act.Pkg.GetTypes() {
+	if obj.Pkg() != act.pkg.types {
 		panic(fmt.Sprintf("internal error: in analysis %s of package %s: Fact.Set(%s, %T): can't set facts on objects belonging another package",
-			act.Analyzer, act.Pkg, obj, fact))
+			act.analyzer, act.pkg.ID(), obj, fact))
 	}
 
 	key := objectFactKey{obj, factType(fact)}
@@ -264,7 +312,7 @@ func (act *Action) exportObjectFact(obj types.Object, fact analysis.Fact) {
 }
 
 // allObjectFacts implements Pass.AllObjectFacts.
-func (act *Action) allObjectFacts() []analysis.ObjectFact {
+func (act *actionHandle) allObjectFacts() []analysis.ObjectFact {
 	facts := make([]analysis.ObjectFact, 0, len(act.objectFacts))
 	for k := range act.objectFacts {
 		facts = append(facts, analysis.ObjectFact{Object: k.obj, Fact: act.objectFacts[k]})
@@ -275,7 +323,7 @@ func (act *Action) allObjectFacts() []analysis.ObjectFact {
 // importPackageFact implements Pass.ImportPackageFact.
 // Given a non-nil pointer ptr of type *T, where *T satisfies Fact,
 // fact copies the fact value to *ptr.
-func (act *Action) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool {
+func (act *actionHandle) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool {
 	if pkg == nil {
 		panic("nil package")
 	}
@@ -288,7 +336,7 @@ func (act *Action) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool
 }
 
 // exportPackageFact implements Pass.ExportPackageFact.
-func (act *Action) exportPackageFact(fact analysis.Fact) {
+func (act *actionHandle) exportPackageFact(fact analysis.Fact) {
 	if act.pass.ExportPackageFact == nil {
 		panic(fmt.Sprintf("%s: Pass.ExportPackageFact(%T) called after Run", act, fact))
 	}
@@ -306,7 +354,7 @@ func factType(fact analysis.Fact) reflect.Type {
 }
 
 // allObjectFacts implements Pass.AllObjectFacts.
-func (act *Action) allPackageFacts() []analysis.PackageFact {
+func (act *actionHandle) allPackageFacts() []analysis.PackageFact {
 	facts := make([]analysis.PackageFact, 0, len(act.packageFacts))
 	for k := range act.packageFacts {
 		facts = append(facts, analysis.PackageFact{Package: k.pkg, Fact: act.packageFacts[k]})
internal/lsp/cache/check.go (2 changes)

@@ -14,7 +14,6 @@ import (
 	"sort"
 	"sync"
 
-	"golang.org/x/tools/go/analysis"
 	"golang.org/x/tools/go/packages"
 	"golang.org/x/tools/internal/lsp/source"
 	"golang.org/x/tools/internal/lsp/telemetry"
@@ -279,7 +278,6 @@ func (imp *importer) typeCheck(ctx context.Context, cph *checkPackageHandle) (*p
 			Selections: make(map[*ast.SelectorExpr]*types.Selection),
 			Scopes: make(map[ast.Node]*types.Scope),
 		},
-		analyses: make(map[*analysis.Analyzer]*analysisEntry),
 	}
 	// If the package comes back with errors from `go list`,
 	// don't bother type-checking it.
internal/lsp/cache/pkg.go (105 changes)

@@ -8,7 +8,6 @@ import (
 	"context"
 	"go/ast"
 	"go/types"
-	"sort"
 	"sync"
 
 	"golang.org/x/tools/go/analysis"
@@ -33,13 +32,6 @@ type pkg struct {
 	typesInfo *types.Info
 	typesSizes types.Sizes
 
-	// The analysis cache holds analysis information for all the packages in a view.
-	// Each graph node (action) is one unit of analysis.
-	// Edges express package-to-package (vertical) dependencies,
-	// and analysis-to-analysis (horizontal) dependencies.
-	mu sync.Mutex
-	analyses map[*analysis.Analyzer]*analysisEntry
-
 	diagMu sync.Mutex
 	diagnostics map[*analysis.Analyzer][]source.Diagnostic
 }
@@ -50,95 +42,6 @@ type pkg struct {
 type packageID string
 type packagePath string
 
-type analysisEntry struct {
-	done chan struct{}
-	succeeded bool
-	*source.Action
-}
-
-func (p *pkg) GetActionGraph(ctx context.Context, a *analysis.Analyzer) (*source.Action, error) {
-	p.mu.Lock()
-	e, ok := p.analyses[a]
-	if ok {
-		// cache hit
-		p.mu.Unlock()
-
-		// wait for entry to become ready or the context to be cancelled
-		select {
-		case <-e.done:
-			// If the goroutine we are waiting on was cancelled, we should retry.
-			// If errors other than cancelation/timeout become possible, it may
-			// no longer be appropriate to always retry here.
-			if !e.succeeded {
-				return p.GetActionGraph(ctx, a)
-			}
-		case <-ctx.Done():
-			return nil, ctx.Err()
-		}
-	} else {
-		// cache miss
-		e = &analysisEntry{
-			done: make(chan struct{}),
-			Action: &source.Action{
-				Analyzer: a,
-				Pkg: p,
-			},
-		}
-		p.analyses[a] = e
-		p.mu.Unlock()
-
-		defer func() {
-			// If we got an error, clear out our defunct cache entry. We don't cache
-			// errors since they could depend on our dependencies, which can change.
-			// Currently the only possible error is context.Canceled, though, which
-			// should also not be cached.
-			if !e.succeeded {
-				p.mu.Lock()
-				delete(p.analyses, a)
-				p.mu.Unlock()
-			}
-
-			// Always close done so waiters don't get stuck.
-			close(e.done)
-		}()
-
-		// This goroutine becomes responsible for populating
-		// the entry and broadcasting its readiness.
-
-		// Add a dependency on each required analyzers.
-		for _, req := range a.Requires {
-			act, err := p.GetActionGraph(ctx, req)
-			if err != nil {
-				return nil, err
-			}
-			e.Deps = append(e.Deps, act)
-		}
-
-		// An analysis that consumes/produces facts
-		// must run on the package's dependencies too.
-		if len(a.FactTypes) > 0 {
-			importPaths := make([]string, 0, len(p.imports))
-			for importPath := range p.imports {
-				importPaths = append(importPaths, string(importPath))
-			}
-			sort.Strings(importPaths) // for determinism
-			for _, importPath := range importPaths {
-				dep, err := p.GetImport(ctx, importPath)
-				if err != nil {
-					return nil, err
-				}
-				act, err := dep.GetActionGraph(ctx, a)
-				if err != nil {
-					return nil, err
-				}
-				e.Deps = append(e.Deps, act)
-			}
-		}
-		e.succeeded = true
-	}
-	return e.Action, nil
-}
-
 func (p *pkg) ID() string {
 	return string(p.id)
 }
@@ -208,11 +111,11 @@ func (p *pkg) SetDiagnostics(a *analysis.Analyzer, diags []source.Diagnostic) {
 	p.diagnostics[a] = diags
 }
 
-func (pkg *pkg) FindDiagnostic(pdiag protocol.Diagnostic) (*source.Diagnostic, error) {
-	pkg.diagMu.Lock()
-	defer pkg.diagMu.Unlock()
+func (p *pkg) FindDiagnostic(pdiag protocol.Diagnostic) (*source.Diagnostic, error) {
+	p.diagMu.Lock()
+	defer p.diagMu.Unlock()
 
-	for a, diagnostics := range pkg.diagnostics {
+	for a, diagnostics := range p.diagnostics {
 		if a.Name != pdiag.Source {
 			continue
 		}
internal/lsp/cache/snapshot.go (4 changes)

@@ -34,6 +34,10 @@ type snapshot struct {
 	packages map[packageKey]*checkPackageHandle
 }
 
+func (s *snapshot) View() source.View {
+	return s.view
+}
+
 func (s *snapshot) getImportedBy(id packageID) []packageID {
 	s.mu.Lock()
 	defer s.mu.Unlock()
@@ -41,7 +41,7 @@ func Diagnostics(ctx context.Context, view View, f File, disabledAnalyses map[st
 	ctx, done := trace.StartSpan(ctx, "source.Diagnostics", telemetry.File.Of(f.URI()))
 	defer done()
 
-	_, cphs, err := view.CheckPackageHandles(ctx, f)
+	snapshot, cphs, err := view.CheckPackageHandles(ctx, f)
 	if err != nil {
 		return nil, "", err
 	}
@@ -82,7 +82,7 @@ func Diagnostics(ctx context.Context, view View, f File, disabledAnalyses map[st
 	// Run diagnostics for the package that this URI belongs to.
 	if !diagnostics(ctx, view, pkg, reports) {
 		// If we don't have any list, parse, or type errors, run analyses.
-		if err := analyses(ctx, view, cph, disabledAnalyses, reports); err != nil {
+		if err := analyses(ctx, snapshot, cph, disabledAnalyses, reports); err != nil {
 			log.Error(ctx, "failed to run analyses", err, telemetry.File.Of(f.URI()))
 		}
 	}
@@ -188,14 +188,16 @@ func spanToRange(ctx context.Context, view View, pkg Package, spn span.Span, isT
 	return m.Range(spn)
 }
 
-func analyses(ctx context.Context, view View, cph CheckPackageHandle, disabledAnalyses map[string]struct{}, reports map[span.URI][]Diagnostic) error {
+func analyses(ctx context.Context, snapshot Snapshot, cph CheckPackageHandle, disabledAnalyses map[string]struct{}, reports map[span.URI][]Diagnostic) error {
 	// Type checking and parsing succeeded. Run analyses.
-	if err := runAnalyses(ctx, view, cph, disabledAnalyses, func(a *analysis.Analyzer, diag analysis.Diagnostic) error {
-		diagnostic, err := toDiagnostic(ctx, view, diag, a.Name)
-		if err != nil {
-			return err
+	if err := runAnalyses(ctx, snapshot, cph, disabledAnalyses, func(diags []*analysis.Diagnostic, a *analysis.Analyzer) error {
+		for _, diag := range diags {
+			diagnostic, err := toDiagnostic(ctx, snapshot.View(), diag, a.Name)
+			if err != nil {
+				return err
+			}
+			addReport(snapshot.View(), reports, diagnostic.URI, diagnostic)
 		}
-		addReport(view, reports, diagnostic.URI, diagnostic)
 		return nil
 	}); err != nil {
 		return err
@@ -203,16 +205,13 @@ func analyses(ctx context.Context, view View, cph CheckPackageHandle, disabledAn
 	return nil
 }
 
-func toDiagnostic(ctx context.Context, view View, diag analysis.Diagnostic, category string) (Diagnostic, error) {
+func toDiagnostic(ctx context.Context, view View, diag *analysis.Diagnostic, category string) (Diagnostic, error) {
 	r := span.NewRange(view.Session().Cache().FileSet(), diag.Pos, diag.End)
 	spn, err := r.Span()
 	if err != nil {
 		// The diagnostic has an invalid position, so we don't have a valid span.
 		return Diagnostic{}, err
 	}
-	if diag.Category != "" {
-		category += "." + category
-	}
 	f, err := view.GetFile(ctx, spn.URI())
 	if err != nil {
 		return Diagnostic{}, err
@@ -246,6 +245,9 @@ func toDiagnostic(ctx context.Context, view View, diag analysis.Diagnostic, cate
 	if onlyDeletions(fixes) {
 		tags = append(tags, protocol.Unnecessary)
 	}
+	if diag.Category != "" {
+		category += "." + diag.Category
+	}
 	return Diagnostic{
 		URI: spn.URI(),
 		Range: rng,
@@ -309,33 +311,28 @@ func singleDiagnostic(uri span.URI, format string, a ...interface{}) map[span.UR
 	}
 }
 
-func runAnalyses(ctx context.Context, view View, cph CheckPackageHandle, disabledAnalyses map[string]struct{}, report func(a *analysis.Analyzer, diag analysis.Diagnostic) error) error {
+func runAnalyses(ctx context.Context, snapshot Snapshot, cph CheckPackageHandle, disabledAnalyses map[string]struct{}, report func(diag []*analysis.Diagnostic, a *analysis.Analyzer) error) error {
 	var analyzers []*analysis.Analyzer
-	for _, a := range view.Options().Analyzers {
+	for _, a := range snapshot.View().Options().Analyzers {
 		if _, ok := disabledAnalyses[a.Name]; ok {
 			continue
 		}
 		analyzers = append(analyzers, a)
 	}
 
-	roots, err := analyze(ctx, view, []CheckPackageHandle{cph}, analyzers)
+	diagnostics, err := snapshot.Analyze(ctx, cph.ID(), analyzers)
 	if err != nil {
 		return err
 	}
 
 	// Report diagnostics and errors from root analyzers.
-	for _, r := range roots {
-		var sdiags []Diagnostic
-		for _, diag := range r.diagnostics {
-			if r.err != nil {
-				// TODO(matloob): This isn't quite right: we might return a failed prerequisites error,
-				// which isn't super useful...
-				return r.err
-			}
-			if err := report(r.Analyzer, diag); err != nil {
-				return err
-			}
-			sdiag, err := toDiagnostic(ctx, view, diag, r.Analyzer.Name)
+	var sdiags []Diagnostic
+	for a, diags := range diagnostics {
+		if err := report(diags, a); err != nil {
+			return err
+		}
+		for _, diag := range diags {
+			sdiag, err := toDiagnostic(ctx, snapshot.View(), diag, a.Name)
 			if err != nil {
 				return err
 			}
@@ -345,7 +342,7 @@ func runAnalyses(ctx context.Context, view View, cph CheckPackageHandle, disable
 		if err != nil {
 			return err
 		}
-		pkg.SetDiagnostics(r.Analyzer, sdiags)
+		pkg.SetDiagnostics(a, sdiags)
 	}
 	return nil
 }

@@ -13,7 +13,7 @@ type SuggestedFix struct {
 	Edits map[span.URI][]protocol.TextEdit
 }
 
-func suggestedFixes(ctx context.Context, view View, pkg Package, diag analysis.Diagnostic) ([]SuggestedFix, error) {
+func suggestedFixes(ctx context.Context, view View, pkg Package, diag *analysis.Diagnostic) ([]SuggestedFix, error) {
 	var fixes []SuggestedFix
 	for _, fix := range diag.SuggestedFixes {
 		edits := make(map[span.URI][]protocol.TextEdit)

@@ -111,6 +111,7 @@ type CheckPackageHandle interface {
 	// Cached returns the Package for the CheckPackageHandle if it has already been stored.
 	Cached(ctx context.Context) (Package, error)
 
+	// MissingDependencies reports any unresolved imports.
 	MissingDependencies() []string
 }
@@ -256,6 +257,12 @@ type View interface {
 type Snapshot interface {
 	// Handle returns the FileHandle for the given file.
 	Handle(ctx context.Context, f File) FileHandle
 
+	// View returns the View associated with this snapshot.
+	View() View
+
+	// Analyze runs the analyses for the given package at this snapshot.
+	Analyze(ctx context.Context, id string, analyzers []*analysis.Analyzer) (map[*analysis.Analyzer][]*analysis.Diagnostic, error)
 }
 
 // File represents a source file of any type.
@@ -284,9 +291,6 @@ type Package interface {
 	// GetImport returns the CheckPackageHandle for a package imported by this package.
 	GetImport(ctx context.Context, pkgPath string) (Package, error)
 
-	// GetActionGraph returns the action graph for the given package.
-	GetActionGraph(ctx context.Context, a *analysis.Analyzer) (*Action, error)
-
 	// FindFile returns the AST and type information for a file that may
 	// belong to or be part of a dependency of the given package.
 	FindFile(ctx context.Context, uri span.URI) (ParseGoHandle, Package, error)