internal/lsp: cache analysis using memoize package

This change moves to caching analysis results using the memoize
package. As a result, we do less work: results for packages that are
unchanged are no longer recomputed. The cache key for an analysis is
simply the key of the CheckPackageHandle combined with the name of the
analyzer.

Change-Id: I0e589ccf088ff1de5670401b7207ffa77a254ceb
Reviewed-on: https://go-review.googlesource.com/c/tools/+/200817
Run-TryBot: Rebecca Stambler <rstambler@golang.org>
Reviewed-by: Ian Cottrell <iancottrell@google.com>
Rebecca Stambler 2019-10-04 17:18:43 -04:00
parent 5fa5b1782b
commit b53505e708
7 changed files with 191 additions and 237 deletions
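
A rough, self-contained sketch of the caching scheme described in the commit
message, using a plain map in place of the internal memoize store that the
change binds through the snapshot (see actionKey and store.Bind in the diff
below). The names analysisKey, analysisCache, and the "printf" string are
illustrative only, not gopls identifiers.

    package main

    import (
        "crypto/sha1"
        "fmt"
        "sync"
    )

    // analysisKey combines the CheckPackageHandle key (a hash of the
    // type-checked package and its inputs) with the analyzer name, so an
    // entry is only reused while the package content is unchanged.
    func analysisKey(cphKey []byte, analyzerName string) string {
        sum := sha1.Sum([]byte(fmt.Sprintf("%s %x", analyzerName, cphKey)))
        return fmt.Sprintf("%x", sum)
    }

    // analysisCache memoizes one result per key: re-running an analyzer on
    // an unchanged package returns the stored result instead of recomputing it.
    type analysisCache struct {
        mu      sync.Mutex
        results map[string]interface{}
    }

    func (c *analysisCache) get(key string, compute func() interface{}) interface{} {
        c.mu.Lock()
        defer c.mu.Unlock()
        if r, ok := c.results[key]; ok {
            return r // cache hit: same package key and same analyzer
        }
        r := compute()
        c.results[key] = r
        return r
    }

    func main() {
        cache := &analysisCache{results: make(map[string]interface{})}
        cphKey := []byte("hash of package source, imports, and config")
        key := analysisKey(cphKey, "printf")
        first := cache.get(key, func() interface{} { return "diagnostics" })
        second := cache.get(key, func() interface{} { return "would recompute" })
        fmt.Println(first, second) // the cached value is returned both times
    }

In the change itself, the once-per-key computation is delegated to a
memoize.Handle bound in the snapshot's store (see actionHandle in the first
file below) rather than a hand-rolled map.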


@@ -1,10 +1,4 @@
// Copyright 2019 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// This file is largely based on go/analysis/internal/checker/checker.go.
package source
package cache
import (
"context"
@@ -13,69 +7,65 @@ import (
"go/types"
"reflect"
"sort"
"strings"
"sync"
"time"
"golang.org/x/sync/errgroup"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/internal/telemetry/trace"
"golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/memoize"
"golang.org/x/tools/internal/telemetry/log"
errors "golang.org/x/xerrors"
)
func analyze(ctx context.Context, v View, cphs []CheckPackageHandle, analyzers []*analysis.Analyzer) ([]*Action, error) {
ctx, done := trace.StartSpan(ctx, "source.analyze")
defer done()
func (s *snapshot) Analyze(ctx context.Context, id string, analyzers []*analysis.Analyzer) (map[*analysis.Analyzer][]*analysis.Diagnostic, error) {
var roots []*actionHandle
for _, a := range analyzers {
ah, err := s.actionHandle(ctx, packageID(id), source.ParseFull, a)
if err != nil {
return nil, err
}
ah.isroot = true
roots = append(roots, ah)
}
// Check if the context has been canceled before running the analyses.
if ctx.Err() != nil {
return nil, ctx.Err()
}
// Build nodes for initial packages.
var roots []*Action
for _, a := range analyzers {
for _, cph := range cphs {
pkg, err := cph.Check(ctx)
if err != nil {
return nil, err
}
root, err := pkg.GetActionGraph(ctx, a)
if err != nil {
return nil, err
}
root.isroot = true
root.view = v
roots = append(roots, root)
results := make(map[*analysis.Analyzer][]*analysis.Diagnostic)
for _, ah := range roots {
diagnostics, _, err := ah.analyze(ctx)
if err != nil {
log.Error(ctx, "no results", err)
continue
}
results[ah.analyzer] = diagnostics
}
// Execute the graph in parallel.
if err := execAll(ctx, v.Session().Cache().FileSet(), roots); err != nil {
return nil, err
}
return roots, nil
return results, nil
}
// An action represents one unit of analysis work: the application of
// one analysis to one package. Actions form a DAG, both within a
// package (as different analyzers are applied, either in sequence or
// parallel), and across packages (as dependencies are analyzed).
type Action struct {
once sync.Once
Analyzer *analysis.Analyzer
Pkg Package
Deps []*Action
diagnostics []analysis.Diagnostic
type actionHandle struct {
handle *memoize.Handle
analyzer *analysis.Analyzer
deps []*actionHandle
pkg *pkg
pass *analysis.Pass
isroot bool
objectFacts map[objectFactKey]analysis.Fact
packageFacts map[packageFactKey]analysis.Fact
inputs map[*analysis.Analyzer]interface{}
result interface{}
err error
duration time.Duration
view View
}
type actionData struct {
diagnostics []*analysis.Diagnostic
result interface{}
err error
}
type objectFactKey struct {
@@ -88,46 +78,105 @@ type packageFactKey struct {
typ reflect.Type
}
func (act *Action) String() string {
return fmt.Sprintf("%s@%s", act.Analyzer, act.Pkg.PkgPath())
func (s *snapshot) actionHandle(ctx context.Context, id packageID, mode source.ParseMode, a *analysis.Analyzer) (*actionHandle, error) {
cph := s.getPackage(id, mode)
if cph == nil {
return nil, errors.Errorf("no CheckPackageHandle for %s:%v", id, mode == source.ParseExported)
}
if len(cph.key) == 0 {
return nil, errors.Errorf("no key for CheckPackageHandle %s", id)
}
pkg, err := cph.check(ctx)
if err != nil {
return nil, err
}
ah := &actionHandle{
analyzer: a,
pkg: pkg,
}
// Add a dependency on each required analyzer.
for _, req := range a.Requires {
reqActionHandle, err := s.actionHandle(ctx, id, mode, req)
if err != nil {
return nil, err
}
ah.deps = append(ah.deps, reqActionHandle)
}
// An analysis that consumes/produces facts
// must run on the package's dependencies too.
if len(a.FactTypes) > 0 {
importIDs := make([]string, 0, len(cph.m.deps))
for _, importID := range cph.m.deps {
importIDs = append(importIDs, string(importID))
}
sort.Strings(importIDs) // for determinism
for _, importID := range importIDs {
depActionHandle, err := s.actionHandle(ctx, packageID(importID), source.ParseExported, a)
if err != nil {
return nil, err
}
ah.deps = append(ah.deps, depActionHandle)
}
}
h := s.view.session.cache.store.Bind(actionKey(a, cph), func(ctx context.Context) interface{} {
data := &actionData{}
data.diagnostics, data.result, data.err = ah.exec(ctx, s.view.session.cache.fset)
return data
})
ah.handle = h
return ah, nil
}
func execAll(ctx context.Context, fset *token.FileSet, actions []*Action) error {
func (ah *actionHandle) analyze(ctx context.Context) ([]*analysis.Diagnostic, interface{}, error) {
v := ah.handle.Get(ctx)
if v == nil {
return nil, nil, errors.Errorf("no analyses for %s", ah.pkg.ID())
}
data := v.(*actionData)
return data.diagnostics, data.result, data.err
}
func actionKey(a *analysis.Analyzer, cph *checkPackageHandle) string {
return hashContents([]byte(fmt.Sprintf("%s %s", a, string(cph.key))))
}
func (act *actionHandle) String() string {
return fmt.Sprintf("%s@%s", act.analyzer, act.pkg.PkgPath())
}
func execAll(ctx context.Context, fset *token.FileSet, actions []*actionHandle) (map[*actionHandle][]*analysis.Diagnostic, map[*actionHandle]interface{}, error) {
var (
mu sync.Mutex
diagnostics = make(map[*actionHandle][]*analysis.Diagnostic)
results = make(map[*actionHandle]interface{})
)
g, ctx := errgroup.WithContext(ctx)
for _, act := range actions {
act := act
g.Go(func() error {
return act.exec(ctx, fset)
d, r, err := act.analyze(ctx)
if err != nil {
return err
}
mu.Lock()
defer mu.Unlock()
diagnostics[act] = d
results[act] = r
return nil
})
}
return g.Wait()
return diagnostics, results, g.Wait()
}
func (act *Action) exec(ctx context.Context, fset *token.FileSet) error {
var err error
act.once.Do(func() {
err = act.execOnce(ctx, fset)
})
return err
}
func (act *Action) execOnce(ctx context.Context, fset *token.FileSet) error {
func (act *actionHandle) exec(ctx context.Context, fset *token.FileSet) (diagnostics []*analysis.Diagnostic, result interface{}, err error) {
// Analyze dependencies.
if err := execAll(ctx, fset, act.Deps); err != nil {
return err
}
// Report an error if any dependency failed.
var failed []string
for _, dep := range act.Deps {
if dep.err != nil {
failed = append(failed, fmt.Sprintf("%s: %v", dep.String(), dep.err))
}
}
if failed != nil {
sort.Strings(failed)
act.err = errors.Errorf("failed prerequisites: %s", strings.Join(failed, ", "))
return act.err
_, depResults, err := execAll(ctx, fset, act.deps)
if err != nil {
return nil, nil, err
}
// Plumb the output values of the dependencies
@@ -135,14 +184,13 @@ func (act *Action) execOnce(ctx context.Context, fset *token.FileSet) error {
inputs := make(map[*analysis.Analyzer]interface{})
act.objectFacts = make(map[objectFactKey]analysis.Fact)
act.packageFacts = make(map[packageFactKey]analysis.Fact)
for _, dep := range act.Deps {
if dep.Pkg == act.Pkg {
for _, dep := range act.deps {
if dep.pkg == act.pkg {
// Same package, different analysis (horizontal edge):
// in-memory outputs of prerequisite analyzers
// become inputs to this analysis pass.
inputs[dep.Analyzer] = dep.result
} else if dep.Analyzer == act.Analyzer { // (always true)
inputs[dep.analyzer] = depResults[dep]
} else if dep.analyzer == act.analyzer { // (always true)
// Same analysis, different package (vertical edge):
// serialized facts produced by prerequisite analysis
// become available to this analysis pass.
@@ -152,14 +200,14 @@ func (act *Action) execOnce(ctx context.Context, fset *token.FileSet) error {
// Run the analysis.
pass := &analysis.Pass{
Analyzer: act.Analyzer,
Analyzer: act.analyzer,
Fset: fset,
Files: act.Pkg.GetSyntax(ctx),
Pkg: act.Pkg.GetTypes(),
TypesInfo: act.Pkg.GetTypesInfo(),
TypesSizes: act.Pkg.GetTypesSizes(),
Files: act.pkg.GetSyntax(ctx),
Pkg: act.pkg.GetTypes(),
TypesInfo: act.pkg.GetTypesInfo(),
TypesSizes: act.pkg.GetTypesSizes(),
ResultOf: inputs,
Report: func(d analysis.Diagnostic) { act.diagnostics = append(act.diagnostics, d) },
Report: func(d analysis.Diagnostic) { diagnostics = append(diagnostics, &d) },
ImportObjectFact: act.importObjectFact,
ExportObjectFact: act.exportObjectFact,
ImportPackageFact: act.importPackageFact,
@@ -169,13 +217,13 @@ func (act *Action) execOnce(ctx context.Context, fset *token.FileSet) error {
}
act.pass = pass
if act.Pkg.IsIllTyped() {
act.err = errors.Errorf("analysis skipped due to errors in package: %v", act.Pkg.GetErrors())
if act.pkg.IsIllTyped() {
return nil, nil, errors.Errorf("analysis skipped due to errors in package: %v", act.pkg.GetErrors())
} else {
act.result, act.err = pass.Analyzer.Run(pass)
if act.err == nil {
if got, want := reflect.TypeOf(act.result), pass.Analyzer.ResultType; got != want {
act.err = errors.Errorf(
result, err = pass.Analyzer.Run(pass)
if err == nil {
if got, want := reflect.TypeOf(result), pass.Analyzer.ResultType; got != want {
err = errors.Errorf(
"internal error: on package %s, analyzer %s returned a result of type %v, but declared ResultType %v",
pass.Pkg.Path(), pass.Analyzer, got, want)
}
@@ -186,17 +234,17 @@ func (act *Action) execOnce(ctx context.Context, fset *token.FileSet) error {
pass.ExportObjectFact = nil
pass.ExportPackageFact = nil
return act.err
return diagnostics, result, err
}
// inheritFacts populates act.facts with
// those it obtains from its dependency, dep.
func inheritFacts(act, dep *Action) {
func inheritFacts(act, dep *actionHandle) {
for key, fact := range dep.objectFacts {
// Filter out facts related to objects
// that are irrelevant downstream
// (equivalently: not in the compiler export data).
if !exportedFrom(key.obj, dep.Pkg.GetTypes()) {
if !exportedFrom(key.obj, dep.pkg.types) {
continue
}
act.objectFacts[key] = fact
@@ -236,7 +284,7 @@ func exportedFrom(obj types.Object, pkg *types.Package) bool {
// importObjectFact implements Pass.ImportObjectFact.
// Given a non-nil pointer ptr of type *T, where *T satisfies Fact,
// importObjectFact copies the fact value to *ptr.
func (act *Action) importObjectFact(obj types.Object, ptr analysis.Fact) bool {
func (act *actionHandle) importObjectFact(obj types.Object, ptr analysis.Fact) bool {
if obj == nil {
panic("nil object")
}
@@ -249,14 +297,14 @@ func (act *Action) importObjectFact(obj types.Object, ptr analysis.Fact) bool {
}
// exportObjectFact implements Pass.ExportObjectFact.
func (act *Action) exportObjectFact(obj types.Object, fact analysis.Fact) {
func (act *actionHandle) exportObjectFact(obj types.Object, fact analysis.Fact) {
if act.pass.ExportObjectFact == nil {
panic(fmt.Sprintf("%s: Pass.ExportObjectFact(%s, %T) called after Run", act, obj, fact))
}
if obj.Pkg() != act.Pkg.GetTypes() {
if obj.Pkg() != act.pkg.types {
panic(fmt.Sprintf("internal error: in analysis %s of package %s: Fact.Set(%s, %T): can't set facts on objects belonging to another package",
act.Analyzer, act.Pkg, obj, fact))
act.analyzer, act.pkg.ID(), obj, fact))
}
key := objectFactKey{obj, factType(fact)}
@@ -264,7 +312,7 @@ func (act *Action) exportObjectFact(obj types.Object, fact analysis.Fact) {
}
// allObjectFacts implements Pass.AllObjectFacts.
func (act *Action) allObjectFacts() []analysis.ObjectFact {
func (act *actionHandle) allObjectFacts() []analysis.ObjectFact {
facts := make([]analysis.ObjectFact, 0, len(act.objectFacts))
for k := range act.objectFacts {
facts = append(facts, analysis.ObjectFact{Object: k.obj, Fact: act.objectFacts[k]})
@@ -275,7 +323,7 @@ func (act *Action) allObjectFacts() []analysis.ObjectFact {
// importPackageFact implements Pass.ImportPackageFact.
// Given a non-nil pointer ptr of type *T, where *T satisfies Fact,
// importPackageFact copies the fact value to *ptr.
func (act *Action) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool {
func (act *actionHandle) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool {
if pkg == nil {
panic("nil package")
}
@@ -288,7 +336,7 @@ func (act *Action) importPackageFact(pkg *types.Package, ptr analysis.Fact) bool
}
// exportPackageFact implements Pass.ExportPackageFact.
func (act *Action) exportPackageFact(fact analysis.Fact) {
func (act *actionHandle) exportPackageFact(fact analysis.Fact) {
if act.pass.ExportPackageFact == nil {
panic(fmt.Sprintf("%s: Pass.ExportPackageFact(%T) called after Run", act, fact))
}
@@ -306,7 +354,7 @@ func factType(fact analysis.Fact) reflect.Type {
}
// allPackageFacts implements Pass.AllPackageFacts.
func (act *Action) allPackageFacts() []analysis.PackageFact {
func (act *actionHandle) allPackageFacts() []analysis.PackageFact {
facts := make([]analysis.PackageFact, 0, len(act.packageFacts))
for k := range act.packageFacts {
facts = append(facts, analysis.PackageFact{Package: k.pkg, Fact: act.packageFacts[k]})
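
For reference, a standalone sketch of the fan-out pattern used by execAll
above: one goroutine per action via errgroup, results gathered under a mutex,
and the first error cancelling the remaining work through the shared context.
The task type and doWork function are hypothetical stand-ins for actionHandle
and its analyze method.

    package main

    import (
        "context"
        "fmt"
        "sync"

        "golang.org/x/sync/errgroup"
    )

    type task struct{ name string }

    // doWork stands in for actionHandle.analyze.
    func doWork(ctx context.Context, t *task) (string, error) {
        return "result for " + t.name, nil
    }

    func runAll(ctx context.Context, tasks []*task) (map[*task]string, error) {
        var (
            mu      sync.Mutex
            results = make(map[*task]string)
        )
        g, ctx := errgroup.WithContext(ctx)
        for _, t := range tasks {
            t := t // capture the loop variable for the goroutine
            g.Go(func() error {
                r, err := doWork(ctx, t)
                if err != nil {
                    return err // the shared ctx cancels the remaining goroutines
                }
                mu.Lock()
                defer mu.Unlock()
                results[t] = r
                return nil
            })
        }
        // Wait blocks until every goroutine returns and reports the first error.
        return results, g.Wait()
    }

    func main() {
        results, err := runAll(context.Background(), []*task{{name: "a"}, {name: "b"}})
        fmt.Println(len(results), err) // 2 <nil>
    }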


@@ -14,7 +14,6 @@ import (
"sort"
"sync"
"golang.org/x/tools/go/analysis"
"golang.org/x/tools/go/packages"
"golang.org/x/tools/internal/lsp/source"
"golang.org/x/tools/internal/lsp/telemetry"
@@ -279,7 +278,6 @@ func (imp *importer) typeCheck(ctx context.Context, cph *checkPackageHandle) (*p
Selections: make(map[*ast.SelectorExpr]*types.Selection),
Scopes: make(map[ast.Node]*types.Scope),
},
analyses: make(map[*analysis.Analyzer]*analysisEntry),
}
// If the package comes back with errors from `go list`,
// don't bother type-checking it.


@@ -8,7 +8,6 @@ import (
"context"
"go/ast"
"go/types"
"sort"
"sync"
"golang.org/x/tools/go/analysis"
@@ -33,13 +32,6 @@ type pkg struct {
typesInfo *types.Info
typesSizes types.Sizes
// The analysis cache holds analysis information for all the packages in a view.
// Each graph node (action) is one unit of analysis.
// Edges express package-to-package (vertical) dependencies,
// and analysis-to-analysis (horizontal) dependencies.
mu sync.Mutex
analyses map[*analysis.Analyzer]*analysisEntry
diagMu sync.Mutex
diagnostics map[*analysis.Analyzer][]source.Diagnostic
}
@@ -50,95 +42,6 @@ type pkg struct {
type packageID string
type packagePath string
type analysisEntry struct {
done chan struct{}
succeeded bool
*source.Action
}
func (p *pkg) GetActionGraph(ctx context.Context, a *analysis.Analyzer) (*source.Action, error) {
p.mu.Lock()
e, ok := p.analyses[a]
if ok {
// cache hit
p.mu.Unlock()
// wait for entry to become ready or the context to be cancelled
select {
case <-e.done:
// If the goroutine we are waiting on was cancelled, we should retry.
// If errors other than cancelation/timeout become possible, it may
// no longer be appropriate to always retry here.
if !e.succeeded {
return p.GetActionGraph(ctx, a)
}
case <-ctx.Done():
return nil, ctx.Err()
}
} else {
// cache miss
e = &analysisEntry{
done: make(chan struct{}),
Action: &source.Action{
Analyzer: a,
Pkg: p,
},
}
p.analyses[a] = e
p.mu.Unlock()
defer func() {
// If we got an error, clear out our defunct cache entry. We don't cache
// errors since they could depend on our dependencies, which can change.
// Currently the only possible error is context.Canceled, though, which
// should also not be cached.
if !e.succeeded {
p.mu.Lock()
delete(p.analyses, a)
p.mu.Unlock()
}
// Always close done so waiters don't get stuck.
close(e.done)
}()
// This goroutine becomes responsible for populating
// the entry and broadcasting its readiness.
// Add a dependency on each required analyzer.
for _, req := range a.Requires {
act, err := p.GetActionGraph(ctx, req)
if err != nil {
return nil, err
}
e.Deps = append(e.Deps, act)
}
// An analysis that consumes/produces facts
// must run on the package's dependencies too.
if len(a.FactTypes) > 0 {
importPaths := make([]string, 0, len(p.imports))
for importPath := range p.imports {
importPaths = append(importPaths, string(importPath))
}
sort.Strings(importPaths) // for determinism
for _, importPath := range importPaths {
dep, err := p.GetImport(ctx, importPath)
if err != nil {
return nil, err
}
act, err := dep.GetActionGraph(ctx, a)
if err != nil {
return nil, err
}
e.Deps = append(e.Deps, act)
}
}
e.succeeded = true
}
return e.Action, nil
}
func (p *pkg) ID() string {
return string(p.id)
}
@@ -208,11 +111,11 @@ func (p *pkg) SetDiagnostics(a *analysis.Analyzer, diags []source.Diagnostic) {
p.diagnostics[a] = diags
}
func (pkg *pkg) FindDiagnostic(pdiag protocol.Diagnostic) (*source.Diagnostic, error) {
pkg.diagMu.Lock()
defer pkg.diagMu.Unlock()
func (p *pkg) FindDiagnostic(pdiag protocol.Diagnostic) (*source.Diagnostic, error) {
p.diagMu.Lock()
defer p.diagMu.Unlock()
for a, diagnostics := range pkg.diagnostics {
for a, diagnostics := range p.diagnostics {
if a.Name != pdiag.Source {
continue
}


@@ -34,6 +34,10 @@ type snapshot struct {
packages map[packageKey]*checkPackageHandle
}
func (s *snapshot) View() source.View {
return s.view
}
func (s *snapshot) getImportedBy(id packageID) []packageID {
s.mu.Lock()
defer s.mu.Unlock()


@@ -41,7 +41,7 @@ func Diagnostics(ctx context.Context, view View, f File, disabledAnalyses map[st
ctx, done := trace.StartSpan(ctx, "source.Diagnostics", telemetry.File.Of(f.URI()))
defer done()
_, cphs, err := view.CheckPackageHandles(ctx, f)
snapshot, cphs, err := view.CheckPackageHandles(ctx, f)
if err != nil {
return nil, "", err
}
@@ -82,7 +82,7 @@ func Diagnostics(ctx context.Context, view View, f File, disabledAnalyses map[st
// Run diagnostics for the package that this URI belongs to.
if !diagnostics(ctx, view, pkg, reports) {
// If we don't have any list, parse, or type errors, run analyses.
if err := analyses(ctx, view, cph, disabledAnalyses, reports); err != nil {
if err := analyses(ctx, snapshot, cph, disabledAnalyses, reports); err != nil {
log.Error(ctx, "failed to run analyses", err, telemetry.File.Of(f.URI()))
}
}
@@ -188,14 +188,16 @@ func spanToRange(ctx context.Context, view View, pkg Package, spn span.Span, isT
return m.Range(spn)
}
func analyses(ctx context.Context, view View, cph CheckPackageHandle, disabledAnalyses map[string]struct{}, reports map[span.URI][]Diagnostic) error {
func analyses(ctx context.Context, snapshot Snapshot, cph CheckPackageHandle, disabledAnalyses map[string]struct{}, reports map[span.URI][]Diagnostic) error {
// Type checking and parsing succeeded. Run analyses.
if err := runAnalyses(ctx, view, cph, disabledAnalyses, func(a *analysis.Analyzer, diag analysis.Diagnostic) error {
diagnostic, err := toDiagnostic(ctx, view, diag, a.Name)
if err != nil {
return err
if err := runAnalyses(ctx, snapshot, cph, disabledAnalyses, func(diags []*analysis.Diagnostic, a *analysis.Analyzer) error {
for _, diag := range diags {
diagnostic, err := toDiagnostic(ctx, snapshot.View(), diag, a.Name)
if err != nil {
return err
}
addReport(snapshot.View(), reports, diagnostic.URI, diagnostic)
}
addReport(view, reports, diagnostic.URI, diagnostic)
return nil
}); err != nil {
return err
@@ -203,16 +205,13 @@ func analyses(ctx context.Context, view View, cph CheckPackageHandle, disabledAn
return nil
}
func toDiagnostic(ctx context.Context, view View, diag analysis.Diagnostic, category string) (Diagnostic, error) {
func toDiagnostic(ctx context.Context, view View, diag *analysis.Diagnostic, category string) (Diagnostic, error) {
r := span.NewRange(view.Session().Cache().FileSet(), diag.Pos, diag.End)
spn, err := r.Span()
if err != nil {
// The diagnostic has an invalid position, so we don't have a valid span.
return Diagnostic{}, err
}
if diag.Category != "" {
category += "." + category
}
f, err := view.GetFile(ctx, spn.URI())
if err != nil {
return Diagnostic{}, err
@@ -246,6 +245,9 @@ func toDiagnostic(ctx context.Context, view View, diag analysis.Diagnostic, cate
if onlyDeletions(fixes) {
tags = append(tags, protocol.Unnecessary)
}
if diag.Category != "" {
category += "." + diag.Category
}
return Diagnostic{
URI: spn.URI(),
Range: rng,
@@ -309,33 +311,28 @@ func singleDiagnostic(uri span.URI, format string, a ...interface{}) map[span.UR
}
}
func runAnalyses(ctx context.Context, view View, cph CheckPackageHandle, disabledAnalyses map[string]struct{}, report func(a *analysis.Analyzer, diag analysis.Diagnostic) error) error {
func runAnalyses(ctx context.Context, snapshot Snapshot, cph CheckPackageHandle, disabledAnalyses map[string]struct{}, report func(diag []*analysis.Diagnostic, a *analysis.Analyzer) error) error {
var analyzers []*analysis.Analyzer
for _, a := range view.Options().Analyzers {
for _, a := range snapshot.View().Options().Analyzers {
if _, ok := disabledAnalyses[a.Name]; ok {
continue
}
analyzers = append(analyzers, a)
}
roots, err := analyze(ctx, view, []CheckPackageHandle{cph}, analyzers)
diagnostics, err := snapshot.Analyze(ctx, cph.ID(), analyzers)
if err != nil {
return err
}
// Report diagnostics and errors from root analyzers.
for _, r := range roots {
var sdiags []Diagnostic
for _, diag := range r.diagnostics {
if r.err != nil {
// TODO(matloob): This isn't quite right: we might return a failed prerequisites error,
// which isn't super useful...
return r.err
}
if err := report(r.Analyzer, diag); err != nil {
return err
}
sdiag, err := toDiagnostic(ctx, view, diag, r.Analyzer.Name)
var sdiags []Diagnostic
for a, diags := range diagnostics {
if err := report(diags, a); err != nil {
return err
}
for _, diag := range diags {
sdiag, err := toDiagnostic(ctx, snapshot.View(), diag, a.Name)
if err != nil {
return err
}
@@ -345,7 +342,7 @@ func runAnalyses(ctx context.Context, view View, cph CheckPackageHandle, disable
if err != nil {
return err
}
pkg.SetDiagnostics(r.Analyzer, sdiags)
pkg.SetDiagnostics(a, sdiags)
}
return nil
}


@@ -13,7 +13,7 @@ type SuggestedFix struct {
Edits map[span.URI][]protocol.TextEdit
}
func suggestedFixes(ctx context.Context, view View, pkg Package, diag analysis.Diagnostic) ([]SuggestedFix, error) {
func suggestedFixes(ctx context.Context, view View, pkg Package, diag *analysis.Diagnostic) ([]SuggestedFix, error) {
var fixes []SuggestedFix
for _, fix := range diag.SuggestedFixes {
edits := make(map[span.URI][]protocol.TextEdit)


@@ -111,6 +111,7 @@ type CheckPackageHandle interface {
// Cached returns the Package for the CheckPackageHandle if it has already been stored.
Cached(ctx context.Context) (Package, error)
// MissingDependencies reports any unresolved imports.
MissingDependencies() []string
}
@@ -256,6 +257,12 @@ type View interface {
type Snapshot interface {
// Handle returns the FileHandle for the given file.
Handle(ctx context.Context, f File) FileHandle
// View returns the View associated with this snapshot.
View() View
// Analyze runs the analyses for the given package at this snapshot.
Analyze(ctx context.Context, id string, analyzers []*analysis.Analyzer) (map[*analysis.Analyzer][]*analysis.Diagnostic, error)
}
// File represents a source file of any type.
@@ -284,9 +291,6 @@ type Package interface {
// GetImport returns the Package for a package imported by this package.
GetImport(ctx context.Context, pkgPath string) (Package, error)
// GetActionGraph returns the action graph for the given package.
GetActionGraph(ctx context.Context, a *analysis.Analyzer) (*Action, error)
// FindFile returns the AST and type information for a file that may
// belong to or be part of a dependency of the given package.
FindFile(ctx context.Context, uri span.URI) (ParseGoHandle, Package, error)