2019-05-10 14:35:43 -06:00
|
|
|
// Copyright 2019 The Go Authors. All rights reserved.
|
|
|
|
// Use of this source code is governed by a BSD-style
|
|
|
|
// license that can be found in the LICENSE file.
|
|
|
|
|
2019-03-06 14:33:47 -07:00
|
|
|
package cache
|
|
|
|
|
|
|
|
import (
|
|
|
|
"context"
|
|
|
|
"go/ast"
|
|
|
|
"go/types"
|
|
|
|
"sort"
|
|
|
|
"sync"
|
|
|
|
|
|
|
|
"golang.org/x/tools/go/analysis"
|
|
|
|
"golang.org/x/tools/go/packages"
|
|
|
|
"golang.org/x/tools/internal/lsp/source"
|
2019-09-09 18:22:42 -06:00
|
|
|
"golang.org/x/tools/internal/span"
|
|
|
|
errors "golang.org/x/xerrors"
|
2019-03-06 14:33:47 -07:00
|
|
|
)
|
|
|
|
|
2019-05-14 21:04:23 -06:00
|
|
|
// pkg contains the type information needed by the source package.
type pkg struct {
	// view is the view this package was loaded in.
	view *view

	// ID and package path have their own types to avoid being used interchangeably.
	id      packageID
	pkgPath packagePath

	// files holds the parse handles for the package's Go source files.
	files []source.ParseGoHandle
	// errors records the errors reported while loading this package.
	errors []packages.Error
	// imports maps each imported package's path to its loaded *pkg.
	imports map[packagePath]*pkg
	// types, typesInfo, and typesSizes hold the type-checked results;
	// IsIllTyped reports whether any of them is nil.
	types      *types.Package
	typesInfo  *types.Info
	typesSizes types.Sizes

	// The analysis cache holds analysis information for all the packages in a view.
	// Each graph node (action) is one unit of analysis.
	// Edges express package-to-package (vertical) dependencies,
	// and analysis-to-analysis (horizontal) dependencies.
	//
	// mu guards analyses; see GetActionGraph for the memoization protocol.
	mu       sync.Mutex
	analyses map[*analysis.Analyzer]*analysisEntry

	// diagMu guards diagnostics, the per-analyzer cache of diagnostics
	// reported for this package (see SetDiagnostics/GetDiagnostics).
	diagMu      sync.Mutex
	diagnostics map[*analysis.Analyzer][]source.Diagnostic
}
|
|
|
|
|
2019-06-11 15:09:26 -06:00
|
|
|
// packageID and packagePath are distinct string types so that a package's
// ID and its import path cannot be confused with one another (or with a
// plain string) at compile time.
type (
	// packageID abstracts a package ID.
	packageID string

	// packagePath abstracts a package path.
	packagePath string
)
|
|
|
|
|
2019-03-06 14:33:47 -07:00
|
|
|
// analysisEntry is one memoized entry in pkg.analyses.
type analysisEntry struct {
	// done is closed when the entry's action graph has been fully built
	// (or building failed); waiters in GetActionGraph block on it.
	done chan struct{}
	// succeeded reports whether building completed without error.
	// When false after done is closed, waiters retry (see GetActionGraph).
	succeeded bool

	*source.Action
}
|
|
|
|
|
2019-05-14 21:04:23 -06:00
|
|
|
// GetActionGraph returns the analysis action graph rooted at analyzer a for
// this package, memoizing the result in pkg.analyses. The first caller for a
// given analyzer builds the entry while concurrent callers block on its done
// channel; a failed (cancelled) build is evicted so a later call can retry.
func (pkg *pkg) GetActionGraph(ctx context.Context, a *analysis.Analyzer) (*source.Action, error) {
	pkg.mu.Lock()
	e, ok := pkg.analyses[a]
	if ok {
		// cache hit
		pkg.mu.Unlock()

		// wait for entry to become ready or the context to be cancelled
		select {
		case <-e.done:
			// If the goroutine we are waiting on was cancelled, we should retry.
			// If errors other than cancelation/timeout become possible, it may
			// no longer be appropriate to always retry here.
			if !e.succeeded {
				return pkg.GetActionGraph(ctx, a)
			}
		case <-ctx.Done():
			return nil, ctx.Err()
		}
	} else {
		// cache miss
		e = &analysisEntry{
			done: make(chan struct{}),
			Action: &source.Action{
				Analyzer: a,
				Pkg:      pkg,
			},
		}
		// Publish the pending entry before unlocking so concurrent callers
		// wait on it rather than starting a duplicate build.
		pkg.analyses[a] = e
		pkg.mu.Unlock()

		defer func() {
			// If we got an error, clear out our defunct cache entry. We don't cache
			// errors since they could depend on our dependencies, which can change.
			// Currently the only possible error is context.Canceled, though, which
			// should also not be cached.
			if !e.succeeded {
				pkg.mu.Lock()
				delete(pkg.analyses, a)
				pkg.mu.Unlock()
			}

			// Always close done so waiters don't get stuck.
			close(e.done)
		}()

		// This goroutine becomes responsible for populating
		// the entry and broadcasting its readiness.

		// Add a dependency on each required analyzers.
		for _, req := range a.Requires {
			act, err := pkg.GetActionGraph(ctx, req)
			if err != nil {
				return nil, err
			}
			e.Deps = append(e.Deps, act)
		}

		// An analysis that consumes/produces facts
		// must run on the package's dependencies too.
		if len(a.FactTypes) > 0 {
			importPaths := make([]string, 0, len(pkg.imports))
			for importPath := range pkg.imports {
				importPaths = append(importPaths, string(importPath))
			}
			sort.Strings(importPaths) // for determinism
			for _, importPath := range importPaths {
				dep, err := pkg.GetImport(ctx, importPath)
				if err != nil {
					return nil, err
				}
				act, err := dep.GetActionGraph(ctx, a)
				if err != nil {
					return nil, err
				}
				e.Deps = append(e.Deps, act)
			}
		}
		// Mark success before the deferred cleanup runs so the entry is kept.
		e.succeeded = true
	}
	return e.Action, nil
}
|
|
|
|
|
2019-06-11 16:06:27 -06:00
|
|
|
func (pkg *pkg) ID() string {
|
|
|
|
return string(pkg.id)
|
|
|
|
}
|
|
|
|
|
2019-05-14 21:04:23 -06:00
|
|
|
func (pkg *pkg) PkgPath() string {
|
2019-06-11 15:09:26 -06:00
|
|
|
return string(pkg.pkgPath)
|
2019-05-01 20:46:07 -06:00
|
|
|
}
|
|
|
|
|
2019-08-06 16:51:17 -06:00
|
|
|
func (pkg *pkg) GetHandles() []source.ParseGoHandle {
|
|
|
|
return pkg.files
|
2019-03-06 14:33:47 -07:00
|
|
|
}
|
|
|
|
|
2019-07-11 19:05:55 -06:00
|
|
|
func (pkg *pkg) GetSyntax(ctx context.Context) []*ast.File {
|
2019-06-21 15:00:02 -06:00
|
|
|
var syntax []*ast.File
|
2019-07-11 19:05:55 -06:00
|
|
|
for _, ph := range pkg.files {
|
2019-09-09 22:36:39 -06:00
|
|
|
file, _, _ := ph.Cached(ctx)
|
2019-07-11 19:05:55 -06:00
|
|
|
if file != nil {
|
|
|
|
syntax = append(syntax, file)
|
2019-06-21 15:00:02 -06:00
|
|
|
}
|
2019-06-05 15:44:09 -06:00
|
|
|
}
|
|
|
|
return syntax
|
2019-03-06 14:33:47 -07:00
|
|
|
}
|
|
|
|
|
2019-05-14 21:04:23 -06:00
|
|
|
func (pkg *pkg) GetErrors() []packages.Error {
|
2019-03-06 14:33:47 -07:00
|
|
|
return pkg.errors
|
|
|
|
}
|
|
|
|
|
2019-05-14 21:04:23 -06:00
|
|
|
func (pkg *pkg) GetTypes() *types.Package {
|
2019-03-06 14:33:47 -07:00
|
|
|
return pkg.types
|
|
|
|
}
|
|
|
|
|
2019-05-14 21:04:23 -06:00
|
|
|
func (pkg *pkg) GetTypesInfo() *types.Info {
|
2019-03-06 14:33:47 -07:00
|
|
|
return pkg.typesInfo
|
|
|
|
}
|
2019-03-11 15:14:55 -06:00
|
|
|
|
2019-05-14 21:04:23 -06:00
|
|
|
func (pkg *pkg) GetTypesSizes() types.Sizes {
|
2019-03-29 14:39:22 -06:00
|
|
|
return pkg.typesSizes
|
|
|
|
}
|
|
|
|
|
2019-05-14 21:04:23 -06:00
|
|
|
func (pkg *pkg) IsIllTyped() bool {
|
2019-08-20 14:36:06 -06:00
|
|
|
return pkg.types == nil || pkg.typesInfo == nil || pkg.typesSizes == nil
|
2019-03-11 15:14:55 -06:00
|
|
|
}
|
2019-05-10 08:32:25 -06:00
|
|
|
|
2019-09-04 11:16:09 -06:00
|
|
|
func (pkg *pkg) SetDiagnostics(a *analysis.Analyzer, diags []source.Diagnostic) {
|
2019-06-18 16:25:22 -06:00
|
|
|
pkg.diagMu.Lock()
|
|
|
|
defer pkg.diagMu.Unlock()
|
2019-09-04 11:16:09 -06:00
|
|
|
if pkg.diagnostics == nil {
|
|
|
|
pkg.diagnostics = make(map[*analysis.Analyzer][]source.Diagnostic)
|
|
|
|
}
|
|
|
|
pkg.diagnostics[a] = diags
|
2019-06-18 16:25:22 -06:00
|
|
|
}
|
|
|
|
|
2019-06-20 14:57:45 -06:00
|
|
|
func (pkg *pkg) GetDiagnostics() []source.Diagnostic {
|
2019-06-18 16:25:22 -06:00
|
|
|
pkg.diagMu.Lock()
|
|
|
|
defer pkg.diagMu.Unlock()
|
2019-09-04 11:16:09 -06:00
|
|
|
|
|
|
|
var diags []source.Diagnostic
|
|
|
|
for _, d := range pkg.diagnostics {
|
|
|
|
diags = append(diags, d...)
|
|
|
|
}
|
|
|
|
return diags
|
2019-06-18 16:25:22 -06:00
|
|
|
}
|
2019-09-09 18:22:42 -06:00
|
|
|
|
2019-09-09 22:36:39 -06:00
|
|
|
func (p *pkg) FindFile(ctx context.Context, uri span.URI) (source.ParseGoHandle, *ast.File, source.Package, error) {
|
2019-09-09 18:22:42 -06:00
|
|
|
queue := []*pkg{p}
|
|
|
|
seen := make(map[string]bool)
|
|
|
|
|
|
|
|
for len(queue) > 0 {
|
|
|
|
pkg := queue[0]
|
|
|
|
queue = queue[1:]
|
|
|
|
seen[pkg.ID()] = true
|
|
|
|
|
|
|
|
for _, ph := range pkg.files {
|
|
|
|
if ph.File().Identity().URI == uri {
|
2019-09-09 22:36:39 -06:00
|
|
|
file, _, err := ph.Cached(ctx)
|
2019-09-09 18:22:42 -06:00
|
|
|
if file == nil {
|
|
|
|
return nil, nil, nil, err
|
|
|
|
}
|
2019-09-09 22:36:39 -06:00
|
|
|
return ph, file, pkg, nil
|
2019-09-09 18:22:42 -06:00
|
|
|
}
|
|
|
|
}
|
|
|
|
for _, dep := range pkg.imports {
|
|
|
|
if !seen[dep.ID()] {
|
|
|
|
queue = append(queue, dep)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return nil, nil, nil, errors.Errorf("no file for %s", uri)
|
|
|
|
}
|