2019-03-04 15:01:39 -07:00
|
|
|
package cache
|
|
|
|
|
|
|
|
import (
|
2019-03-05 15:30:44 -07:00
|
|
|
"context"
|
2019-03-04 15:01:39 -07:00
|
|
|
"fmt"
|
|
|
|
"go/ast"
|
2019-03-04 16:01:51 -07:00
|
|
|
"go/parser"
|
2019-03-04 15:01:39 -07:00
|
|
|
"go/scanner"
|
|
|
|
"go/types"
|
|
|
|
"io/ioutil"
|
|
|
|
"log"
|
|
|
|
"os"
|
|
|
|
"path/filepath"
|
|
|
|
"strings"
|
|
|
|
"sync"
|
|
|
|
|
|
|
|
"golang.org/x/tools/go/packages"
|
|
|
|
"golang.org/x/tools/internal/lsp/source"
|
|
|
|
)
|
|
|
|
|
2019-03-05 15:30:44 -07:00
|
|
|
// parse type-checks the package containing the file at the given URI,
// first applying any queued content changes and reloading package
// metadata if the file's imports changed.
//
// parse holds v.mcache.mu for its entire duration, so the metadata reads
// below are consistent with the checkMetadata/link updates.
func (v *View) parse(ctx context.Context, uri source.URI) error {
	v.mcache.mu.Lock()
	defer v.mcache.mu.Unlock()

	// Apply any queued-up content changes.
	if err := v.applyContentChanges(ctx); err != nil {
		return err
	}

	f := v.files[uri]

	// This should never happen.
	if f == nil {
		return fmt.Errorf("no file for %v", uri)
	}
	// If the package for the file has not been invalidated by the application
	// of the pending changes, there is no need to continue.
	if f.isPopulated() {
		return nil
	}
	// Check if the file's imports have changed. If they have, update the
	// metadata by calling packages.Load.
	if err := v.checkMetadata(ctx, f); err != nil {
		return err
	}
	if f.meta == nil {
		return fmt.Errorf("no metadata found for %v", uri)
	}
	// Start prefetching direct imports.
	// Errors from these prefetches are deliberately dropped; results land
	// in v.pcache and are picked up later by the type checker's Importer.
	for importPath := range f.meta.children {
		go v.Import(importPath)
	}
	// Type-check package.
	pkg, err := v.typeCheck(f.meta.pkgPath)
	if pkg == nil || pkg.Types == nil {
		return err
	}
	// Add every file in this package to our cache.
	v.cachePackage(pkg)

	// If we still have not found the package for the file, something is wrong.
	if f.pkg == nil {
		return fmt.Errorf("no package found for %v", uri)
	}
	return nil
}
|
|
|
|
|
2019-03-04 15:45:01 -07:00
|
|
|
func (v *View) cachePackage(pkg *packages.Package) {
|
|
|
|
for _, file := range pkg.Syntax {
|
|
|
|
// TODO: If a file is in multiple packages, which package do we store?
|
|
|
|
if !file.Pos().IsValid() {
|
|
|
|
log.Printf("invalid position for file %v", file.Name)
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
tok := v.Config.Fset.File(file.Pos())
|
|
|
|
if tok == nil {
|
|
|
|
log.Printf("no token.File for %v", file.Name)
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
fURI := source.ToURI(tok.Name())
|
|
|
|
f := v.getFile(fURI)
|
|
|
|
f.token = tok
|
|
|
|
f.ast = file
|
2019-03-04 16:01:51 -07:00
|
|
|
f.imports = f.ast.Imports
|
2019-03-04 15:45:01 -07:00
|
|
|
f.pkg = pkg
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2019-03-05 15:30:44 -07:00
|
|
|
func (v *View) checkMetadata(ctx context.Context, f *File) error {
|
2019-03-04 16:01:51 -07:00
|
|
|
filename, err := f.URI.Filename()
|
2019-03-04 15:45:01 -07:00
|
|
|
if err != nil {
|
2019-03-05 15:30:44 -07:00
|
|
|
return err
|
2019-03-04 16:01:51 -07:00
|
|
|
}
|
2019-03-05 15:30:44 -07:00
|
|
|
if v.reparseImports(ctx, f, filename) {
|
2019-03-04 16:01:51 -07:00
|
|
|
cfg := v.Config
|
|
|
|
cfg.Mode = packages.LoadImports
|
|
|
|
pkgs, err := packages.Load(&cfg, fmt.Sprintf("file=%s", filename))
|
|
|
|
if len(pkgs) == 0 {
|
|
|
|
if err == nil {
|
|
|
|
err = fmt.Errorf("no packages found for %s", filename)
|
|
|
|
}
|
2019-03-05 15:30:44 -07:00
|
|
|
return err
|
2019-03-04 16:01:51 -07:00
|
|
|
}
|
|
|
|
for _, pkg := range pkgs {
|
|
|
|
// If the package comes back with errors from `go list`, don't bother
|
|
|
|
// type-checking it.
|
|
|
|
for _, err := range pkg.Errors {
|
|
|
|
switch err.Kind {
|
|
|
|
case packages.UnknownError, packages.ListError:
|
2019-03-05 15:30:44 -07:00
|
|
|
return err
|
2019-03-04 16:01:51 -07:00
|
|
|
}
|
|
|
|
}
|
|
|
|
v.link(pkg.PkgPath, pkg, nil)
|
2019-03-04 15:45:01 -07:00
|
|
|
}
|
|
|
|
}
|
2019-03-05 15:30:44 -07:00
|
|
|
return nil
|
2019-03-04 16:01:51 -07:00
|
|
|
}
|
|
|
|
|
|
|
|
// reparseImports reparses a file's import declarations to determine if they
|
|
|
|
// have changed.
|
2019-03-05 15:30:44 -07:00
|
|
|
func (v *View) reparseImports(ctx context.Context, f *File, filename string) bool {
|
2019-03-04 16:01:51 -07:00
|
|
|
if f.meta == nil {
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
// Get file content in case we don't already have it?
|
2019-03-05 15:30:44 -07:00
|
|
|
f.read(ctx)
|
2019-03-04 16:01:51 -07:00
|
|
|
parsed, _ := parser.ParseFile(v.Config.Fset, filename, f.content, parser.ImportsOnly)
|
|
|
|
if parsed == nil {
|
|
|
|
return true
|
2019-03-04 15:45:01 -07:00
|
|
|
}
|
2019-03-04 16:01:51 -07:00
|
|
|
if len(f.imports) != len(parsed.Imports) {
|
|
|
|
return true
|
|
|
|
}
|
|
|
|
for i, importSpec := range f.imports {
|
|
|
|
if importSpec.Path.Value != f.imports[i].Path.Value {
|
|
|
|
return true
|
2019-03-04 15:45:01 -07:00
|
|
|
}
|
|
|
|
}
|
2019-03-04 16:01:51 -07:00
|
|
|
return false
|
2019-03-04 15:45:01 -07:00
|
|
|
}
|
|
|
|
|
2019-03-04 16:01:51 -07:00
|
|
|
func (v *View) link(pkgPath string, pkg *packages.Package, parent *metadata) *metadata {
|
|
|
|
m, ok := v.mcache.packages[pkgPath]
|
|
|
|
if !ok {
|
|
|
|
m = &metadata{
|
|
|
|
pkgPath: pkgPath,
|
|
|
|
id: pkg.ID,
|
|
|
|
parents: make(map[string]bool),
|
|
|
|
children: make(map[string]bool),
|
|
|
|
}
|
|
|
|
v.mcache.packages[pkgPath] = m
|
|
|
|
}
|
|
|
|
// Reset any field that could have changed across calls to packages.Load.
|
|
|
|
m.name = pkg.Name
|
|
|
|
m.files = pkg.CompiledGoFiles
|
|
|
|
for _, filename := range m.files {
|
|
|
|
if f, ok := v.files[source.ToURI(filename)]; ok {
|
|
|
|
f.meta = m
|
|
|
|
}
|
|
|
|
}
|
|
|
|
// Connect the import graph.
|
|
|
|
if parent != nil {
|
|
|
|
m.parents[parent.pkgPath] = true
|
|
|
|
parent.children[pkgPath] = true
|
2019-03-04 15:01:39 -07:00
|
|
|
}
|
|
|
|
for importPath, importPkg := range pkg.Imports {
|
2019-03-04 16:01:51 -07:00
|
|
|
if _, ok := m.children[importPath]; !ok {
|
|
|
|
v.link(importPath, importPkg, m)
|
2019-03-04 15:01:39 -07:00
|
|
|
}
|
|
|
|
}
|
2019-03-04 16:01:51 -07:00
|
|
|
// Clear out any imports that have been removed.
|
|
|
|
for importPath := range m.children {
|
|
|
|
if _, ok := pkg.Imports[importPath]; !ok {
|
|
|
|
delete(m.children, importPath)
|
|
|
|
if child, ok := v.mcache.packages[importPath]; ok {
|
|
|
|
delete(child.parents, pkgPath)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return m
|
2019-03-04 15:01:39 -07:00
|
|
|
}
|
|
|
|
|
2019-03-05 15:30:44 -07:00
|
|
|
// Import returns the type information for pkgPath, type-checking it on a
// cache miss. Concurrent callers for the same path block on a per-entry
// ready channel, so each package is type-checked at most once.
//
// Import is the types.Importer method used by typeCheck's types.Config
// (Importer: v), which is why it returns only the *types.Package and drops
// the richer *packages.Package.
func (v *View) Import(pkgPath string) (*types.Package, error) {
	v.pcache.mu.Lock()
	e, ok := v.pcache.packages[pkgPath]
	if ok {
		// cache hit
		v.pcache.mu.Unlock()
		// wait for entry to become ready
		<-e.ready
	} else {
		// cache miss: publish the placeholder entry under the lock so other
		// callers find it and wait, then release the lock before the
		// (potentially slow) type check.
		e = &entry{ready: make(chan struct{})}
		v.pcache.packages[pkgPath] = e
		v.pcache.mu.Unlock()

		// This goroutine becomes responsible for populating
		// the entry and broadcasting its readiness.
		e.pkg, e.err = v.typeCheck(pkgPath)
		close(e.ready)
	}
	if e.err != nil {
		return nil, e.err
	}
	return e.pkg.Types, nil
}
|
|
|
|
|
2019-03-05 15:30:44 -07:00
|
|
|
// typeCheck builds a *packages.Package for pkgPath from cached metadata,
// parses its compiled Go files, and runs the type checker over them.
// Parse and type errors are accumulated on pkg.Errors (via appendPkgError)
// rather than returned; the error result is non-nil only when no metadata
// exists for pkgPath.
//
// NOTE(review): v.mcache.packages is read here without holding mcache.mu.
// parse calls typeCheck while holding that lock, but Import goroutines
// reach here without it — confirm the intended locking discipline.
func (v *View) typeCheck(pkgPath string) (*packages.Package, error) {
	meta, ok := v.mcache.packages[pkgPath]
	if !ok {
		return nil, fmt.Errorf("no metadata for %v", pkgPath)
	}
	// Use the default type information for the unsafe package.
	var typ *types.Package
	if meta.pkgPath == "unsafe" {
		typ = types.Unsafe
	} else {
		typ = types.NewPackage(meta.pkgPath, meta.name)
	}
	pkg := &packages.Package{
		ID:              meta.id,
		Name:            meta.name,
		PkgPath:         meta.pkgPath,
		CompiledGoFiles: meta.files,
		Imports:         make(map[string]*packages.Package),
		Fset:            v.Config.Fset,
		Types:           typ,
		TypesInfo: &types.Info{
			Types:      make(map[ast.Expr]types.TypeAndValue),
			Defs:       make(map[*ast.Ident]types.Object),
			Uses:       make(map[*ast.Ident]types.Object),
			Implicits:  make(map[ast.Node]types.Object),
			Selections: make(map[*ast.SelectorExpr]*types.Selection),
			Scopes:     make(map[ast.Node]*types.Scope),
		},
		// TODO(rstambler): Get real TypeSizes from go/packages (golang.org/issues/30139).
		TypesSizes: &types.StdSizes{},
	}
	// appendError collects both parse errors (below) and type-checker
	// errors (via types.Config.Error) onto pkg.Errors.
	appendError := func(err error) {
		v.appendPkgError(pkg, err)
	}
	files, errs := v.parseFiles(meta.files)
	for _, err := range errs {
		appendError(err)
	}
	pkg.Syntax = files
	cfg := &types.Config{
		Error:    appendError,
		Importer: v,
	}
	check := types.NewChecker(cfg, v.Config.Fset, pkg.Types, pkg.TypesInfo)
	check.Files(pkg.Syntax)

	// Set imports of package to correspond to cached packages. This is
	// necessary for go/analysis, but once we merge its approach with the
	// current caching system, we can eliminate this.
	v.pcache.mu.Lock()
	for importPath := range meta.children {
		if importEntry, ok := v.pcache.packages[importPath]; ok {
			pkg.Imports[importPath] = importEntry.pkg
		}
	}
	v.pcache.mu.Unlock()

	return pkg, nil
}
|
|
|
|
|
2019-03-05 15:30:44 -07:00
|
|
|
func (v *View) appendPkgError(pkg *packages.Package, err error) {
|
2019-03-04 15:01:39 -07:00
|
|
|
if err == nil {
|
|
|
|
return
|
|
|
|
}
|
|
|
|
var errs []packages.Error
|
|
|
|
switch err := err.(type) {
|
|
|
|
case *scanner.Error:
|
|
|
|
errs = append(errs, packages.Error{
|
|
|
|
Pos: err.Pos.String(),
|
|
|
|
Msg: err.Msg,
|
|
|
|
Kind: packages.ParseError,
|
|
|
|
})
|
|
|
|
case scanner.ErrorList:
|
|
|
|
// The first parser error is likely the root cause of the problem.
|
|
|
|
if err.Len() > 0 {
|
|
|
|
errs = append(errs, packages.Error{
|
|
|
|
Pos: err[0].Pos.String(),
|
|
|
|
Msg: err[0].Msg,
|
|
|
|
Kind: packages.ParseError,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
case types.Error:
|
|
|
|
errs = append(errs, packages.Error{
|
2019-03-05 15:30:44 -07:00
|
|
|
Pos: v.Config.Fset.Position(err.Pos).String(),
|
2019-03-04 15:01:39 -07:00
|
|
|
Msg: err.Msg,
|
|
|
|
Kind: packages.TypeError,
|
|
|
|
})
|
|
|
|
}
|
|
|
|
pkg.Errors = append(pkg.Errors, errs...)
|
|
|
|
}
|
|
|
|
|
|
|
|
// We use a counting semaphore to limit
// the number of parallel I/O calls per process.
// A send acquires a slot and a receive releases it (see parseFiles).
var ioLimit = make(chan bool, 20)
|
|
|
|
|
|
|
|
// parseFiles reads and parses the Go source files and returns the ASTs
// of the ones that could be at least partially parsed, along with a
// list of I/O and parse errors encountered.
//
// Because files are scanned in parallel, the token.Pos
// positions of the resulting ast.Files are not ordered.
//
// Cached ASTs (v.files) and Config.Overlay contents take precedence over
// the on-disk file contents. The returned slices are compacted, so their
// indices do not correspond to the input filenames.
func (v *View) parseFiles(filenames []string) ([]*ast.File, []error) {
	var wg sync.WaitGroup
	n := len(filenames)
	parsed := make([]*ast.File, n)
	errors := make([]error, n)
	for i, filename := range filenames {
		// Stop scheduling work once the view's context has been cancelled;
		// record the cancellation error for this slot instead.
		if v.Config.Context.Err() != nil {
			parsed[i] = nil
			errors[i] = v.Config.Context.Err()
			continue
		}

		// First, check if we have already cached an AST for this file.
		f := v.files[source.ToURI(filename)]
		var fAST *ast.File
		if f != nil {
			fAST = f.ast
		}

		wg.Add(1)
		// Each goroutine writes only to its own slot i, so no lock is
		// needed around parsed/errors.
		go func(i int, filename string) {
			ioLimit <- true // wait

			if fAST != nil {
				parsed[i], errors[i] = fAST, nil
			} else {
				// We don't have a cached AST for this file.
				var src []byte
				// Check for an available overlay.
				for f, contents := range v.Config.Overlay {
					if sameFile(f, filename) {
						src = contents
					}
				}
				var err error
				// We don't have an overlay, so we must read the file's contents.
				if src == nil {
					src, err = ioutil.ReadFile(filename)
				}
				if err != nil {
					parsed[i], errors[i] = nil, err
				} else {
					// ParseFile may return both an AST and an error.
					parsed[i], errors[i] = v.Config.ParseFile(v.Config.Fset, filename, src)
				}
			}

			<-ioLimit // signal
			wg.Done()
		}(i, filename)
	}
	wg.Wait()

	// Eliminate nils, preserving order.
	var o int
	for _, f := range parsed {
		if f != nil {
			parsed[o] = f
			o++
		}
	}
	parsed = parsed[:o]

	o = 0
	for _, err := range errors {
		if err != nil {
			errors[o] = err
			o++
		}
	}
	errors = errors[:o]

	return parsed, errors
}
|
|
|
|
|
|
|
|
// sameFile returns true if x and y have the same basename and denote
// the same file.
func sameFile(x, y string) bool {
	if x == y {
		// It could be the case that y doesn't exist.
		// For instance, it may be an overlay file that
		// hasn't been written to disk. To handle that case
		// let x == y through. (We added the exact absolute path
		// string to the CompiledGoFiles list, so the unwritten
		// overlay case implies x==y.)
		return true
	}
	// Comparing basenames first is a cheap way to rule out most pairs
	// before paying for two Stat calls. (optimisation)
	if !strings.EqualFold(filepath.Base(x), filepath.Base(y)) {
		return false
	}
	xi, err := os.Stat(x)
	if err != nil {
		return false
	}
	yi, err := os.Stat(y)
	if err != nil {
		return false
	}
	return os.SameFile(xi, yi)
}
|