2018-11-07 10:58:55 -07:00
|
|
|
// Copyright 2018 The Go Authors. All rights reserved.
|
|
|
|
// Use of this source code is governed by a BSD-style
|
|
|
|
// license that can be found in the LICENSE file.
|
|
|
|
|
2018-12-05 15:00:36 -07:00
|
|
|
// Package source provides core features for use by Go editors and tools.
|
2018-11-07 10:58:55 -07:00
|
|
|
package source
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bytes"
|
|
|
|
"context"
|
2019-11-05 15:33:19 -07:00
|
|
|
"go/ast"
|
2018-11-07 10:58:55 -07:00
|
|
|
"go/format"
|
2019-11-05 15:33:19 -07:00
|
|
|
"go/parser"
|
2019-11-14 11:10:28 -07:00
|
|
|
"go/scanner"
|
2019-11-05 15:33:19 -07:00
|
|
|
"go/token"
|
2018-11-14 18:42:30 -07:00
|
|
|
|
2019-06-28 14:21:07 -06:00
|
|
|
"golang.org/x/tools/internal/imports"
|
2019-01-17 09:59:05 -07:00
|
|
|
"golang.org/x/tools/internal/lsp/diff"
|
2019-08-16 15:05:40 -06:00
|
|
|
"golang.org/x/tools/internal/lsp/protocol"
|
2019-02-19 19:11:15 -07:00
|
|
|
"golang.org/x/tools/internal/span"
|
2019-08-13 13:07:39 -06:00
|
|
|
"golang.org/x/tools/internal/telemetry/trace"
|
2019-08-06 13:13:11 -06:00
|
|
|
errors "golang.org/x/xerrors"
|
2018-11-07 10:58:55 -07:00
|
|
|
)
|
|
|
|
|
2018-12-14 15:00:24 -07:00
|
|
|
// Format formats a file with a given range.
// It type-checks the narrowest package containing f, then formats the file's
// full AST with go/format, falling back to formatting the raw source when the
// package has list or parse errors. The result is returned as LSP text edits
// against the file's current content.
func Format(ctx context.Context, view View, f File) ([]protocol.TextEdit, error) {
	ctx, done := trace.StartSpan(ctx, "source.Format")
	defer done()

	snapshot, cphs, err := view.CheckPackageHandles(ctx, f)
	if err != nil {
		return nil, err
	}
	// Prefer the narrowest package so we format the file as it appears in
	// its most specific build configuration.
	cph, err := NarrowestCheckPackageHandle(cphs)
	if err != nil {
		return nil, err
	}
	pkg, err := cph.Check(ctx)
	if err != nil {
		return nil, err
	}
	ph, err := pkg.File(f.URI())
	if err != nil {
		return nil, err
	}
	// Be extra careful that the file's ParseMode is correct,
	// otherwise we might replace the user's code with a trimmed AST.
	if ph.Mode() != ParseFull {
		return nil, errors.Errorf("%s was parsed in the incorrect mode", ph.File().Identity().URI)
	}
	file, m, _, err := ph.Parse(ctx)
	if err != nil {
		return nil, err
	}
	if hasListErrors(pkg) || hasParseErrors(pkg, f.URI()) {
		// Even if this package has list or parse errors, this file may not
		// have any parse errors and can still be formatted. Using format.Node
		// on an ast with errors may result in code being added or removed.
		// Attempt to format the source of this file instead.
		formatted, err := formatSource(ctx, snapshot, f)
		if err != nil {
			return nil, err
		}
		return computeTextEdits(ctx, view, ph.File(), m, string(formatted))
	}

	fset := view.Session().Cache().FileSet()
	buf := &bytes.Buffer{}

	// format.Node changes slightly from one release to another, so the version
	// of Go used to build the LSP server will determine how it formats code.
	// This should be acceptable for all users, who will likely be prompted to
	// rebuild the LSP server on each Go release.
	if err := format.Node(buf, fset, file); err != nil {
		return nil, err
	}
	return computeTextEdits(ctx, view, ph.File(), m, buf.String())
}
|
|
|
|
|
2019-09-27 11:17:59 -06:00
|
|
|
func formatSource(ctx context.Context, s Snapshot, f File) ([]byte, error) {
|
2019-07-26 16:17:04 -06:00
|
|
|
ctx, done := trace.StartSpan(ctx, "source.formatSource")
|
|
|
|
defer done()
|
2019-09-27 11:17:59 -06:00
|
|
|
|
|
|
|
data, _, err := s.Handle(ctx, f).Read(ctx)
|
2019-07-26 16:17:04 -06:00
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
return format.Source(data)
|
|
|
|
}
|
|
|
|
|
2019-11-05 15:33:19 -07:00
|
|
|
// ImportFix pairs a single goimports fix with the text edits that would
// apply exactly that fix to the file.
type ImportFix struct {
	// Fix is the proposed change to the file's imports, as computed by the
	// imports package.
	Fix *imports.ImportFix
	// Edits are the LSP text edits needed to apply only this fix.
	Edits []protocol.TextEdit
}
|
|
|
|
|
|
|
|
// AllImportsFixes formats f for each possible fix to the imports.
|
|
|
|
// In addition to returning the result of applying all edits,
|
|
|
|
// it returns a list of fixes that could be applied to the file, with the
|
|
|
|
// corresponding TextEdits that would be needed to apply that fix.
|
|
|
|
func AllImportsFixes(ctx context.Context, view View, f File) (allFixEdits []protocol.TextEdit, editsPerFix []*ImportFix, err error) {
|
|
|
|
ctx, done := trace.StartSpan(ctx, "source.AllImportsFixes")
|
2019-06-26 20:46:12 -06:00
|
|
|
defer done()
|
2019-09-09 17:26:26 -06:00
|
|
|
|
2019-09-27 11:17:59 -06:00
|
|
|
_, cphs, err := view.CheckPackageHandles(ctx, f)
|
2019-09-09 17:26:26 -06:00
|
|
|
if err != nil {
|
2019-11-05 15:33:19 -07:00
|
|
|
return nil, nil, err
|
2019-09-09 17:26:26 -06:00
|
|
|
}
|
2019-10-10 13:22:30 -06:00
|
|
|
cph, err := NarrowestCheckPackageHandle(cphs)
|
|
|
|
if err != nil {
|
2019-11-05 15:33:19 -07:00
|
|
|
return nil, nil, err
|
2019-10-10 13:22:30 -06:00
|
|
|
}
|
2019-09-09 17:26:26 -06:00
|
|
|
pkg, err := cph.Check(ctx)
|
2019-07-09 15:52:23 -06:00
|
|
|
if err != nil {
|
2019-11-05 15:33:19 -07:00
|
|
|
return nil, nil, err
|
2019-06-28 17:59:25 -06:00
|
|
|
}
|
2019-10-20 17:57:03 -06:00
|
|
|
if hasListErrors(pkg) {
|
2019-11-05 15:33:19 -07:00
|
|
|
return nil, nil, errors.Errorf("%s has list errors, not running goimports", f.URI())
|
2019-05-20 13:23:02 -06:00
|
|
|
}
|
2019-11-05 15:33:19 -07:00
|
|
|
var ph ParseGoHandle
|
|
|
|
for _, h := range pkg.Files() {
|
|
|
|
if h.File().Identity().URI == f.URI() {
|
|
|
|
ph = h
|
|
|
|
}
|
2019-09-09 22:36:39 -06:00
|
|
|
}
|
2019-11-05 15:33:19 -07:00
|
|
|
if ph == nil {
|
|
|
|
return nil, nil, errors.Errorf("no ParseGoHandle for %s", f.URI())
|
2019-09-18 17:27:47 -06:00
|
|
|
}
|
2019-11-05 15:33:19 -07:00
|
|
|
|
2019-06-28 14:21:07 -06:00
|
|
|
options := &imports.Options{
|
|
|
|
// Defaults.
|
|
|
|
AllErrors: true,
|
|
|
|
Comments: true,
|
|
|
|
Fragment: true,
|
|
|
|
FormatOnly: false,
|
|
|
|
TabIndent: true,
|
|
|
|
TabWidth: 8,
|
|
|
|
}
|
2019-11-05 15:33:19 -07:00
|
|
|
err = view.RunProcessEnvFunc(ctx, func(opts *imports.Options) error {
|
|
|
|
allFixEdits, editsPerFix, err = computeImportEdits(ctx, view, ph, opts)
|
2019-07-12 16:54:06 -06:00
|
|
|
return err
|
2019-11-05 15:33:19 -07:00
|
|
|
}, options)
|
2019-09-17 09:19:11 -06:00
|
|
|
if err != nil {
|
2019-11-05 15:33:19 -07:00
|
|
|
return nil, nil, err
|
2019-09-09 22:36:39 -06:00
|
|
|
}
|
2018-12-14 15:00:24 -07:00
|
|
|
|
2019-11-05 15:33:19 -07:00
|
|
|
return allFixEdits, editsPerFix, nil
|
2019-07-30 12:00:02 -06:00
|
|
|
}
|
|
|
|
|
2019-11-05 15:33:19 -07:00
|
|
|
// computeImportEdits computes a set of edits that perform one or all of the
// necessary import fixes.
//
// It returns both the edits that apply every fix at once (allFixEdits) and,
// for each individual fix, the edits that would apply only that fix
// (editsPerFix).
func computeImportEdits(ctx context.Context, view View, ph ParseGoHandle, options *imports.Options) (allFixEdits []protocol.TextEdit, editsPerFix []*ImportFix, err error) {
	filename := ph.File().Identity().URI.Filename()

	// Build up basic information about the original file.
	origData, _, err := ph.File().Read(ctx)
	if err != nil {
		return nil, nil, err
	}
	origAST, origMapper, _, err := ph.Parse(ctx)
	if err != nil {
		return nil, nil, err
	}
	// Isolate the import block of the original file so later diffs can be
	// restricted to just that region.
	origImports, origImportOffset := trimToImports(view.Session().Cache().FileSet(), origAST, origData)

	// Ask goimports for the full set of fixes for this file.
	allFixes, err := imports.FixImports(filename, origData, options)
	if err != nil {
		return nil, nil, err
	}

	// First, the edits for applying every fix together.
	allFixEdits, err = computeFixEdits(view, ph, options, origData, origAST, origMapper, origImports, origImportOffset, allFixes)
	if err != nil {
		return nil, nil, err
	}

	// Apply all of the import fixes to the file.
	// Add the edits for each fix to the result.
	for _, fix := range allFixes {
		edits, err := computeFixEdits(view, ph, options, origData, origAST, origMapper, origImports, origImportOffset, []*imports.ImportFix{fix})
		if err != nil {
			return nil, nil, err
		}
		editsPerFix = append(editsPerFix, &ImportFix{
			Fix:   fix,
			Edits: edits,
		})
	}
	return allFixEdits, editsPerFix, nil
}
|
|
|
|
|
2019-11-12 14:58:00 -07:00
|
|
|
func computeOneImportFixEdits(ctx context.Context, view View, ph ParseGoHandle, fix *imports.ImportFix) ([]protocol.TextEdit, error) {
|
|
|
|
origData, _, err := ph.File().Read(ctx)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
origAST, origMapper, _, err := ph.Parse(ctx)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
origImports, origImportOffset := trimToImports(view.Session().Cache().FileSet(), origAST, origData)
|
|
|
|
|
|
|
|
options := &imports.Options{
|
|
|
|
// Defaults.
|
|
|
|
AllErrors: true,
|
|
|
|
Comments: true,
|
|
|
|
Fragment: true,
|
|
|
|
FormatOnly: false,
|
|
|
|
TabIndent: true,
|
|
|
|
TabWidth: 8,
|
|
|
|
}
|
|
|
|
return computeFixEdits(view, ph, options, origData, origAST, origMapper, origImports, origImportOffset, []*imports.ImportFix{fix})
|
|
|
|
}
|
|
|
|
|
|
|
|
// computeFixEdits applies fixes to the file behind ph, diffs the import
// sections of the original and fixed files, and returns the resulting LSP
// text edits, shifted back to the right lines of the original file.
func computeFixEdits(view View, ph ParseGoHandle, options *imports.Options, origData []byte, origAST *ast.File, origMapper *protocol.ColumnMapper, origImports []byte, origImportOffset int, fixes []*imports.ImportFix) ([]protocol.TextEdit, error) {
	filename := ph.File().Identity().URI.Filename()
	// Apply the fixes and re-parse the file so that we can locate the
	// new imports.
	fixedData, err := imports.ApplyFixes(fixes, filename, origData, options)
	if err != nil {
		return nil, err
	}
	// Parse imports only: that is all we need to locate the fixed import block.
	fixedFset := token.NewFileSet()
	fixedAST, err := parser.ParseFile(fixedFset, filename, fixedData, parser.ImportsOnly)
	// Any error here prevents us from computing the edits.
	if err != nil {
		return nil, err
	}
	fixedImports, fixedImportsOffset := trimToImports(fixedFset, fixedAST, fixedData)

	// Prepare the diff. If both sides had import statements, we can diff
	// just those sections against each other, then shift the resulting
	// edits to the right lines in the original file.
	left, right := origImports, fixedImports
	converter := span.NewContentConverter(filename, origImports)
	offset := origImportOffset

	// If one side or the other has no imports, we won't know where to
	// anchor the diffs. Instead, use the beginning of the file, up to its
	// first non-imports decl. We know the imports code will insert
	// somewhere before that.
	if origImportOffset == 0 || fixedImportsOffset == 0 {
		left, _ = trimToFirstNonImport(view.Session().Cache().FileSet(), origAST, origData, nil)
		// We need the whole AST here, not just the ImportsOnly AST we parsed above.
		fixedAST, err = parser.ParseFile(fixedFset, filename, fixedData, 0)
		// A parse error may still yield a partial AST; only give up when we
		// got no AST at all. The error itself is re-checked against the
		// import block by trimToFirstNonImport below.
		if fixedAST == nil {
			return nil, err
		}
		var ok bool
		right, ok = trimToFirstNonImport(fixedFset, fixedAST, fixedData, err)
		if !ok {
			return nil, errors.Errorf("error %v detected in the import block", err)
		}
		// We're now working with a prefix of the original file, so we can
		// use the original converter, and there is no offset on the edits.
		converter = origMapper.Converter
		offset = 0
	}

	// Perform the diff and adjust the results for the trimming, if any.
	edits := view.Options().ComputeEdits(ph.File().Identity().URI, string(left), string(right))
	for i := range edits {
		s, err := edits[i].Span.WithPosition(converter)
		if err != nil {
			return nil, err
		}
		// Shift each edit down by the number of lines trimmed off the top of
		// the original file. Column -1 means "unset offset".
		start := span.NewPoint(s.Start().Line()+offset, s.Start().Column(), -1)
		end := span.NewPoint(s.End().Line()+offset, s.End().Column(), -1)
		edits[i].Span = span.New(s.URI(), start, end)
	}
	return ToProtocolEdits(origMapper, edits)
}
|
|
|
|
|
2019-11-05 15:33:19 -07:00
|
|
|
// trimToImports returns a section of the source file that covers all of the
|
|
|
|
// import declarations, and the line offset into the file that section starts at.
|
|
|
|
func trimToImports(fset *token.FileSet, f *ast.File, src []byte) ([]byte, int) {
|
|
|
|
var firstImport, lastImport ast.Decl
|
|
|
|
for _, decl := range f.Decls {
|
|
|
|
if gen, ok := decl.(*ast.GenDecl); ok && gen.Tok == token.IMPORT {
|
|
|
|
if firstImport == nil {
|
|
|
|
firstImport = decl
|
|
|
|
}
|
|
|
|
lastImport = decl
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if firstImport == nil {
|
|
|
|
return nil, 0
|
|
|
|
}
|
|
|
|
start := firstImport.Pos()
|
|
|
|
end := fset.File(f.Pos()).LineStart(fset.Position(lastImport.End()).Line + 1)
|
|
|
|
startLineOffset := fset.Position(start).Line - 1 // lines are 1-indexed.
|
|
|
|
return src[fset.Position(firstImport.Pos()).Offset:fset.Position(end).Offset], startLineOffset
|
|
|
|
}
|
|
|
|
|
|
|
|
// trimToFirstNonImport returns src from the beginning to the first non-import
|
|
|
|
// declaration, or the end of the file if there is no such decl.
|
2019-11-14 11:10:28 -07:00
|
|
|
func trimToFirstNonImport(fset *token.FileSet, f *ast.File, src []byte, err error) ([]byte, bool) {
|
2019-11-05 15:33:19 -07:00
|
|
|
var firstDecl ast.Decl
|
|
|
|
for _, decl := range f.Decls {
|
|
|
|
if gen, ok := decl.(*ast.GenDecl); ok && gen.Tok == token.IMPORT {
|
|
|
|
continue
|
|
|
|
}
|
|
|
|
firstDecl = decl
|
|
|
|
break
|
|
|
|
}
|
2019-11-14 11:10:28 -07:00
|
|
|
tok := fset.File(f.Pos())
|
|
|
|
if tok == nil {
|
|
|
|
return nil, false
|
|
|
|
}
|
2019-11-05 15:33:19 -07:00
|
|
|
end := f.End()
|
|
|
|
if firstDecl != nil {
|
2019-11-14 11:10:28 -07:00
|
|
|
end = tok.LineStart(fset.Position(firstDecl.Pos()).Line - 1)
|
|
|
|
}
|
|
|
|
// Any errors in the file must be after the part of the file that we care about.
|
|
|
|
switch err := err.(type) {
|
|
|
|
case *scanner.Error:
|
|
|
|
pos := tok.Pos(err.Pos.Offset)
|
|
|
|
if pos <= end {
|
|
|
|
return nil, false
|
|
|
|
}
|
|
|
|
case scanner.ErrorList:
|
|
|
|
if err.Len() > 0 {
|
|
|
|
pos := tok.Pos(err[0].Pos.Offset)
|
|
|
|
if pos <= end {
|
|
|
|
return nil, false
|
|
|
|
}
|
|
|
|
}
|
2019-11-05 15:33:19 -07:00
|
|
|
}
|
2019-11-14 11:10:28 -07:00
|
|
|
return src[0:fset.Position(end).Offset], true
|
2019-07-30 12:00:02 -06:00
|
|
|
}
|
|
|
|
|
2019-11-01 11:50:21 -06:00
|
|
|
// CandidateImports returns every import that could be added to filename.
|
|
|
|
func CandidateImports(ctx context.Context, view View, filename string) ([]imports.ImportFix, error) {
|
2019-08-14 15:25:47 -06:00
|
|
|
ctx, done := trace.StartSpan(ctx, "source.CandidateImports")
|
|
|
|
defer done()
|
|
|
|
|
|
|
|
options := &imports.Options{
|
|
|
|
// Defaults.
|
|
|
|
AllErrors: true,
|
|
|
|
Comments: true,
|
|
|
|
Fragment: true,
|
|
|
|
FormatOnly: false,
|
|
|
|
TabIndent: true,
|
|
|
|
TabWidth: 8,
|
|
|
|
}
|
2019-11-01 11:50:21 -06:00
|
|
|
|
|
|
|
var imps []imports.ImportFix
|
2019-08-14 15:25:47 -06:00
|
|
|
importFn := func(opts *imports.Options) error {
|
2019-11-01 11:50:21 -06:00
|
|
|
var err error
|
|
|
|
imps, err = imports.GetAllCandidates(filename, opts)
|
2019-08-14 15:25:47 -06:00
|
|
|
return err
|
|
|
|
}
|
2019-11-01 11:50:21 -06:00
|
|
|
err := view.RunProcessEnvFunc(ctx, importFn, options)
|
|
|
|
return imps, err
|
|
|
|
}
|
|
|
|
|
|
|
|
// PackageExports returns all the packages named pkg that could be imported by
|
|
|
|
// filename, and their exports.
|
|
|
|
func PackageExports(ctx context.Context, view View, pkg, filename string) ([]imports.PackageExport, error) {
|
|
|
|
ctx, done := trace.StartSpan(ctx, "source.PackageExports")
|
|
|
|
defer done()
|
|
|
|
|
|
|
|
options := &imports.Options{
|
|
|
|
// Defaults.
|
|
|
|
AllErrors: true,
|
|
|
|
Comments: true,
|
|
|
|
Fragment: true,
|
|
|
|
FormatOnly: false,
|
|
|
|
TabIndent: true,
|
|
|
|
TabWidth: 8,
|
2019-08-14 15:25:47 -06:00
|
|
|
}
|
|
|
|
|
2019-11-01 11:50:21 -06:00
|
|
|
var pkgs []imports.PackageExport
|
|
|
|
importFn := func(opts *imports.Options) error {
|
|
|
|
var err error
|
|
|
|
pkgs, err = imports.GetPackageExports(pkg, filename, opts)
|
|
|
|
return err
|
|
|
|
}
|
|
|
|
err := view.RunProcessEnvFunc(ctx, importFn, options)
|
|
|
|
return pkgs, err
|
2019-08-14 15:25:47 -06:00
|
|
|
}
|
|
|
|
|
2019-08-02 17:54:15 -06:00
|
|
|
// hasParseErrors returns true if the given file has parse errors.
|
|
|
|
func hasParseErrors(pkg Package, uri span.URI) bool {
|
|
|
|
for _, err := range pkg.GetErrors() {
|
2019-10-21 15:25:09 -06:00
|
|
|
if err.URI == uri && err.Kind == ParseError {
|
2019-06-28 14:37:54 -06:00
|
|
|
return true
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
2019-10-20 17:57:03 -06:00
|
|
|
func hasListErrors(pkg Package) bool {
|
|
|
|
for _, err := range pkg.GetErrors() {
|
2019-10-21 15:25:09 -06:00
|
|
|
if err.Kind == ListError {
|
2019-06-28 14:37:54 -06:00
|
|
|
return true
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return false
|
|
|
|
}
|
|
|
|
|
2019-10-01 16:06:10 -06:00
|
|
|
func computeTextEdits(ctx context.Context, view View, fh FileHandle, m *protocol.ColumnMapper, formatted string) ([]protocol.TextEdit, error) {
|
2019-06-26 20:46:12 -06:00
|
|
|
ctx, done := trace.StartSpan(ctx, "source.computeTextEdits")
|
|
|
|
defer done()
|
2019-09-05 14:58:50 -06:00
|
|
|
|
2019-09-09 22:36:39 -06:00
|
|
|
data, _, err := fh.Read(ctx)
|
2019-06-03 23:04:18 -06:00
|
|
|
if err != nil {
|
2019-09-05 14:58:50 -06:00
|
|
|
return nil, err
|
2019-05-17 10:15:22 -06:00
|
|
|
}
|
2019-10-01 16:06:10 -06:00
|
|
|
edits := view.Options().ComputeEdits(fh.Identity().URI, string(data), formatted)
|
2019-09-05 14:58:50 -06:00
|
|
|
return ToProtocolEdits(m, edits)
|
2018-11-07 10:58:55 -07:00
|
|
|
}
|
2019-08-16 15:05:40 -06:00
|
|
|
|
|
|
|
func ToProtocolEdits(m *protocol.ColumnMapper, edits []diff.TextEdit) ([]protocol.TextEdit, error) {
|
|
|
|
if edits == nil {
|
|
|
|
return nil, nil
|
|
|
|
}
|
|
|
|
result := make([]protocol.TextEdit, len(edits))
|
|
|
|
for i, edit := range edits {
|
|
|
|
rng, err := m.Range(edit.Span)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
result[i] = protocol.TextEdit{
|
|
|
|
Range: rng,
|
|
|
|
NewText: edit.NewText,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return result, nil
|
|
|
|
}
|
2019-09-05 14:58:50 -06:00
|
|
|
|
|
|
|
func FromProtocolEdits(m *protocol.ColumnMapper, edits []protocol.TextEdit) ([]diff.TextEdit, error) {
|
|
|
|
if edits == nil {
|
|
|
|
return nil, nil
|
|
|
|
}
|
|
|
|
result := make([]diff.TextEdit, len(edits))
|
|
|
|
for i, edit := range edits {
|
|
|
|
spn, err := m.RangeSpan(edit.Range)
|
|
|
|
if err != nil {
|
|
|
|
return nil, err
|
|
|
|
}
|
|
|
|
result[i] = diff.TextEdit{
|
|
|
|
Span: spn,
|
|
|
|
NewText: edit.NewText,
|
|
|
|
}
|
|
|
|
}
|
|
|
|
return result, nil
|
|
|
|
}
|