internal/lsp/debug: show per-package memory usage
Calculate and display very crude memory usage statistics. This is
complicated by various levels of sharing and indirection, so the numbers
should be taken with *large* grains of salt and interpreted mostly by
experts. Still, the results are interesting and helpful.

Change-Id: Ia9aff974c7d5fddd63df0cfd5cecc08ead33cf84
Reviewed-on: https://go-review.googlesource.com/c/tools/+/236163
Run-TryBot: Heschi Kreinick <heschi@google.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Robert Findley <rfindley@google.com>
parent e8084ff2bf
commit 4651fa3054

internal/lsp/cache/cache.go
@@ -8,10 +8,14 @@ import (
 	"context"
 	"crypto/sha1"
 	"fmt"
+	"go/ast"
 	"go/token"
+	"go/types"
+	"html/template"
 	"io/ioutil"
 	"os"
 	"reflect"
+	"sort"
 	"strconv"
 	"sync/atomic"
 	"time"
@@ -155,3 +159,91 @@ var cacheIndex, sessionIndex, viewIndex int64

 func (c *Cache) ID() string { return c.id }
 func (c *Cache) MemStats() map[reflect.Type]int { return c.store.Stats() }
+
+type packageStat struct {
+	id        packageID
+	mode      source.ParseMode
+	file      int64
+	ast       int64
+	types     int64
+	typesInfo int64
+	total     int64
+}
+
+func (c *Cache) PackageStats() template.HTML {
+	var packageStats []packageStat
+	c.store.DebugOnlyIterate(func(k, v interface{}) {
+		switch k.(type) {
+		case packageHandleKey:
+			v := v.(*packageData)
+			stat := packageStat{
+				id:        v.pkg.id,
+				mode:      v.pkg.mode,
+				types:     typesCost(v.pkg.types.Scope()),
+				typesInfo: typesInfoCost(v.pkg.typesInfo),
+			}
+			for _, f := range v.pkg.compiledGoFiles {
+				fvi := f.handle.Cached()
+				if fvi == nil {
+					continue
+				}
+				fv := fvi.(*parseGoData)
+				stat.file += int64(len(fv.src))
+				stat.ast += astCost(fv.ast)
+			}
+			stat.total = stat.file + stat.ast + stat.types + stat.typesInfo
+			packageStats = append(packageStats, stat)
+		}
+	})
+
+	var totalCost int64
+	for _, stat := range packageStats {
+		totalCost += stat.total
+	}
+	sort.Slice(packageStats, func(i, j int) bool {
+		return packageStats[i].total > packageStats[j].total
+	})
+	html := "<table><thead><td>Name</td><td>total = file + ast + types + types info</td></thead>\n"
+	human := func(n int64) string {
+		return fmt.Sprintf("%.2f", float64(n)/(1024*1024))
+	}
+	var printedCost int64
+	for _, stat := range packageStats {
+		html += fmt.Sprintf("<tr><td>%v (%v)</td><td>%v = %v + %v + %v + %v</td></tr>\n", stat.id, stat.mode,
+			human(stat.total), human(stat.file), human(stat.ast), human(stat.types), human(stat.typesInfo))
+		printedCost += stat.total
+		if float64(printedCost) > float64(totalCost)*.9 {
+			break
+		}
+	}
+	html += "</table>\n"
+	return template.HTML(html)
+}
+
+func astCost(f *ast.File) int64 {
+	var count int64
+	ast.Inspect(f, func(n ast.Node) bool {
+		count += 32 // nodes are pretty small.
+		return true
+	})
+	return count
+}
+
+func typesCost(scope *types.Scope) int64 {
+	cost := 64 + int64(scope.Len())*128 // types.object looks pretty big
+	for i := 0; i < scope.NumChildren(); i++ {
+		cost += typesCost(scope.Child(i))
+	}
+	return cost
+}
+
+func typesInfoCost(info *types.Info) int64 {
+	// Most of these refer to existing objects, with the exception of InitOrder, Selections, and Types.
+	cost := 24*len(info.Defs) +
+		32*len(info.Implicits) +
+		256*len(info.InitOrder) + // these are big, but there aren't many of them.
+		32*len(info.Scopes) +
+		128*len(info.Selections) + // wild guess
+		128*len(info.Types) + // wild guess
+		32*len(info.Uses)
+	return int64(cost)
+}
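The costing above leans on gopls-internal types (packageID, source.ParseMode, packageData, parseGoData), but the heuristics themselves only need the standard go/ast and go/types packages. As a rough illustration of the same idea outside gopls, the sketch below parses and type-checks a small snippet and applies the same per-node and per-object constants. The estimateAST/estimateScope names and the demo source are invented for this example, and the byte counts are just as crude as the ones in the commit.

package main

import (
	"fmt"
	"go/ast"
	"go/importer"
	"go/parser"
	"go/token"
	"go/types"
)

const src = `package demo

import "fmt"

func Hello(name string) { fmt.Println("hello", name) }
`

// estimateAST mirrors astCost: charge ~32 bytes per AST node.
func estimateAST(f *ast.File) int64 {
	var cost int64
	ast.Inspect(f, func(n ast.Node) bool {
		cost += 32
		return true
	})
	return cost
}

// estimateScope mirrors typesCost: a flat charge per scope plus
// ~128 bytes per declared object, recursing into child scopes.
func estimateScope(scope *types.Scope) int64 {
	cost := 64 + int64(scope.Len())*128
	for i := 0; i < scope.NumChildren(); i++ {
		cost += estimateScope(scope.Child(i))
	}
	return cost
}

func main() {
	fset := token.NewFileSet()
	f, err := parser.ParseFile(fset, "demo.go", src, 0)
	if err != nil {
		panic(err)
	}
	// Type-check the file so we have a package scope and a types.Info to cost.
	conf := types.Config{Importer: importer.Default()}
	info := &types.Info{
		Defs: map[*ast.Ident]types.Object{},
		Uses: map[*ast.Ident]types.Object{},
	}
	pkg, err := conf.Check("demo", fset, []*ast.File{f}, info)
	if err != nil {
		panic(err)
	}
	fmt.Printf("file:  %d bytes\n", len(src))
	fmt.Printf("ast:   ~%d bytes\n", estimateAST(f))
	fmt.Printf("types: ~%d bytes\n", estimateScope(pkg.Scope()))
	fmt.Printf("info:  ~%d bytes\n", 24*len(info.Defs)+32*len(info.Uses))
}

Running it prints four rough numbers in the same file/ast/types/info breakdown that PackageStats reports per package.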
@@ -686,6 +686,8 @@ var cacheTmpl = template.Must(template.Must(baseTemplate.Clone()).Parse(`
 {{define "body"}}
 <h2>memoize.Store entries</h2>
 <ul>{{range $k,$v := .MemStats}}<li>{{$k}} - {{$v}}</li>{{end}}</ul>
+<h2>Per-package usage - not accurate, for guidance only</h2>
+{{.PackageStats}}
 {{end}}
 `))
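The new template line on the debug page works because PackageStats returns template.HTML rather than a plain string: html/template escapes ordinary string values, so pre-rendered markup has to be wrapped in template.HTML to reach the page verbatim. A minimal standalone sketch of that distinction (not gopls code; the field names are invented for the example):

package main

import (
	"html/template"
	"os"
)

func main() {
	tmpl := template.Must(template.New("body").Parse(
		"escaped: {{.Raw}}\nverbatim: {{.Trusted}}\n"))

	data := struct {
		Raw     string        // escaped by html/template
		Trusted template.HTML // emitted as-is, like PackageStats' return value
	}{
		Raw:     "<table><tr><td>cache</td></tr></table>",
		Trusted: template.HTML("<table><tr><td>cache</td></tr></table>"),
	}
	if err := tmpl.Execute(os.Stdout, data); err != nil {
		panic(err)
	}
}

The first line comes out with &lt; and &gt; entities, the second as raw HTML, which is exactly what the {{.PackageStats}} table relies on.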
@@ -164,6 +164,27 @@ func (s *Store) Stats() map[reflect.Type]int {
 	return result
 }
+
+// DebugOnlyIterate iterates through all live cache entries and calls f on them.
+// It should only be used for debugging purposes.
+func (s *Store) DebugOnlyIterate(f func(k, v interface{})) {
+	s.mu.Lock()
+	defer s.mu.Unlock()
+
+	for k, e := range s.entries {
+		h := (*Handle)(unsafe.Pointer(e))
+		var v interface{}
+		h.mu.Lock()
+		if h.state == stateCompleted {
+			v = h.value
+		}
+		h.mu.Unlock()
+		if v == nil {
+			continue
+		}
+		f(k, v)
+	}
+}

 // Cached returns the value associated with a handle.
 //
 // It will never cause the value to be generated.
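DebugOnlyIterate holds the store lock for the whole walk and peeks at each handle under its own lock, reporting only values whose computation has completed. A stripped-down sketch of that pattern, with a toy store and entry type standing in for the real memoize.Store and Handle (names and fields here are illustrative only):

package main

import (
	"fmt"
	"sync"
)

// entry holds a lazily computed value; done reports whether the
// computation has finished.
type entry struct {
	mu    sync.Mutex
	done  bool
	value interface{}
}

// store maps keys to entries, guarded by its own mutex.
type store struct {
	mu      sync.Mutex
	entries map[string]*entry
}

// debugOnlyIterate calls f for every completed entry. Like the real
// DebugOnlyIterate, it holds the store lock for the whole walk, so it
// is only suitable for debugging, not for hot paths.
func (s *store) debugOnlyIterate(f func(k string, v interface{})) {
	s.mu.Lock()
	defer s.mu.Unlock()

	for k, e := range s.entries {
		e.mu.Lock()
		var v interface{}
		if e.done {
			v = e.value
		}
		e.mu.Unlock()
		if v == nil {
			continue // skip entries still being computed
		}
		f(k, v)
	}
}

func main() {
	s := &store{entries: map[string]*entry{
		"a": {done: true, value: 42},
		"b": {done: false}, // still being computed; skipped
	}}
	s.debugOnlyIterate(func(k string, v interface{}) {
		fmt.Println(k, "=", v)
	})
}

Holding both locks keeps the walk consistent but blocks all other store activity while it runs, which is why the real method is flagged as debug-only.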