[dev.unified] cmd/compile: implement simple inline body pruning heuristic
An important optimization in the existing export data format is the
pruning of unreachable inline bodies: when re-exporting transitively
imported types, the inline bodies of methods that can't actually be
needed as a result of importing that package are omitted.

The existing logic (implemented in typecheck/crawler.go) is fairly
sophisticated, but it also relies on actually expanding inline bodies
in the process, which is undesirable. However, including all inline
bodies is also prohibitive for testing GOEXPERIMENT=unified against
very large Go code bases that impose size limits on build action
inputs.

As a short-term solution, this CL implements a simple heuristic for
GOEXPERIMENT=unified: include the inline bodies for all
locally-declared functions/methods, and for any imported
functions/methods that were inlined into this package.

Change-Id: I686964a0cd9262b77d3d5587f89cfbcfe8b2e521
Reviewed-on: https://go-review.googlesource.com/c/go/+/419675
Run-TryBot: Matthew Dempsky <mdempsky@google.com>
TryBot-Result: Gopher Robot <gobot@golang.org>
Reviewed-by: David Chase <drchase@google.com>
parent f2851c67fd
commit f995946094
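The heuristic the CL describes reduces to a single predicate per inlinable function. Below is a minimal, illustrative Go sketch of that decision; funcInfo, shouldExportBody, localDecl, and inlinedHere are hypothetical names chosen for this sketch, not identifiers from the CL (the real logic is the linker.exportBody function added in the hunks below).

// Illustrative sketch of the pruning heuristic described in the commit
// message. Hypothetical names; the real implementation is the
// exportBody method added to the linker below.
package main

import "fmt"

// funcInfo captures the two facts the heuristic looks at for each
// inlinable function or method reachable from the export data.
type funcInfo struct {
	name        string
	localDecl   bool // declared in the package being compiled
	inlinedHere bool // its body was inlined somewhere in this package
}

// shouldExportBody reports whether the inline body is (re)exported:
// always for local declarations, and for imported functions only if
// they were actually inlined into this package.
func shouldExportBody(fn funcInfo) bool {
	return fn.localDecl || fn.inlinedHere
}

func main() {
	for _, fn := range []funcInfo{
		{"local.Helper", true, false},
		{"imported.Inlined", false, true},
		{"imported.Unused", false, false},
	} {
		fmt.Printf("%-18s export body: %v\n", fn.name, shouldExportBody(fn))
	}
}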
@@ -241,7 +241,7 @@ func readImportFile(path string, target *ir.Package, env *types2.Context, packag
 		pr := pkgbits.NewPkgDecoder(pkg1.Path, data)
 
 		// Read package descriptors for both types2 and compiler backend.
-		readPackage(newPkgReader(pr), pkg1)
+		readPackage(newPkgReader(pr), pkg1, false)
 		pkg2 = importer.ReadPackage(env, packages, pr)
 
 	case 'i':
@@ -38,8 +38,9 @@ import (
 type linker struct {
 	pw pkgbits.PkgEncoder
 
 	pkgs   map[string]pkgbits.Index
 	decls  map[*types.Sym]pkgbits.Index
+	bodies map[*types.Sym]pkgbits.Index
 }
 
 // relocAll ensures that all elements specified by pr and relocs are
@@ -170,21 +171,12 @@ func (l *linker) relocObj(pr *pkgReader, idx pkgbits.Index) pkgbits.Index {
 	l.relocCommon(pr, &wname, pkgbits.RelocName, idx)
 	l.relocCommon(pr, &wdict, pkgbits.RelocObjDict, idx)
 
-	var obj *ir.Name
-	if sym.Pkg == types.LocalPkg {
-		var ok bool
-		obj, ok = sym.Def.(*ir.Name)
-
-		// Generic types and functions and declared constraint types won't
-		// have definitions.
-		// For now, just generically copy their extension data.
-		// TODO(mdempsky): Restore assertion.
-		if !ok && false {
-			base.Fatalf("missing definition for %v", sym)
-		}
-	}
+	// Generic types and functions won't have definitions, and imported
+	// objects may not either.
+	obj, _ := sym.Def.(*ir.Name)
+	local := sym.Pkg == types.LocalPkg
 
-	if obj != nil {
+	if local && obj != nil {
 		wext.Sync(pkgbits.SyncObject1)
 		switch tag {
 		case pkgbits.ObjFunc:
@@ -199,9 +191,64 @@ func (l *linker) relocObj(pr *pkgReader, idx pkgbits.Index) pkgbits.Index {
 		l.relocCommon(pr, &wext, pkgbits.RelocObjExt, idx)
 	}
 
+	// Check if we need to export the inline bodies for functions and
+	// methods.
+	if obj != nil {
+		if obj.Op() == ir.ONAME && obj.Class == ir.PFUNC {
+			l.exportBody(obj, local)
+		}
+
+		if obj.Op() == ir.OTYPE {
+			if typ := obj.Type(); !typ.IsInterface() {
+				for _, method := range typ.Methods().Slice() {
+					l.exportBody(method.Nname.(*ir.Name), local)
+				}
+			}
+		}
+	}
+
 	return w.Idx
 }
 
+// exportBody exports the given function or method's body, if
+// appropriate. local indicates whether it's a local function or
+// method available on a locally declared type. (Due to cross-package
+// type aliases, a method may be imported, but still available on a
+// locally declared type.)
+func (l *linker) exportBody(obj *ir.Name, local bool) {
+	assert(obj.Op() == ir.ONAME && obj.Class == ir.PFUNC)
+
+	fn := obj.Func
+	if fn.Inl == nil {
+		return // not inlinable anyway
+	}
+
+	// As a simple heuristic, if the function was declared in this
+	// package or we inlined it somewhere in this package, then we'll
+	// (re)export the function body. This isn't perfect, but seems
+	// reasonable in practice. In particular, it has the nice property
+	// that in the worst case, adding a blank import ensures the
+	// function body is available for inlining.
+	//
+	// TODO(mdempsky): Reimplement the reachable method crawling logic
+	// from typecheck/crawler.go.
+	exportBody := local || fn.Inl.Body != nil
+	if !exportBody {
+		return
+	}
+
+	sym := obj.Sym()
+	if _, ok := l.bodies[sym]; ok {
+		// Due to type aliases, we might visit methods multiple times.
+		base.AssertfAt(obj.Type().Recv() != nil, obj.Pos(), "expected method: %v", obj)
+		return
+	}
+
+	pri, ok := bodyReaderFor(fn)
+	assert(ok)
+	l.bodies[sym] = l.relocIdx(pri.pr, pkgbits.RelocBody, pri.idx)
+}
+
 // relocCommon copies the specified element from pr into w,
 // recursively relocating any referenced elements as well.
 func (l *linker) relocCommon(pr *pkgReader, w *pkgbits.Encoder, k pkgbits.RelocKind, idx pkgbits.Index) {
@@ -240,10 +287,6 @@ func (l *linker) relocFuncExt(w *pkgbits.Encoder, name *ir.Name) {
 	if inl := name.Func.Inl; w.Bool(inl != nil) {
 		w.Len(int(inl.Cost))
 		w.Bool(inl.CanDelayResults)
-
-		pri, ok := bodyReader[name.Func]
-		assert(ok)
-		w.Reloc(pkgbits.RelocBody, l.relocIdx(pri.pr, pkgbits.RelocBody, pri.idx))
 	}
 
 	w.Sync(pkgbits.SyncEOF)
@@ -897,6 +897,8 @@ func (r *reader) funcExt(name *ir.Name) {
 		typecheck.Func(fn)
 
 	if r.Bool() {
+		assert(name.Defn == nil)
+
 		fn.ABI = obj.ABI(r.Uint64())
 
 		// Escape analysis.
@@ -911,7 +913,6 @@ func (r *reader) funcExt(name *ir.Name) {
 				Cost:            int32(r.Len()),
 				CanDelayResults: r.Bool(),
 			}
-			r.addBody(name.Func)
 		}
 	} else {
 		r.addBody(name.Func)
@@ -967,10 +968,26 @@ func (r *reader) pragmaFlag() ir.PragmaFlag {
 
 // @@@ Function bodies
 
-// bodyReader tracks where the serialized IR for a function's body can
-// be found.
+// bodyReader tracks where the serialized IR for a local or imported,
+// generic function's body can be found.
 var bodyReader = map[*ir.Func]pkgReaderIndex{}
 
+// importBodyReader tracks where the serialized IR for an imported,
+// static (i.e., non-generic) function body can be read.
+var importBodyReader = map[*types.Sym]pkgReaderIndex{}
+
+// bodyReaderFor returns the pkgReaderIndex for reading fn's
+// serialized IR, and whether one was found.
+func bodyReaderFor(fn *ir.Func) (pri pkgReaderIndex, ok bool) {
+	if fn.Nname.Defn != nil {
+		pri, ok = bodyReader[fn]
+		assert(ok) // must always be available
+	} else {
+		pri, ok = importBodyReader[fn.Sym()]
+	}
+	return
+}
+
 // todoBodies holds the list of function bodies that still need to be
 // constructed.
 var todoBodies []*ir.Func
@@ -978,15 +995,13 @@ var todoBodies []*ir.Func
 // addBody reads a function body reference from the element bitstream,
 // and associates it with fn.
 func (r *reader) addBody(fn *ir.Func) {
+	// addBody should only be called for local functions or imported
+	// generic functions; see comment in funcExt.
+	assert(fn.Nname.Defn != nil)
+
 	pri := pkgReaderIndex{r.p, r.Reloc(pkgbits.RelocBody), r.dict}
 	bodyReader[fn] = pri
 
-	if fn.Nname.Defn == nil {
-		// Don't read in function body for imported functions.
-		// See comment in funcExt.
-		return
-	}
-
 	if r.curfn == nil {
 		todoBodies = append(todoBodies, fn)
 		return
@@ -2225,7 +2240,7 @@ func InlineCall(call *ir.CallExpr, fn *ir.Func, inlIndex int) *ir.InlinedCallExp
 	// TODO(mdempsky): Turn callerfn into an explicit parameter.
 	callerfn := ir.CurFunc
 
-	pri, ok := bodyReader[fn]
+	pri, ok := bodyReaderFor(fn)
 	if !ok {
 		// TODO(mdempsky): Reconsider this diagnostic's wording, if it's
 		// to be included in Go 1.20.
@@ -85,7 +85,7 @@ func unified(noders []*noder) {
 	typecheck.TypecheckAllowed = true
 
 	localPkgReader = newPkgReader(pkgbits.NewPkgDecoder(types.LocalPkg.Path, data))
-	readPackage(localPkgReader, types.LocalPkg)
+	readPackage(localPkgReader, types.LocalPkg, true)
 
 	r := localPkgReader.newReader(pkgbits.RelocMeta, pkgbits.PrivateRootIdx, pkgbits.SyncPrivate)
 	r.pkgInit(types.LocalPkg, target)
@@ -226,29 +226,54 @@ func freePackage(pkg *types2.Package) {
 
 // readPackage reads package export data from pr to populate
 // importpkg.
-func readPackage(pr *pkgReader, importpkg *types.Pkg) {
-	r := pr.newReader(pkgbits.RelocMeta, pkgbits.PublicRootIdx, pkgbits.SyncPublic)
+//
+// localStub indicates whether pr is reading the stub export data for
+// the local package, as opposed to relocated export data for an
+// import.
+func readPackage(pr *pkgReader, importpkg *types.Pkg, localStub bool) {
+	{
+		r := pr.newReader(pkgbits.RelocMeta, pkgbits.PublicRootIdx, pkgbits.SyncPublic)
 
 		pkg := r.pkg()
 		base.Assertf(pkg == importpkg, "have package %q (%p), want package %q (%p)", pkg.Path, pkg, importpkg.Path, importpkg)
 
 		if r.Bool() {
 			sym := pkg.Lookup(".inittask")
 			task := ir.NewNameAt(src.NoXPos, sym)
 			task.Class = ir.PEXTERN
 			sym.Def = task
+		}
+
+		for i, n := 0, r.Len(); i < n; i++ {
+			r.Sync(pkgbits.SyncObject)
+			assert(!r.Bool())
+			idx := r.Reloc(pkgbits.RelocObj)
+			assert(r.Len() == 0)
+
+			path, name, code := r.p.PeekObj(idx)
+			if code != pkgbits.ObjStub {
+				objReader[types.NewPkg(path, "").Lookup(name)] = pkgReaderIndex{pr, idx, nil}
+			}
+		}
+
+		r.Sync(pkgbits.SyncEOF)
 	}
 
-	for i, n := 0, r.Len(); i < n; i++ {
-		r.Sync(pkgbits.SyncObject)
-		assert(!r.Bool())
-		idx := r.Reloc(pkgbits.RelocObj)
-		assert(r.Len() == 0)
+	if !localStub {
+		r := pr.newReader(pkgbits.RelocMeta, pkgbits.PrivateRootIdx, pkgbits.SyncPrivate)
 
-		path, name, code := r.p.PeekObj(idx)
-		if code != pkgbits.ObjStub {
-			objReader[types.NewPkg(path, "").Lookup(name)] = pkgReaderIndex{pr, idx, nil}
+		for i, n := 0, r.Len(); i < n; i++ {
+			path := r.String()
+			name := r.String()
+			idx := r.Reloc(pkgbits.RelocBody)
+
+			sym := types.NewPkg(path, "").Lookup(name)
+			if _, ok := importBodyReader[sym]; !ok {
+				importBodyReader[sym] = pkgReaderIndex{pr, idx, nil}
+			}
 		}
+
+		r.Sync(pkgbits.SyncEOF)
 	}
 }
@@ -258,12 +283,15 @@ func writeUnifiedExport(out io.Writer) {
 	l := linker{
 		pw: pkgbits.NewPkgEncoder(base.Debug.SyncFrames),
 
 		pkgs:  make(map[string]pkgbits.Index),
 		decls: make(map[*types.Sym]pkgbits.Index),
+		bodies: make(map[*types.Sym]pkgbits.Index),
 	}
 
 	publicRootWriter := l.pw.NewEncoder(pkgbits.RelocMeta, pkgbits.SyncPublic)
+	privateRootWriter := l.pw.NewEncoder(pkgbits.RelocMeta, pkgbits.SyncPrivate)
 	assert(publicRootWriter.Idx == pkgbits.PublicRootIdx)
+	assert(privateRootWriter.Idx == pkgbits.PrivateRootIdx)
 
 	var selfPkgIdx pkgbits.Index
 
@@ -320,5 +348,29 @@ func writeUnifiedExport(out io.Writer) {
 		w.Flush()
 	}
 
+	{
+		type symIdx struct {
+			sym *types.Sym
+			idx pkgbits.Index
+		}
+		var bodies []symIdx
+		for sym, idx := range l.bodies {
+			bodies = append(bodies, symIdx{sym, idx})
+		}
+		sort.Slice(bodies, func(i, j int) bool { return bodies[i].idx < bodies[j].idx })
+
+		w := privateRootWriter
+
+		w.Len(len(bodies))
+		for _, body := range bodies {
+			w.String(body.sym.Pkg.Path)
+			w.String(body.sym.Name)
+			w.Reloc(pkgbits.RelocBody, body.idx)
+		}
+
+		w.Sync(pkgbits.SyncEOF)
+		w.Flush()
+	}
+
 	base.Ctxt.Fingerprint = l.pw.DumpTo(out)
 }
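Taken together, the last hunk and the readPackage hunk above introduce a small section at the private root that records one (package path, symbol name, body index) triple per exported inline body. Below is a minimal sketch of that logical layout, using plain Go values as a stand-in for the pkgbits encoder/decoder; bodyRef, writeSection, and readSection are hypothetical names for this sketch only.

// Illustrative sketch of the logical layout of the new "bodies"
// section at the private root: a length, then one
// (pkgPath, name, bodyIdx) record per exported inline body.
// Hypothetical stand-in; the real code uses pkgbits as shown above.
package main

import "fmt"

type bodyRef struct {
	pkgPath string
	name    string
	bodyIdx int
}

// writeSection flattens the records in the same order the linker
// writes them: length first, then path, name, and body index.
func writeSection(refs []bodyRef) []any {
	out := []any{len(refs)}
	for _, r := range refs {
		out = append(out, r.pkgPath, r.name, r.bodyIdx)
	}
	return out
}

// readSection mirrors readPackage's loop over the private root.
func readSection(in []any) []bodyRef {
	n := in[0].(int)
	refs := make([]bodyRef, 0, n)
	for i := 0; i < n; i++ {
		refs = append(refs, bodyRef{
			pkgPath: in[1+3*i].(string),
			name:    in[2+3*i].(string),
			bodyIdx: in[3+3*i].(int),
		})
	}
	return refs
}

func main() {
	refs := []bodyRef{{"example.com/a", "Helper", 3}, {"example.com/b", "T.M", 7}}
	fmt.Println(readSection(writeSection(refs)))
}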