
[dev.regabi] cmd/compile: move Node.Opt to Name

Escape analysis uses Node.Opt to map nodes to their "location", so
that other references to the same node use the same location
again. But in the current implementation of escape analysis, we never
need to refer back to a node's location except for named nodes (since
other nodes are anonymous, and have no way to be referenced).

This CL moves Opt from Node down to Name, turns it into a directly
accessed field, and cleans up escape analysis to avoid setting Opt on
non-named expressions.
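To illustrate the pattern the CL converges on, here is a small self-contained sketch; the types and helpers below are simplified stand-ins, not the real ir and escape packages. Only the named-node type carries an Opt field, escape analysis stores a *location in it directly, and later references to the same name recover that location through the same field.

package main

import "fmt"

// location is a stand-in for escape analysis's per-variable record.
type location struct {
	loopDepth int
}

// Name is a stand-in for ir.Name, the only node kind that still needs Opt.
type Name struct {
	Sym string
	Opt interface{} // for use by escape analysis
}

// newLoc mirrors the allocation side of the pattern: attach a fresh
// location to a name, refusing to attach one twice.
func newLoc(n *Name, depth int) *location {
	if n.Opt != nil {
		panic(fmt.Sprintf("%s already has a location", n.Sym))
	}
	loc := &location{loopDepth: depth}
	n.Opt = loc
	return loc
}

// oldLoc mirrors the lookup side: later references to the same name
// recover the same location through the field.
func oldLoc(n *Name) *location {
	return n.Opt.(*location)
}

func main() {
	x := &Name{Sym: "x"}
	newLoc(x, 1)
	fmt.Println("ld =", oldLoc(x).loopDepth) // ld = 1
}

In the real change the field is simply declared on Name as "Opt interface{} // for use by escape analysis", as the Name struct hunk in the diff below shows.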

One nit: in walkCheckPtrArithmetic, we were abusing Opt as a way to
detect/prevent loops. This CL adds a CheckPtr bit flag instead.
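For readers unfamiliar with the flag machinery, the sketch below shows the general shape of that approach; it is a simplified stand-in, not the real ir package. One bit in a one-byte flag set marks the expression while it is being rewritten, and the recursive walk that the rewrite triggers sees the mark and returns early instead of looping.

package main

import "fmt"

// bitset8 and its set method are modeled on the compiler's one-byte flag
// sets; the names here are illustrative, not the real ir declarations.
type bitset8 uint8

func (f *bitset8) set(mask bitset8, b bool) {
	if b {
		*f |= mask
	} else {
		*f &^= mask
	}
}

const exprCheckPtr bitset8 = 1 << 0

// ConvExpr stands in for the conversion node that carries the flag.
type ConvExpr struct {
	flags bitset8
}

func (n *ConvExpr) CheckPtr() bool     { return n.flags&exprCheckPtr != 0 }
func (n *ConvExpr) SetCheckPtr(b bool) { n.flags.set(exprCheckPtr, b) }

// rewrite mimics the guard in walkCheckPtrArithmetic: the flag is held for
// the duration of the rewrite and cleared on the way out, so a re-entrant
// call on the same node bails out instead of recursing forever.
func rewrite(n *ConvExpr, depth int) {
	if n.CheckPtr() {
		fmt.Println("already rewriting; bailing out at depth", depth)
		return
	}
	n.SetCheckPtr(true)
	defer n.SetCheckPtr(false)
	rewrite(n, depth+1) // stands in for the recursive walk that re-enters
}

func main() {
	n := &ConvExpr{}
	rewrite(n, 0)
	fmt.Println("flag cleared afterwards:", !n.CheckPtr()) // true
}

Compared with stashing a sentinel value in Opt, a dedicated bit costs nothing once Opt is gone from the shared expression header, and it cannot collide with any other user of the field.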

Passes toolstash -cmp.

Change-Id: If57d5ad8d972fa63bedbe69b9ebb6753e31aba85
Reviewed-on: https://go-review.googlesource.com/c/go/+/280638
Trust: Matthew Dempsky <mdempsky@google.com>
Run-TryBot: Matthew Dempsky <mdempsky@google.com>
Reviewed-by: Cuong Manh Le <cuong.manhle.vn@gmail.com>
TryBot-Result: Go Bot <gobot@golang.org>
Author: Matthew Dempsky 2020-12-28 17:30:04 -08:00
parent 6acbae4fcc
commit 289da2b33e
7 changed files with 38 additions and 39 deletions

@@ -165,12 +165,16 @@ func Fmt(n ir.Node) string {
 		text = fmt.Sprintf("esc(%d)", n.Esc())
 	}

-	if e, ok := n.Opt().(*location); ok && e.loopDepth != 0 {
-		if text != "" {
-			text += " "
+	if n.Op() == ir.ONAME {
+		n := n.(*ir.Name)
+		if e, ok := n.Opt.(*location); ok && e.loopDepth != 0 {
+			if text != "" {
+				text += " "
+			}
+			text += fmt.Sprintf("ld(%d)", e.loopDepth)
 		}
-		text += fmt.Sprintf("ld(%d)", e.loopDepth)
 	}

 	return text
 }
@@ -312,7 +316,7 @@ func (e *escape) stmt(n ir.Node) {
 		// Record loop depth at declaration.
 		n := n.(*ir.Decl)
 		if !ir.IsBlank(n.X) {
-			e.dcl(n.X)
+			e.dcl(n.X.(*ir.Name))
 		}

 	case ir.OLABEL:
@@ -370,7 +374,7 @@ func (e *escape) stmt(n ir.Node) {
 		var ks []hole
 		for _, cas := range n.Cases { // cases
 			if typesw && n.Tag.(*ir.TypeSwitchGuard).Tag != nil {
-				cv := cas.Var
+				cv := cas.Var.(*ir.Name)
 				k := e.dcl(cv) // type switch variables have no ODCL.
 				if cv.Type().HasPointers() {
 					ks = append(ks, k.dotType(cv.Type(), cas, "switch case"))
@@ -1097,7 +1101,7 @@ func (e *escape) teeHole(ks ...hole) hole {
 	return loc.asHole()
 }

-func (e *escape) dcl(n ir.Node) hole {
+func (e *escape) dcl(n *ir.Name) hole {
 	loc := e.oldLoc(n)
 	loc.loopDepth = e.loopDepth
 	return loc.asHole()
@@ -1151,15 +1155,17 @@ func (e *escape) newLoc(n ir.Node, transient bool) *location {
 	}
 	e.allLocs = append(e.allLocs, loc)
 	if n != nil {
-		if n.Op() == ir.ONAME && n.Name().Curfn != e.curfn {
+		if n.Op() == ir.ONAME {
 			n := n.(*ir.Name)
-			base.Fatalf("curfn mismatch: %v != %v", n.Name().Curfn, e.curfn)
-		}
+			if n.Curfn != e.curfn {
+				base.Fatalf("curfn mismatch: %v != %v", n.Name().Curfn, e.curfn)
+			}

-		if n.Opt() != nil {
-			base.Fatalf("%v already has a location", n)
+			if n.Opt != nil {
+				base.Fatalf("%v already has a location", n)
+			}
+			n.Opt = loc
 		}
-		n.SetOpt(loc)

 		if why := HeapAllocReason(n); why != "" {
 			e.flow(e.heapHole().addr(n, why), loc)
@@ -1168,9 +1174,9 @@ func (e *escape) newLoc(n ir.Node, transient bool) *location {
 	return loc
 }

-func (e *escape) oldLoc(n ir.Node) *location {
-	n = canonicalNode(n)
-	return n.Opt().(*location)
+func (e *escape) oldLoc(n *ir.Name) *location {
+	n = canonicalNode(n).(*ir.Name)
+	return n.Opt.(*location)
 }

 func (l *location) asHole() hole {
@@ -1516,7 +1522,10 @@ func (e *escape) finish(fns []*ir.Func) {
 		if n == nil {
 			continue
 		}
-		n.SetOpt(nil)
+		if n.Op() == ir.ONAME {
+			n := n.(*ir.Name)
+			n.Opt = nil
+		}

 		// Update n.Esc based on escape analysis results.
@@ -2122,7 +2131,7 @@ func (e *escape) paramTag(fn *ir.Func, narg int, f *types.Field) string {
 		return esc.Encode()
 	}

-	n := ir.AsNode(f.Nname)
+	n := f.Nname.(*ir.Name)
 	loc := e.oldLoc(n)
 	esc := loc.paramEsc
 	esc.Optimize()

@@ -48,8 +48,7 @@ type Expr interface {
 type miniExpr struct {
 	miniNode
 	typ   *types.Type
 	init  Nodes // TODO(rsc): Don't require every Node to have an init
-	opt   interface{} // TODO(rsc): Don't require every Node to have an opt?
 	flags bitset8
 }
@@ -59,14 +58,13 @@
 	miniExprTransient
 	miniExprBounded
 	miniExprImplicit // for use by implementations; not supported by every Expr
+	miniExprCheckPtr
 )

 func (*miniExpr) isExpr() {}

 func (n *miniExpr) Type() *types.Type { return n.typ }
 func (n *miniExpr) SetType(x *types.Type) { n.typ = x }
-func (n *miniExpr) Opt() interface{} { return n.opt }
-func (n *miniExpr) SetOpt(x interface{}) { n.opt = x }
 func (n *miniExpr) HasCall() bool { return n.flags&miniExprHasCall != 0 }
 func (n *miniExpr) SetHasCall(b bool) { n.flags.set(miniExprHasCall, b) }
 func (n *miniExpr) NonNil() bool { return n.flags&miniExprNonNil != 0 }
@@ -324,6 +322,8 @@ func NewConvExpr(pos src.XPos, op Op, typ *types.Type, x Node) *ConvExpr {
 func (n *ConvExpr) Implicit() bool { return n.flags&miniExprImplicit != 0 }
 func (n *ConvExpr) SetImplicit(b bool) { n.flags.set(miniExprImplicit, b) }
+func (n *ConvExpr) CheckPtr() bool { return n.flags&miniExprCheckPtr != 0 }
+func (n *ConvExpr) SetCheckPtr(b bool) { n.flags.set(miniExprCheckPtr, b) }

 func (n *ConvExpr) SetOp(op Op) {
 	switch op {

@@ -102,5 +102,3 @@ func (n *miniNode) HasCall() bool { return false }
 func (n *miniNode) SetHasCall(bool) { panic(n.no("SetHasCall")) }
 func (n *miniNode) NonNil() bool { return false }
 func (n *miniNode) MarkNonNil() { panic(n.no("MarkNonNil")) }
-func (n *miniNode) Opt() interface{} { return nil }
-func (n *miniNode) SetOpt(interface{}) { panic(n.no("SetOpt")) }

@@ -42,6 +42,7 @@ type Name struct {
 	Func *Func
 	Offset_ int64
 	val constant.Value
+	Opt interface{} // for use by escape analysis
 	orig Node
 	Embed *[]Embed // list of embedded files, for ONAME var
@@ -321,8 +322,7 @@ func (n *Name) Val() constant.Value {
 	return n.val
 }

-// SetVal sets the constant.Value for the node,
-// which must not have been used with SetOpt.
+// SetVal sets the constant.Value for the node.
 func (n *Name) SetVal(v constant.Value) {
 	if n.op != OLITERAL {
 		panic(n.no("SetVal"))

@@ -50,8 +50,6 @@ type Node interface {
 	SetEsc(x uint16)
 	Walkdef() uint8
 	SetWalkdef(x uint8)
-	Opt() interface{}
-	SetOpt(x interface{})
 	Diag() bool
 	SetDiag(x bool)
 	Typecheck() uint8

@@ -438,18 +438,14 @@ func walkCheckPtrAlignment(n *ir.ConvExpr, init *ir.Nodes, count ir.Node) ir.Nod
 }

 func walkCheckPtrArithmetic(n *ir.ConvExpr, init *ir.Nodes) ir.Node {
-	// Calling cheapexpr(n, init) below leads to a recursive call
-	// to walkexpr, which leads us back here again. Use n.Opt to
+	// Calling cheapexpr(n, init) below leads to a recursive call to
+	// walkexpr, which leads us back here again. Use n.Checkptr to
 	// prevent infinite loops.
-	if opt := n.Opt(); opt == &walkCheckPtrArithmeticMarker {
+	if n.CheckPtr() {
 		return n
-	} else if opt != nil {
-		// We use n.Opt() here because today it's not used for OCONVNOP. If that changes,
-		// there's no guarantee that temporarily replacing it is safe, so just hard fail here.
-		base.Fatalf("unexpected Opt: %v", opt)
 	}
-	n.SetOpt(&walkCheckPtrArithmeticMarker)
-	defer n.SetOpt(nil)
+	n.SetCheckPtr(true)
+	defer n.SetCheckPtr(false)

 	// TODO(mdempsky): Make stricter. We only need to exempt
 	// reflect.Value.Pointer and reflect.Value.UnsafeAddr.

@@ -377,8 +377,6 @@ func walkAppendArgs(n *ir.CallExpr, init *ir.Nodes) {
 var wrapCall_prgen int

-var walkCheckPtrArithmeticMarker byte
-
 // appendWalkStmt typechecks and walks stmt and then appends it to init.
 func appendWalkStmt(init *ir.Nodes, stmt ir.Node) {
 	op := stmt.Op()