
cmd/compile: get rid of "volatile" in SSA

A value is "volatile" if it is a pointer to the argument region
on the stack, which will be clobbered by a function call. This
flag was used to make sure the value stays safe when write
barrier calls are inserted. The writebarrier pass can itself tell
whether a value is such a pointer, so there is no need to mark it
while building SSA and thread this information through.

Passes "toolstash -cmp" on std.

Updates #17583.

Change-Id: Idc5fc0d710152b94b3c504ce8db55ea9ff5b5195
Reviewed-on: https://go-review.googlesource.com/36835
Run-TryBot: Cherry Zhang <cherryyz@google.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Keith Randall <khr@golang.org>
Cherry Zhang 2017-02-02 19:47:59 -05:00
parent 4775b7feb1
commit 5bfd1ef036
4 changed files with 69 additions and 81 deletions
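
For context, a minimal Go sketch (not part of the commit; the type and names below are illustrative) of the situation the commit message describes: a call returning a non-SSA-able value leaves its result in the caller's outargs area, and if that result must then be stored through a pointer that needs a write barrier, the write barrier call itself would clobber the outargs area, so the source has to be copied to a temporary first. That source pointer is what used to be flagged "volatile".

package volatileexample

// big is deliberately not SSA-able (the array keeps it out of SSA form),
// so a call returning it leaves the result in the caller's outargs area.
type big struct {
	p   *int
	buf [64]byte
}

func f() big { return big{} }

var sink *big // heap-reachable destination: storing to *sink needs a write barrier

func store() {
	// The source of this move is the call's result slot on the stack.
	// Emitting the write barrier (a typedmemmove call) would clobber that
	// slot, so the compiler copies the result to a temporary first. After
	// this CL, the writebarrier pass detects the situation itself instead
	// of relying on a "volatile" flag threaded through SSA construction.
	*sink = f()
}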

src/cmd/compile/internal/gc/ssa.go

@@ -553,8 +553,8 @@ func (s *state) stmt(n *Node) {
deref = true
res = res.Args[0]
}
s.assign(n.List.First(), res, needwritebarrier(n.List.First()), deref, 0, false)
s.assign(n.List.Second(), resok, false, false, 0, false)
s.assign(n.List.First(), res, needwritebarrier(n.List.First()), deref, 0)
s.assign(n.List.Second(), resok, false, false, 0)
return
case OAS2FUNC:
@@ -565,8 +565,8 @@ func (s *state) stmt(n *Node) {
v := s.intrinsicCall(n.Rlist.First())
v1 := s.newValue1(ssa.OpSelect0, n.List.First().Type, v)
v2 := s.newValue1(ssa.OpSelect1, n.List.Second().Type, v)
s.assign(n.List.First(), v1, needwritebarrier(n.List.First()), false, 0, false)
s.assign(n.List.Second(), v2, needwritebarrier(n.List.Second()), false, 0, false)
s.assign(n.List.First(), v1, needwritebarrier(n.List.First()), false, 0)
s.assign(n.List.Second(), v2, needwritebarrier(n.List.Second()), false, 0)
return
case ODCL:
@@ -682,14 +682,13 @@ func (s *state) stmt(n *Node) {
}
}
var r *ssa.Value
var isVolatile bool
needwb := n.Right != nil && needwritebarrier(n.Left)
deref := !canSSAType(t)
if deref {
if rhs == nil {
r = nil // Signal assign to use OpZero.
} else {
r, isVolatile = s.addr(rhs, false)
r = s.addr(rhs, false)
}
} else {
if rhs == nil {
@@ -741,7 +740,7 @@ func (s *state) stmt(n *Node) {
}
}
s.assign(n.Left, r, needwb, deref, skip, isVolatile)
s.assign(n.Left, r, needwb, deref, skip)
case OIF:
bThen := s.f.NewBlock(ssa.BlockPlain)
@@ -1427,10 +1426,10 @@ func (s *state) expr(n *Node) *ssa.Value {
if s.canSSA(n) {
return s.variable(n, n.Type)
}
addr, _ := s.addr(n, false)
addr := s.addr(n, false)
return s.newValue2(ssa.OpLoad, n.Type, addr, s.mem())
case OCLOSUREVAR:
addr, _ := s.addr(n, false)
addr := s.addr(n, false)
return s.newValue2(ssa.OpLoad, n.Type, addr, s.mem())
case OLITERAL:
switch u := n.Val().U.(type) {
@@ -1927,9 +1926,7 @@ func (s *state) expr(n *Node) *ssa.Value {
return s.expr(n.Left)
case OADDR:
a, _ := s.addr(n.Left, n.Bounded)
// Note we know the volatile result is false because you can't write &f() in Go.
return a
return s.addr(n.Left, n.Bounded)
case OINDREGSP:
addr := s.entryNewValue1I(ssa.OpOffPtr, ptrto(n.Type), n.Xoffset, s.sp)
@@ -1954,7 +1951,7 @@ func (s *state) expr(n *Node) *ssa.Value {
}
return s.zeroVal(n.Type)
}
p, _ := s.addr(n, false)
p := s.addr(n, false)
return s.newValue2(ssa.OpLoad, n.Type, p, s.mem())
case ODOTPTR:
@@ -1987,7 +1984,7 @@ func (s *state) expr(n *Node) *ssa.Value {
}
return s.newValue2(ssa.OpLoad, Types[TUINT8], ptr, s.mem())
case n.Left.Type.IsSlice():
p, _ := s.addr(n, false)
p := s.addr(n, false)
return s.newValue2(ssa.OpLoad, n.Left.Type.Elem(), p, s.mem())
case n.Left.Type.IsArray():
if bound := n.Left.Type.NumElem(); bound <= 1 {
@@ -2006,7 +2003,7 @@ func (s *state) expr(n *Node) *ssa.Value {
s.boundsCheck(i, s.constInt(Types[TINT], bound))
return s.newValue1I(ssa.OpArraySelect, n.Type, 0, a)
}
p, _ := s.addr(n, false)
p := s.addr(n, false)
return s.newValue2(ssa.OpLoad, n.Left.Type.Elem(), p, s.mem())
default:
s.Fatalf("bad type for index %v", n.Left.Type)
@@ -2158,7 +2155,7 @@ func (s *state) append(n *Node, inplace bool) *ssa.Value {
var slice, addr *ssa.Value
if inplace {
addr, _ = s.addr(sn, false)
addr = s.addr(sn, false)
slice = s.newValue2(ssa.OpLoad, n.Type, addr, s.mem())
} else {
slice = s.expr(sn)
@@ -2235,19 +2232,17 @@ func (s *state) append(n *Node, inplace bool) *ssa.Value {
// Evaluate args
type argRec struct {
// if store is true, we're appending the value v. If false, we're appending the
// value at *v. If store==false, isVolatile reports whether the source
// is in the outargs section of the stack frame.
// value at *v.
v *ssa.Value
store bool
isVolatile bool
}
args := make([]argRec, 0, nargs)
for _, n := range n.List.Slice()[1:] {
if canSSAType(n.Type) {
args = append(args, argRec{v: s.expr(n), store: true})
} else {
v, isVolatile := s.addr(n, false)
args = append(args, argRec{v: v, isVolatile: isVolatile})
v := s.addr(n, false)
args = append(args, argRec{v: v})
}
}
@@ -2269,7 +2264,7 @@ func (s *state) append(n *Node, inplace bool) *ssa.Value {
}
} else {
if haspointers(et) {
s.insertWBmove(et, addr, arg.v, arg.isVolatile)
s.insertWBmove(et, addr, arg.v)
} else {
s.vars[&memVar] = s.newValue3I(ssa.OpMove, ssa.TypeMem, sizeAlignAuxInt(et), addr, arg.v, s.mem())
}
@@ -2343,10 +2338,9 @@ const (
// Right has already been evaluated to ssa, left has not.
// If deref is true, then we do left = *right instead (and right has already been nil-checked).
// If deref is true and right == nil, just do left = 0.
// If deref is true, rightIsVolatile reports whether right points to volatile (clobbered by a call) storage.
// Include a write barrier if wb is true.
// skip indicates assignments (at the top level) that can be avoided.
func (s *state) assign(left *Node, right *ssa.Value, wb, deref bool, skip skipMask, rightIsVolatile bool) {
func (s *state) assign(left *Node, right *ssa.Value, wb, deref bool, skip skipMask) {
if left.Op == ONAME && isblank(left) {
return
}
@@ -2387,7 +2381,7 @@ func (s *state) assign(left *Node, right *ssa.Value, wb, deref bool, skip skipMa
}
// Recursively assign the new value we've made to the base of the dot op.
s.assign(left.Left, new, false, false, 0, rightIsVolatile)
s.assign(left.Left, new, false, false, 0)
// TODO: do we need to update named values here?
return
}
@@ -2412,7 +2406,7 @@ func (s *state) assign(left *Node, right *ssa.Value, wb, deref bool, skip skipMa
i = s.extendIndex(i, panicindex)
s.boundsCheck(i, s.constInt(Types[TINT], 1))
v := s.newValue1(ssa.OpArrayMake1, t, right)
s.assign(left.Left, v, false, false, 0, rightIsVolatile)
s.assign(left.Left, v, false, false, 0)
return
}
// Update variable assignment.
@@ -2421,14 +2415,14 @@ func (s *state) assign(left *Node, right *ssa.Value, wb, deref bool, skip skipMa
return
}
// Left is not ssa-able. Compute its address.
addr, _ := s.addr(left, false)
addr := s.addr(left, false)
if left.Op == ONAME && skip == 0 {
s.vars[&memVar] = s.newValue1A(ssa.OpVarDef, ssa.TypeMem, left, s.mem())
}
if deref {
// Treat as a mem->mem move.
if wb && !ssa.IsStackAddr(addr) {
s.insertWBmove(t, addr, right, rightIsVolatile)
s.insertWBmove(t, addr, right)
return
}
if right == nil {
@@ -3066,12 +3060,10 @@ func (s *state) lookupSymbol(n *Node, sym interface{}) interface{} {
}
// addr converts the address of the expression n to SSA, adds it to s and returns the SSA result.
// Also returns a bool reporting whether the returned value is "volatile", that is it
// points to the outargs section and thus the referent will be clobbered by any call.
// The value that the returned Value represents is guaranteed to be non-nil.
// If bounded is true then this address does not require a nil check for its operand
// even if that would otherwise be implied.
func (s *state) addr(n *Node, bounded bool) (*ssa.Value, bool) {
func (s *state) addr(n *Node, bounded bool) *ssa.Value {
t := ptrto(n.Type)
switch n.Op {
case ONAME:
@@ -3084,36 +3076,36 @@ func (s *state) addr(n *Node, bounded bool) (*ssa.Value, bool) {
if n.Xoffset != 0 {
v = s.entryNewValue1I(ssa.OpOffPtr, v.Type, n.Xoffset, v)
}
return v, false
return v
case PPARAM:
// parameter slot
v := s.decladdrs[n]
if v != nil {
return v, false
return v
}
if n == nodfp {
// Special arg that points to the frame pointer (Used by ORECOVER).
aux := s.lookupSymbol(n, &ssa.ArgSymbol{Typ: n.Type, Node: n})
return s.entryNewValue1A(ssa.OpAddr, t, aux, s.sp), false
return s.entryNewValue1A(ssa.OpAddr, t, aux, s.sp)
}
s.Fatalf("addr of undeclared ONAME %v. declared: %v", n, s.decladdrs)
return nil, false
return nil
case PAUTO:
aux := s.lookupSymbol(n, &ssa.AutoSymbol{Typ: n.Type, Node: n})
return s.newValue1A(ssa.OpAddr, t, aux, s.sp), false
return s.newValue1A(ssa.OpAddr, t, aux, s.sp)
case PPARAMOUT: // Same as PAUTO -- cannot generate LEA early.
// ensure that we reuse symbols for out parameters so
// that cse works on their addresses
aux := s.lookupSymbol(n, &ssa.ArgSymbol{Typ: n.Type, Node: n})
return s.newValue1A(ssa.OpAddr, t, aux, s.sp), false
return s.newValue1A(ssa.OpAddr, t, aux, s.sp)
default:
s.Fatalf("variable address class %v not implemented", classnames[n.Class])
return nil, false
return nil
}
case OINDREGSP:
// indirect off REGSP
// used for storing/loading arguments/returns to/from callees
return s.entryNewValue1I(ssa.OpOffPtr, t, n.Xoffset, s.sp), true
return s.entryNewValue1I(ssa.OpOffPtr, t, n.Xoffset, s.sp)
case OINDEX:
if n.Left.Type.IsSlice() {
a := s.expr(n.Left)
@@ -3124,33 +3116,33 @@ func (s *state) addr(n *Node, bounded bool) (*ssa.Value, bool) {
s.boundsCheck(i, len)
}
p := s.newValue1(ssa.OpSlicePtr, t, a)
return s.newValue2(ssa.OpPtrIndex, t, p, i), false
return s.newValue2(ssa.OpPtrIndex, t, p, i)
} else { // array
a, isVolatile := s.addr(n.Left, bounded)
a := s.addr(n.Left, bounded)
i := s.expr(n.Right)
i = s.extendIndex(i, panicindex)
len := s.constInt(Types[TINT], n.Left.Type.NumElem())
if !n.Bounded {
s.boundsCheck(i, len)
}
return s.newValue2(ssa.OpPtrIndex, ptrto(n.Left.Type.Elem()), a, i), isVolatile
return s.newValue2(ssa.OpPtrIndex, ptrto(n.Left.Type.Elem()), a, i)
}
case OIND:
return s.exprPtr(n.Left, bounded, n.Pos), false
return s.exprPtr(n.Left, bounded, n.Pos)
case ODOT:
p, isVolatile := s.addr(n.Left, bounded)
return s.newValue1I(ssa.OpOffPtr, t, n.Xoffset, p), isVolatile
p := s.addr(n.Left, bounded)
return s.newValue1I(ssa.OpOffPtr, t, n.Xoffset, p)
case ODOTPTR:
p := s.exprPtr(n.Left, bounded, n.Pos)
return s.newValue1I(ssa.OpOffPtr, t, n.Xoffset, p), false
return s.newValue1I(ssa.OpOffPtr, t, n.Xoffset, p)
case OCLOSUREVAR:
return s.newValue1I(ssa.OpOffPtr, t, n.Xoffset,
s.entryNewValue0(ssa.OpGetClosurePtr, ptrto(Types[TUINT8]))), false
s.entryNewValue0(ssa.OpGetClosurePtr, ptrto(Types[TUINT8])))
case OCONVNOP:
addr, isVolatile := s.addr(n.Left, bounded)
return s.newValue1(ssa.OpCopy, t, addr), isVolatile // ensure that addr has the right type
addr := s.addr(n.Left, bounded)
return s.newValue1(ssa.OpCopy, t, addr) // ensure that addr has the right type
case OCALLFUNC, OCALLINTER, OCALLMETH:
return s.call(n, callNormal), true
return s.call(n, callNormal)
case ODOTTYPE:
v, _ := s.dottype(n, false)
if v.Op != ssa.OpLoad {
@@ -3159,10 +3151,10 @@ func (s *state) addr(n *Node, bounded bool) (*ssa.Value, bool) {
if v.Args[1] != s.mem() {
s.Fatalf("memory no longer live from dottype load")
}
return v.Args[0], false
return v.Args[0]
default:
s.Fatalf("unhandled addr %v", n.Op)
return nil, false
return nil
}
}
@@ -3400,7 +3392,7 @@ func (s *state) rtcall(fn *obj.LSym, returns bool, results []*Type, args ...*ssa
// insertWBmove inserts the assignment *left = *right including a write barrier.
// t is the type being assigned.
// If right == nil, then we're zeroing *left.
func (s *state) insertWBmove(t *Type, left, right *ssa.Value, rightIsVolatile bool) {
func (s *state) insertWBmove(t *Type, left, right *ssa.Value) {
// if writeBarrier.enabled {
// typedmemmove(&t, left, right)
// } else {
@@ -3426,13 +3418,7 @@ func (s *state) insertWBmove(t *Type, left, right *ssa.Value, rightIsVolatile bo
if right == nil {
val = s.newValue2I(ssa.OpZeroWB, ssa.TypeMem, sizeAlignAuxInt(t), left, s.mem())
} else {
var op ssa.Op
if rightIsVolatile {
op = ssa.OpMoveWBVolatile
} else {
op = ssa.OpMoveWB
}
val = s.newValue3I(op, ssa.TypeMem, sizeAlignAuxInt(t), left, right, s.mem())
val = s.newValue3I(ssa.OpMoveWB, ssa.TypeMem, sizeAlignAuxInt(t), left, right, s.mem())
}
val.Aux = &ssa.ExternSymbol{Typ: Types[TUINTPTR], Sym: Linksym(typenamesym(t))}
s.vars[&memVar] = val
@@ -4109,7 +4095,7 @@ func (s *state) dottype(n *Node, commaok bool) (res, resok *ssa.Value) {
// unSSAable type, use temporary.
// TODO: get rid of some of these temporaries.
tmp = temp(n.Type)
addr, _ = s.addr(tmp, false)
addr = s.addr(tmp, false)
s.vars[&memVar] = s.newValue1A(ssa.OpVarDef, ssa.TypeMem, tmp, s.mem())
}

src/cmd/compile/internal/ssa/gen/genericOps.go

@@ -300,7 +300,6 @@ var genericOps = []opData{
// Expand to runtime calls. Write barrier will be removed if write on stack.
{name: "StoreWB", argLength: 3, typ: "Mem", aux: "Int64"}, // Store arg1 to arg0. arg2=memory, auxint=size. Returns memory.
{name: "MoveWB", argLength: 3, typ: "Mem", aux: "SymSizeAndAlign"}, // arg0=destptr, arg1=srcptr, arg2=mem, auxint=size+alignment, aux=symbol-of-type (for typedmemmove). Returns memory.
{name: "MoveWBVolatile", argLength: 3, typ: "Mem", aux: "SymSizeAndAlign"}, // arg0=destptr, arg1=srcptr, arg2=mem, auxint=size+alignment, aux=symbol-of-type (for typedmemmove). Returns memory. Src is volatile, i.e. needs to move to a temp space before calling typedmemmove.
{name: "ZeroWB", argLength: 2, typ: "Mem", aux: "SymSizeAndAlign"}, // arg0=destptr, arg1=mem, auxint=size+alignment, aux=symbol-of-type. Returns memory.
// Function calls. Arguments to the call have already been written to the stack.

src/cmd/compile/internal/ssa/opGen.go

@@ -1818,7 +1818,6 @@ const (
OpZero
OpStoreWB
OpMoveWB
OpMoveWBVolatile
OpZeroWB
OpClosureCall
OpStaticCall
@@ -21623,12 +21622,6 @@ var opcodeTable = [...]opInfo{
argLen: 3,
generic: true,
},
{
name: "MoveWBVolatile",
auxType: auxSymSizeAndAlign,
argLen: 3,
generic: true,
},
{
name: "ZeroWB",
auxType: auxSymSizeAndAlign,

src/cmd/compile/internal/ssa/writebarrier.go

@@ -35,12 +35,12 @@ func writebarrier(f *Func) {
hasStore := false
for _, v := range b.Values {
switch v.Op {
case OpStoreWB, OpMoveWB, OpMoveWBVolatile, OpZeroWB:
case OpStoreWB, OpMoveWB, OpZeroWB:
if IsStackAddr(v.Args[0]) {
switch v.Op {
case OpStoreWB:
v.Op = OpStore
case OpMoveWB, OpMoveWBVolatile:
case OpMoveWB:
v.Op = OpMove
v.Aux = nil
case OpZeroWB:
@@ -103,7 +103,7 @@ func writebarrier(f *Func) {
values := b.Values
for i := len(values) - 1; i >= 0; i-- {
w := values[i]
if w.Op == OpStoreWB || w.Op == OpMoveWB || w.Op == OpMoveWBVolatile || w.Op == OpZeroWB {
if w.Op == OpStoreWB || w.Op == OpMoveWB || w.Op == OpZeroWB {
if last == nil {
last = w
end = i + 1
@@ -159,7 +159,7 @@ func writebarrier(f *Func) {
var val *Value
ptr := w.Args[0]
siz := w.AuxInt
typ := w.Aux // only non-nil for MoveWB, MoveWBVolatile, ZeroWB
typ := w.Aux // only non-nil for MoveWB, ZeroWB
pos = w.Pos
var op Op
@@ -169,7 +169,7 @@ func writebarrier(f *Func) {
op = OpStore
fn = writebarrierptr
val = w.Args[1]
case OpMoveWB, OpMoveWBVolatile:
case OpMoveWB:
op = OpMove
fn = typedmemmove
val = w.Args[1]
@@ -179,7 +179,8 @@ func writebarrier(f *Func) {
}
// then block: emit write barrier call
memThen = wbcall(pos, bThen, fn, typ, ptr, val, memThen, sp, sb, w.Op == OpMoveWBVolatile)
volatile := w.Op == OpMoveWB && isVolatile(val)
memThen = wbcall(pos, bThen, fn, typ, ptr, val, memThen, sp, sb, volatile)
// else block: normal store
if op == OpZero {
@@ -223,7 +224,7 @@ func writebarrier(f *Func) {
// if we have more stores in this block, do this block again
for _, w := range b.Values {
if w.Op == OpStoreWB || w.Op == OpMoveWB || w.Op == OpMoveWBVolatile || w.Op == OpZeroWB {
if w.Op == OpStoreWB || w.Op == OpMoveWB || w.Op == OpZeroWB {
goto again
}
}
@@ -303,3 +304,12 @@ func IsStackAddr(v *Value) bool {
}
return false
}
// isVolatile reports whether v is a pointer to the argument region on the
// stack, which will be clobbered by a function call.
func isVolatile(v *Value) bool {
for v.Op == OpOffPtr || v.Op == OpAddPtr || v.Op == OpPtrIndex || v.Op == OpCopy {
v = v.Args[0]
}
return v.Op == OpSP
}
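
For intuition, the check added above simply strips pointer arithmetic and copies off a value and asks whether the base is the stack pointer. Below is a standalone sketch with simplified stand-ins for the SSA Op/Value types (these are not the compiler's real definitions), showing which shapes the check accepts and rejects.

package main

import "fmt"

// op and value are simplified stand-ins for the compiler's ssa.Op and
// ssa.Value; they exist only to make the walk below runnable.
type op string

const (
	opSP       op = "SP"       // stack pointer
	opAddr     op = "Addr"     // address of a symbol
	opOffPtr   op = "OffPtr"   // pointer plus constant offset
	opAddPtr   op = "AddPtr"   // pointer plus variable offset
	opPtrIndex op = "PtrIndex" // pointer indexing
	opCopy     op = "Copy"
)

type value struct {
	op   op
	args []*value
}

// isVolatile mirrors the logic of the function above: walk through pointer
// arithmetic and copies and report whether the base is SP, i.e. whether the
// pointer points into the argument region of the current frame.
func isVolatile(v *value) bool {
	for v.op == opOffPtr || v.op == opAddPtr || v.op == opPtrIndex || v.op == opCopy {
		v = v.args[0]
	}
	return v.op == opSP
}

func main() {
	sp := &value{op: opSP}
	outArg := &value{op: opOffPtr, args: []*value{sp}}           // points into the outargs area
	global := &value{op: opOffPtr, args: []*value{{op: opAddr}}} // points into a global
	fmt.Println(isVolatile(outArg), isVolatile(global))          // true false
}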