1
0
mirror of https://github.com/golang/go synced 2024-11-05 18:36:10 -07:00

cmd/compile: remove some write barriers for stack writes

This, along with CL 30140, removes ~50% of stack write barriers
mentioned in issue #17330. The remaining ones are mostly due to Phi and
FwdRef, which are not resolved when building SSA. We might be
able to do it at a later stage where Phi and Copy propagations
are done, but matching an if-(store-store-call)+ sequence seems
not very pleasant.

Updates #17330.

Change-Id: Iaa36c7b1f4c4fc3dc10a27018a3b0e261094cb21
Reviewed-on: https://go-review.googlesource.com/30290
Run-TryBot: Cherry Zhang <cherryyz@google.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: David Chase <drchase@google.com>
This commit is contained in:
Cherry Zhang 2016-10-03 23:01:26 -04:00
parent 2f0b8f88df
commit 68331750da

View File

@ -2161,7 +2161,11 @@ func (s *state) append(n *Node, inplace bool) *ssa.Value {
} }
capaddr := s.newValue1I(ssa.OpOffPtr, pt, int64(array_cap), addr) capaddr := s.newValue1I(ssa.OpOffPtr, pt, int64(array_cap), addr)
s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.IntSize, capaddr, r[2], s.mem()) s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, s.config.IntSize, capaddr, r[2], s.mem())
if isStackAddr(addr) {
s.vars[&memVar] = s.newValue3I(ssa.OpStore, ssa.TypeMem, pt.Size(), addr, r[0], s.mem())
} else {
s.insertWBstore(pt, addr, r[0], n.Lineno, 0) s.insertWBstore(pt, addr, r[0], n.Lineno, 0)
}
// load the value we just stored to avoid having to spill it // load the value we just stored to avoid having to spill it
s.vars[&ptrVar] = s.newValue2(ssa.OpLoad, pt, addr, s.mem()) s.vars[&ptrVar] = s.newValue2(ssa.OpLoad, pt, addr, s.mem())
s.vars[&lenVar] = r[1] // avoid a spill in the fast path s.vars[&lenVar] = r[1] // avoid a spill in the fast path
@ -2359,7 +2363,7 @@ func (s *state) assign(left *Node, right *ssa.Value, wb, deref bool, line int32,
s.vars[&memVar] = s.newValue2I(ssa.OpZero, ssa.TypeMem, sizeAlignAuxInt(t), addr, s.mem()) s.vars[&memVar] = s.newValue2I(ssa.OpZero, ssa.TypeMem, sizeAlignAuxInt(t), addr, s.mem())
return return
} }
if wb { if wb && !isStackAddr(addr) {
s.insertWBmove(t, addr, right, line, rightIsVolatile) s.insertWBmove(t, addr, right, line, rightIsVolatile)
return return
} }
@ -2367,7 +2371,7 @@ func (s *state) assign(left *Node, right *ssa.Value, wb, deref bool, line int32,
return return
} }
// Treat as a store. // Treat as a store.
if wb { if wb && !isStackAddr(addr) {
if skip&skipPtr != 0 { if skip&skipPtr != 0 {
// Special case: if we don't write back the pointers, don't bother // Special case: if we don't write back the pointers, don't bother
// doing the write barrier check. // doing the write barrier check.
@ -3259,6 +3263,20 @@ func (s *state) rtcall(fn *Node, returns bool, results []*Type, args ...*ssa.Val
return res return res
} }
// isStackAddr reports whether v is known to be an address of a stack slot.
func isStackAddr(v *ssa.Value) bool {
	// Peel away pointer arithmetic and copies to reach the base address.
	for {
		switch v.Op {
		case ssa.OpOffPtr, ssa.OpAddPtr, ssa.OpPtrIndex, ssa.OpCopy:
			v = v.Args[0]
		case ssa.OpSP:
			// The stack pointer itself is trivially a stack address.
			return true
		case ssa.OpAddr:
			// An Addr based on SP names a stack slot.
			return v.Args[0].Op == ssa.OpSP
		default:
			return false
		}
	}
}
// insertWBmove inserts the assignment *left = *right including a write barrier. // insertWBmove inserts the assignment *left = *right including a write barrier.
// t is the type being assigned. // t is the type being assigned.
func (s *state) insertWBmove(t *Type, left, right *ssa.Value, line int32, rightIsVolatile bool) { func (s *state) insertWBmove(t *Type, left, right *ssa.Value, line int32, rightIsVolatile bool) {