
[dev.typeparams] cmd/compile: add morestack arg spilling code on ARM64

Spill arg registers before calling morestack, and reload after.

Change-Id: I09404def321b8f935d5e8836a46ccae8256d0d55
Reviewed-on: https://go-review.googlesource.com/c/go/+/322853
Trust: Cherry Mui <cherryyz@google.com>
Run-TryBot: Cherry Mui <cherryyz@google.com>
TryBot-Result: Go Bot <gobot@golang.org>
Reviewed-by: David Chase <drchase@google.com>
Cherry Mui 2021-05-25 18:40:28 -04:00
parent a4b2a04bc5
commit 4c68edd1fe
3 changed files with 20 additions and 8 deletions
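Background for the change: under the register-based calling convention, a function's arguments arrive in registers, and the stack-growth check at the top of the prologue may call runtime.morestack before any of the body has run. Those argument registers are live across that call, so they must be stored to their stack slots beforehand and reloaded afterwards, which is what this CL wires up for ARM64. The sketch below is a hypothetical example of the kind of function affected; the name, sizes, and register assignments are illustrative and not taken from the CL.

package main

import "fmt"

// growsum is a hypothetical example: its arguments are passed in registers
// under the register ABI (on ARM64 the integer args land in R0, R1, R2), and
// the large local array makes the prologue's stack check fail sooner, so
// runtime.morestack runs while those argument registers are still live.
//
//go:noinline
func growsum(n int, a, b int64) int64 {
	var pad [256]int64 // a big frame makes stack growth trigger earlier
	pad[n&255] = b
	if n == 0 {
		return a + pad[0]
	}
	return growsum(n-1, a, pad[n&255])
}

func main() {
	fmt.Println(growsum(1000, 40, 2)) // prints 42; the args must survive every stack growth
}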

src/cmd/compile/internal/arm64/ssa.go

@@ -162,7 +162,16 @@ func ssaGenValue(s *ssagen.State, v *ssa.Value) {
 		p.From.Reg = v.Args[0].Reg()
 		ssagen.AddrAuto(&p.To, v)
 	case ssa.OpArgIntReg, ssa.OpArgFloatReg:
-		// TODO: generate morestack spill code
+		// The assembler needs to wrap the entry safepoint/stack growth code with spill/unspill
+		// The loop only runs once.
+		for _, a := range v.Block.Func.RegArgs {
+			// Pass the spill/unspill information along to the assembler, offset by size of
+			// the saved LR slot.
+			addr := ssagen.SpillSlotAddr(a, arm64.REGSP, base.Ctxt.FixedFrameSize())
+			s.FuncInfo().AddSpill(
+				obj.RegSpill{Reg: a.Reg, Addr: addr, Unspill: loadByType(a.Type), Spill: storeByType(a.Type)})
+		}
+		v.Block.Func.RegArgs = nil
 		ssagen.CheckArgReg(v)
 	case ssa.OpARM64ADD,
 		ssa.OpARM64SUB,
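The compiler-side hunk above records one spill record per argument register and passes the list to the assembler through the function's FuncInfo (the real type is obj.RegSpill; its exact fields are not reproduced here). The toy sketch below only illustrates the idea: each record pairs a register with a spill-slot address placed past the saved-LR slot and with the store/load opcodes chosen from the argument's type, and the assembler later turns the list into a store sequence before the morestack call and a matching load sequence after it. All names and offsets in the sketch are invented for illustration.

package main

import "fmt"

// regSpill is a stand-in for the information each obj.RegSpill carries in the
// real compiler: which register to save, where its spill slot lives relative
// to RSP, and which store/load instructions match the argument's type.
type regSpill struct {
	reg     string // argument register, e.g. "R0" or "F0"
	offset  int64  // spill-slot offset from RSP, past the saved-LR slot
	spill   string // store opcode picked from the arg type, e.g. "MOVD"
	unspill string // matching load opcode, e.g. "MOVD" or "FMOVD"
}

// spillArgs renders the store sequence the assembler would emit before the
// morestack call; unspillArgs renders the matching reloads emitted after it.
func spillArgs(rs []regSpill) (stores []string) {
	for _, r := range rs {
		stores = append(stores, fmt.Sprintf("%s %s, %d(RSP)", r.spill, r.reg, r.offset))
	}
	return
}

func unspillArgs(rs []regSpill) (loads []string) {
	for _, r := range rs {
		loads = append(loads, fmt.Sprintf("%s %d(RSP), %s", r.unspill, r.offset, r.reg))
	}
	return
}

func main() {
	args := []regSpill{
		{reg: "R0", offset: 8, spill: "MOVD", unspill: "MOVD"},
		{reg: "F0", offset: 16, spill: "FMOVD", unspill: "FMOVD"},
	}
	fmt.Println(spillArgs(args))   // emitted before MOVD LR, R3 and the morestack call
	fmt.Println(unspillArgs(args)) // emitted after morestack returns, before B start
}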

src/cmd/compile/internal/ssagen/ssa.go

@@ -650,7 +650,6 @@ func buildssa(fn *ir.Func, worker int) *ssa.Func {
 	// it mimics the behavior of the former ABI (everything stored) and because it's not 100%
 	// clear if naming conventions are respected in autogenerated code.
 	// TODO figure out exactly what's unused, don't spill it. Make liveness fine-grained, also.
-	// TODO non-amd64 architectures have link registers etc that may require adjustment here.
 	for _, p := range params.InParams() {
 		typs, offs := p.RegisterTypesAndOffsets()
 		for i, t := range typs {

src/cmd/internal/obj/arm64/obj7.go

@@ -161,17 +161,20 @@ func (c *ctxt7) stacksplit(p *obj.Prog, framesize int32) *obj.Prog {
 	pcdata := c.ctxt.EmitEntryStackMap(c.cursym, spfix, c.newprog)
 	pcdata = c.ctxt.StartUnsafePoint(pcdata, c.newprog)
 
+	if q != nil {
+		q.To.SetTarget(pcdata)
+	}
+	bls.To.SetTarget(pcdata)
+
+	spill := c.cursym.Func().SpillRegisterArgs(pcdata, c.newprog)
+
 	// MOV	LR, R3
-	movlr := obj.Appendp(pcdata, c.newprog)
+	movlr := obj.Appendp(spill, c.newprog)
 	movlr.As = AMOVD
 	movlr.From.Type = obj.TYPE_REG
 	movlr.From.Reg = REGLINK
 	movlr.To.Type = obj.TYPE_REG
 	movlr.To.Reg = REG_R3
-	if q != nil {
-		q.To.SetTarget(movlr)
-	}
-	bls.To.SetTarget(movlr)
 
 	debug := movlr
 	if false {
@@ -196,7 +199,8 @@ func (c *ctxt7) stacksplit(p *obj.Prog, framesize int32) *obj.Prog {
 	}
 	call.To.Sym = c.ctxt.Lookup(morestack)
 
-	pcdata = c.ctxt.EndUnsafePoint(call, c.newprog, -1)
+	unspill := c.cursym.Func().UnspillRegisterArgs(call, c.newprog)
+	pcdata = c.ctxt.EndUnsafePoint(unspill, c.newprog, -1)
 
 	// B	start
 	jmp := obj.Appendp(pcdata, c.newprog)
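Taken together, the two stacksplit hunks change what surrounds the morestack call: the stack-check branches (q and bls) now target the start of the spill region instead of the MOV LR, R3, the argument-register spills are emitted first, and the reloads follow the call before the jump back to the start of the prologue. The following is only a schematic of that ordering written as plain Go, not the real obj.Prog emission.

package main

import "fmt"

// morestackPath lists, in order, what the ARM64 stack-split prologue's
// morestack path emits for a function with the given argument registers
// after this change. Purely schematic: the real output is obj.Prog
// instructions, not strings.
func morestackPath(regArgs []string) []string {
	steps := []string{"entry stack map / start unsafe point (branch target of the stack check)"}
	for _, r := range regArgs {
		steps = append(steps, "spill "+r+" to its stack slot") // SpillRegisterArgs
	}
	steps = append(steps,
		"MOVD LR, R3",
		"BL runtime.morestack (or morestack_noctxt)",
	)
	for _, r := range regArgs {
		steps = append(steps, "reload "+r+" from its stack slot") // UnspillRegisterArgs
	}
	steps = append(steps, "end unsafe point", "B start")
	return steps
}

func main() {
	for _, s := range morestackPath([]string{"R0", "R1", "F0"}) {
		fmt.Println(s)
	}
}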