go/test/codegen/stack.go
Commit e930413331 by khr@golang.org
cmd/compile: soften type matching when allocating stack slots
Currently we use pointer equality on types when deciding whether we can
reuse a stack slot. That's too strict, as we don't guarantee pointer
equality for the same type. In particular, it can vary based on whether
PtrTo has been called in the frontend or not.

Instead, use the type's LinkString, which is guaranteed to both be
unique for a type, and to not vary given two different type structures
describing the same type.

Update #65783

Change-Id: I64f55138475f04bfa30cfb819b786b7cc06aebe4
Reviewed-on: https://go-review.googlesource.com/c/go/+/565436
Reviewed-by: Keith Randall <khr@google.com>
Reviewed-by: Matthew Dempsky <mdempsky@google.com>
LUCI-TryBot-Result: Go LUCI <golang-scoped@luci-project-accounts.iam.gserviceaccount.com>
Auto-Submit: Keith Randall <khr@golang.org>
Reviewed-by: Cuong Manh Le <cuong.manhle.vn@gmail.com>
Date: 2024-02-29 21:29:41 +00:00
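
To make the change concrete, the sketch below shows, in ordinary Go rather than compiler code, what keying stack-slot reuse on a stable per-type string buys over keying on type pointer identity. All names here (typeDesc, slotAllocator, and so on) are hypothetical; only the idea of a LinkString-style key comes from the CL itself.

// Illustrative only: a toy allocator that reuses freed stack slots for
// values of the same type. Keys are a stable per-type string (the role
// played by types.Type.LinkString in the compiler), not the address of
// the type descriptor.
package main

import "fmt"

// typeDesc stands in for a compiler type descriptor. Two distinct
// descriptor values may describe the same Go type.
type typeDesc struct {
	link string // stable, unique description of the type
}

// slotAllocator hands out frame offsets and recycles freed ones.
type slotAllocator struct {
	free map[string][]int64 // type key -> offsets of freed slots
	next int64
}

// alloc returns a slot for a value of type t, reusing a freed slot of
// the same type when one is available.
func (a *slotAllocator) alloc(t typeDesc, size int64) int64 {
	if slots := a.free[t.link]; len(slots) > 0 {
		off := slots[len(slots)-1]
		a.free[t.link] = slots[:len(slots)-1]
		return off // reused: the keys match even though the descriptors differ
	}
	off := a.next
	a.next += size
	return off
}

// release marks a slot as dead so a later value of the same type can take it.
func (a *slotAllocator) release(t typeDesc, off int64) {
	if a.free == nil {
		a.free = make(map[string][]int64)
	}
	a.free[t.link] = append(a.free[t.link], off)
}

func main() {
	a := &slotAllocator{}

	// Two descriptors for *[4]int built along different paths: not
	// pointer-identical, but their keys are equal.
	p1 := typeDesc{link: "type:*[4]int"}
	p2 := typeDesc{link: "type:*[4]int"}

	s1 := a.alloc(p1, 8)
	a.release(p1, s1)
	s2 := a.alloc(p2, 8)

	fmt.Println(s1 == s2) // true: the slot is shared across descriptors
}

With pointer-identity keys, p1 and p2 would be treated as different types and the second allocation would get a fresh slot; with the string key, the freed slot is reused, which is what the new spillSlotReuse test in the file below checks on arm64.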


// asmcheck

// Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package codegen

import "runtime"

// This file contains code generation tests related to the use of the
// stack.

// Check that stack stores are optimized away.

// 386:"TEXT\t.*, [$]0-"
// amd64:"TEXT\t.*, [$]0-"
// arm:"TEXT\t.*, [$]-4-"
// arm64:"TEXT\t.*, [$]0-"
// mips:"TEXT\t.*, [$]-4-"
// ppc64x:"TEXT\t.*, [$]0-"
// s390x:"TEXT\t.*, [$]0-"
func StackStore() int {
	var x int
	return *(&x)
}

type T struct {
	A, B, C, D int // keep exported fields
	x, y, z    int // reset unexported fields
}

// Check that large structs are cleared directly (issue #24416).

// 386:"TEXT\t.*, [$]0-"
// amd64:"TEXT\t.*, [$]0-"
// arm:"TEXT\t.*, [$]0-" (spills return address)
// arm64:"TEXT\t.*, [$]0-"
// mips:"TEXT\t.*, [$]-4-"
// ppc64x:"TEXT\t.*, [$]0-"
// s390x:"TEXT\t.*, [$]0-"
func ZeroLargeStruct(x *T) {
	t := T{}
	*x = t
}

// Check that structs are partially initialised directly (issue #24386).

// Notes:
// - 386 fails due to spilling a register
// amd64:"TEXT\t.*, [$]0-"
// arm:"TEXT\t.*, [$]0-" (spills return address)
// arm64:"TEXT\t.*, [$]0-"
// ppc64x:"TEXT\t.*, [$]0-"
// s390x:"TEXT\t.*, [$]0-"
// Note that 386 currently has to spill a register.
func KeepWanted(t *T) {
	*t = T{A: t.A, B: t.B, C: t.C, D: t.D}
}

// Check that small array operations avoid using the stack (issue #15925).

// Notes:
// - 386 fails due to spilling a register
// - arm & mips fail due to softfloat calls
// amd64:"TEXT\t.*, [$]0-"
// arm64:"TEXT\t.*, [$]0-"
// ppc64x:"TEXT\t.*, [$]0-"
// s390x:"TEXT\t.*, [$]0-"
func ArrayAdd64(a, b [4]float64) [4]float64 {
	return [4]float64{a[0] + b[0], a[1] + b[1], a[2] + b[2], a[3] + b[3]}
}

// Check that small array initialization avoids using the stack.

// 386:"TEXT\t.*, [$]0-"
// amd64:"TEXT\t.*, [$]0-"
// arm:"TEXT\t.*, [$]0-" (spills return address)
// arm64:"TEXT\t.*, [$]0-"
// mips:"TEXT\t.*, [$]-4-"
// ppc64x:"TEXT\t.*, [$]0-"
// s390x:"TEXT\t.*, [$]0-"
func ArrayInit(i, j int) [4]int {
	return [4]int{i, 0, j, 0}
}

// Check that assembly output has matching offset and base register
// (issue #21064).
func check_asmout(b [2]int) int {
	runtime.GC() // use some frame
	// amd64:`.*b\+24\(SP\)`
	// arm:`.*b\+4\(FP\)`
	return b[1]
}

// Check that simple functions get promoted to nosplit, even when
// they might panic in various ways. See issue 31219.
// amd64:"TEXT\t.*NOSPLIT.*"
func MightPanic(a []int, i, j, k, s int) {
	_ = a[i]     // panicIndex
	_ = a[i:j]   // panicSlice
	_ = a[i:j:k] // also panicSlice
	_ = i << s   // panicShift
	_ = i / j    // panicDivide
}

// Put a defer in a loop, so the second defer is not open-coded.
func Defer() {
	for i := 0; i < 2; i++ {
		defer func() {}()
	}
	// amd64:`CALL\truntime\.deferprocStack`
	defer func() {}()
}

// Check that stack slots are shared among values of the same type,
// even when their type descriptors are not pointer-identical.
// See issue 65783.
func spillSlotReuse() {
	// The return values of getp1 and getp2 need to be
	// spilled around the calls to nopInt. Make sure that
	// the spill slot gets reused.
	//arm64:`.*autotmp_2-8\(SP\)`
	getp1()[nopInt()] = 0
	//arm64:`.*autotmp_2-8\(SP\)`
	getp2()[nopInt()] = 0
}

//go:noinline
func nopInt() int {
	return 0
}

//go:noinline
func getp1() *[4]int {
	return nil
}

//go:noinline
func getp2() *[4]int {
	return nil
}