mirror of
https://github.com/golang/go
synced 2024-11-08 18:46:16 -07:00
8af0c77df3
Use conditional moves instead of subtractions with borrow to handle saturation cases. This allows us to delete the SUBE/SUBEW ops and associated rules from the SSA backend. Using conditional moves also means we can detect when shift values are masked so I've added some new rules to constant fold the relevant comparisons and masking ops. Also use the new shiftIsBounded() function to avoid generating code to handle saturation cases where possible. Updates #25167 for s390x. Change-Id: Ief9991c91267c9151ce4c5ec07642abb4dcc1c0d Reviewed-on: https://go-review.googlesource.com/110070 Run-TryBot: Michael Munday <mike.munday@ibm.com> TryBot-Result: Gobot Gobot <gobot@golang.org> Reviewed-by: Brad Fitzpatrick <bradfitz@golang.org>
100 lines
1.9 KiB
Go
// asmcheck
|
|
|
|
// Copyright 2018 The Go Authors. All rights reserved.
|
|
// Use of this source code is governed by a BSD-style
|
|
// license that can be found in the LICENSE file.
|
|
|
|
package codegen
|
|
|
|
// ------------------ //
|
|
// masked shifts //
|
|
// ------------------ //
|
|
|
|
// lshMask64x64 shifts v left by s&63. Because the shift amount is
// masked to the operand width, the compiler should drop the mask and
// any saturation handling: the s390x check below asserts that no AND
// and no conditional move (MOVDGE) appear in the generated assembly.
func lshMask64x64(v int64, s uint64) int64 {
	// s390x:-".*AND",-".*MOVDGE"
	return v << (s&63)
}
|
|
|
|
// rshMask64Ux64 shifts v (unsigned, so a logical shift) right by s&63.
// The mask proves the shift is in range, so the s390x output must
// contain neither a masking AND nor a saturation MOVDGE.
func rshMask64Ux64(v uint64, s uint64) uint64 {
	// s390x:-".*AND",-".*MOVDGE"
	return v >> (s&63)
}
|
|
|
|
// rshMask64x64 shifts v (signed, so an arithmetic shift) right by s&63.
// The mask bounds the shift amount, so no AND or MOVDGE is expected in
// the s390x assembly.
func rshMask64x64(v int64, s uint64) int64 {
	// s390x:-".*AND",-".*MOVDGE"
	return v >> (s&63)
}
|
|
|
|
// lshMask32x64 shifts a 32-bit value left by a masked 64-bit shift
// amount. Even for a 32-bit operand the s390x backend should emit no
// masking AND and no saturation MOVDGE.
func lshMask32x64(v int32, s uint64) int32 {
	// s390x:-".*AND",-".*MOVDGE"
	return v << (s&63)
}
|
|
|
|
// rshMask32Ux64 logically shifts a 32-bit unsigned value right by a
// masked 64-bit shift amount; the s390x check asserts the mask and
// saturation code are optimized away.
func rshMask32Ux64(v uint32, s uint64) uint32 {
	// s390x:-".*AND",-".*MOVDGE"
	return v >> (s&63)
}
|
|
|
|
// rshMask32x64 arithmetically shifts a 32-bit signed value right by a
// masked 64-bit shift amount; no AND or MOVDGE should survive in the
// s390x output.
func rshMask32x64(v int32, s uint64) int32 {
	// s390x:-".*AND",-".*MOVDGE"
	return v >> (s&63)
}
|
|
|
|
// lshMask64x32 shifts a 64-bit value left by a masked 32-bit shift
// amount; the s390x check asserts the mask and saturation handling are
// eliminated.
func lshMask64x32(v int64, s uint32) int64 {
	// s390x:-".*AND",-".*MOVDGE"
	return v << (s&63)
}
|
|
|
|
// rshMask64Ux32 logically shifts a 64-bit unsigned value right by a
// masked 32-bit shift amount; no AND or MOVDGE expected on s390x.
func rshMask64Ux32(v uint64, s uint32) uint64 {
	// s390x:-".*AND",-".*MOVDGE"
	return v >> (s&63)
}
|
|
|
|
// rshMask64x32 arithmetically shifts a 64-bit signed value right by a
// masked 32-bit shift amount; no AND or MOVDGE expected on s390x.
func rshMask64x32(v int64, s uint32) int64 {
	// s390x:-".*AND",-".*MOVDGE"
	return v >> (s&63)
}
|
|
|
|
// lshMask64x32Ext is like lshMask64x32 but the masked signed shift
// amount is converted (sign-extended) to uint before shifting; the
// compiler should still see the bound and emit no AND or MOVDGE on
// s390x.
func lshMask64x32Ext(v int64, s int32) int64 {
	// s390x:-".*AND",-".*MOVDGE"
	return v << uint(s&63)
}
|
|
|
|
// rshMask64Ux32Ext logically shifts right by a masked signed shift
// amount converted to uint; no AND or MOVDGE expected on s390x.
func rshMask64Ux32Ext(v uint64, s int32) uint64 {
	// s390x:-".*AND",-".*MOVDGE"
	return v >> uint(s&63)
}
|
|
|
|
// rshMask64x32Ext arithmetically shifts right by a masked signed shift
// amount converted to uint; no AND or MOVDGE expected on s390x.
func rshMask64x32Ext(v int64, s int32) int64 {
	// s390x:-".*AND",-".*MOVDGE"
	return v >> uint(s&63)
}
|
|
|
|
// ------------------ //
|
|
// bounded shifts //
|
|
// ------------------ //
|
|
|
|
// lshGuarded64 left-shifts v by s inside an explicit s < 64 bounds
// check. The guard lets the compiler prove the shift is bounded
// (shiftIsBounded), so the s390x output should contain no masking AND
// and no saturation conditional move (MOVDGE).
//
// Fix: the original body used ">>", which contradicts the function's
// name and duplicated rshGuarded64 — the operators of the two had been
// swapped. A left shift is restored here.
func lshGuarded64(v int64, s uint) int64 {
	if s < 64 {
		// s390x:-".*AND",-".*MOVDGE"
		return v << s
	}
	panic("shift too large")
}
|
|
|
|
// rshGuarded64U logically right-shifts the unsigned v by s inside an
// explicit s < 64 bounds check. The guard proves the shift is bounded,
// so the s390x output should contain no masking AND and no saturation
// conditional move (MOVDGE).
func rshGuarded64U(v uint64, s uint) uint64 {
	if s < 64 {
		// s390x:-".*AND",-".*MOVDGE"
		return v >> s
	}
	panic("shift too large")
}
|
|
|
|
// rshGuarded64 arithmetically right-shifts the signed v by s inside an
// explicit s < 64 bounds check. The guard proves the shift is bounded,
// so the s390x output should contain no masking AND and no saturation
// conditional move (MOVDGE).
//
// Fix: the original body used "<<", which contradicts the function's
// name and duplicated lshGuarded64 — the operators of the two had been
// swapped. A right shift is restored here.
func rshGuarded64(v int64, s uint) int64 {
	if s < 64 {
		// s390x:-".*AND",-".*MOVDGE"
		return v >> s
	}
	panic("shift too large")
}
|