cmd/compile/internal/ssagen: provide intrinsicBuilders

Create an intrinsicBuilders type that has functions for adding and looking
up intrinsics. This makes the implementation more self-contained, readable
and testable. Additionally, pass an *intrinsicBuildConfig to initIntrinsics
to improve testability without needing to modify package level variables.

Change-Id: I0ee0a19c192dd6da9f1c5f1c29b98a3ad8161fe2
Reviewed-on: https://go-review.googlesource.com/c/go/+/605478
Reviewed-by: David Chase <drchase@google.com>
Reviewed-by: Keith Randall <khr@google.com>
Reviewed-by: Keith Randall <khr@golang.org>
Auto-Submit: Joel Sing <joel@sing.id.au>
LUCI-TryBot-Result: Go LUCI <golang-scoped@luci-project-accounts.iam.gserviceaccount.com>
parent 5d82dbb59c
commit dff15aa610
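The heart of the change is a named map type whose methods own the registration and lookup logic, replacing a bare map plus free-standing closures. Here is a minimal standalone sketch of that pattern, with simplified, illustrative types and names rather than the compiler's actual ones:

    package main

    import "fmt"

    // Simplified stand-ins for the compiler's intrinsicKey and intrinsicBuilder.
    type key struct{ arch, pkg, fn string }

    type builder func() string

    // builders mirrors the intrinsicBuilders idea: a named map type whose
    // methods own the registration logic.
    type builders map[key]builder

    // add registers b for pkg.fn on a single architecture.
    func (bs builders) add(arch, pkg, fn string, b builder) {
    	bs[key{arch, pkg, fn}] = b
    }

    // lookup returns the builder for pkg.fn on arch, or nil if none is registered.
    func (bs builders) lookup(arch, pkg, fn string) builder {
    	return bs[key{arch, pkg, fn}]
    }

    // alias re-registers an existing builder under a second name, panicking if
    // the target exists on none of the given architectures.
    func (bs builders) alias(pkg, fn, targetPkg, targetFn string, archs ...string) {
    	aliased := false
    	for _, arch := range archs {
    		if b := bs.lookup(arch, targetPkg, targetFn); b != nil {
    			bs.add(arch, pkg, fn, b)
    			aliased = true
    		}
    	}
    	if !aliased {
    		panic(fmt.Sprintf("attempted to alias undefined intrinsic: %s.%s", pkg, fn))
    	}
    }

    func main() {
    	bs := builders{}
    	bs.add("amd64", "pkg/a", "Fn", func() string { return "built Fn" })
    	bs.alias("pkg/b", "AliasedFn", "pkg/a", "Fn", "amd64", "arm64")

    	fmt.Println(bs.lookup("amd64", "pkg/b", "AliasedFn")())      // built Fn
    	fmt.Println(bs.lookup("arm64", "pkg/b", "AliasedFn") == nil) // true: never registered there
    }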
src/cmd/compile/internal/ssagen/intrinsics.go

@@ -15,7 +15,7 @@ import (
 	"cmd/internal/sys"
 )
 
-var intrinsics map[intrinsicKey]intrinsicBuilder
+var intrinsics intrinsicBuilders
 
 // An intrinsicBuilder converts a call node n into an ssa value that
 // implements that call as an intrinsic. args is a list of arguments to the func.
@@ -27,8 +27,80 @@ type intrinsicKey struct {
 	fn   string
 }
 
-func initIntrinsics() {
-	intrinsics = map[intrinsicKey]intrinsicBuilder{}
+// intrinsicBuildConfig specifies the config to use for intrinsic building.
+type intrinsicBuildConfig struct {
+	instrumenting bool
+
+	go386     string
+	goamd64   int
+	goarm     buildcfg.GoarmFeatures
+	goarm64   buildcfg.Goarm64Features
+	gomips    string
+	gomips64  string
+	goppc64   int
+	goriscv64 int
+}
+
+type intrinsicBuilders map[intrinsicKey]intrinsicBuilder
+
+// add adds the intrinsic builder b for pkg.fn for the given architecture.
+func (ib intrinsicBuilders) add(arch *sys.Arch, pkg, fn string, b intrinsicBuilder) {
+	ib[intrinsicKey{arch, pkg, fn}] = b
+}
+
+// addForArchs adds the intrinsic builder b for pkg.fn for the given architectures.
+func (ib intrinsicBuilders) addForArchs(pkg, fn string, b intrinsicBuilder, archs ...*sys.Arch) {
+	for _, arch := range archs {
+		ib.add(arch, pkg, fn, b)
+	}
+}
+
+// addForFamilies does the same as addForArchs but operates on architecture families.
+func (ib intrinsicBuilders) addForFamilies(pkg, fn string, b intrinsicBuilder, archFamilies ...sys.ArchFamily) {
+	for _, arch := range sys.Archs {
+		if arch.InFamily(archFamilies...) {
+			intrinsics.add(arch, pkg, fn, b)
+		}
+	}
+}
+
+// alias aliases pkg.fn to targetPkg.targetFn for all architectures in archs
+// for which targetPkg.targetFn already exists.
+func (ib intrinsicBuilders) alias(pkg, fn, targetPkg, targetFn string, archs ...*sys.Arch) {
+	// TODO(jsing): Consider making this work even if the alias is added
+	// before the intrinsic.
+	aliased := false
+	for _, arch := range archs {
+		if b := intrinsics.lookup(arch, targetPkg, targetFn); b != nil {
+			intrinsics.add(arch, pkg, fn, b)
+			aliased = true
+		}
+	}
+	if !aliased {
+		panic(fmt.Sprintf("attempted to alias undefined intrinsic: %s.%s", pkg, fn))
+	}
+}
+
+// lookup looks up the intrinsic for a pkg.fn on the specified architecture.
+func (ib intrinsicBuilders) lookup(arch *sys.Arch, pkg, fn string) intrinsicBuilder {
+	return intrinsics[intrinsicKey{arch, pkg, fn}]
+}
+
+func initIntrinsics(cfg *intrinsicBuildConfig) {
+	if cfg == nil {
+		cfg = &intrinsicBuildConfig{
+			instrumenting: base.Flag.Cfg.Instrumenting,
+			go386:         buildcfg.GO386,
+			goamd64:       buildcfg.GOAMD64,
+			goarm:         buildcfg.GOARM,
+			goarm64:       buildcfg.GOARM64,
+			gomips:        buildcfg.GOMIPS,
+			gomips64:      buildcfg.GOMIPS64,
+			goppc64:       buildcfg.GOPPC64,
+			goriscv64:     buildcfg.GORISCV64,
+		}
+	}
+	intrinsics = intrinsicBuilders{}
 
 	var p4 []*sys.Arch
 	var p8 []*sys.Arch
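The nil-config fallback at the top of the new initIntrinsics is what buys the testability the commit message mentions: production callers pass nil and get the real buildcfg values, while tests inject a fixed configuration. A sketch of the same defaulting pattern in isolation (hypothetical names, standing in for initIntrinsics and its config):

    package main

    import "fmt"

    // config stands in for intrinsicBuildConfig; goamd64 is one illustrative knob.
    type config struct {
    	goamd64 int
    }

    // buildDefault stands in for the package-level buildcfg values.
    var buildDefault = config{goamd64: 1}

    // initTables mirrors the initIntrinsics(cfg) contract: nil means "use the
    // real build configuration", non-nil means "use exactly this one".
    func initTables(cfg *config) config {
    	if cfg == nil {
    		c := buildDefault
    		cfg = &c
    	}
    	return *cfg
    }

    func main() {
    	fmt.Println(initTables(nil).goamd64)                 // 1: production path
    	fmt.Println(initTables(&config{goamd64: 3}).goamd64) // 3: injected by a test
    }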
@@ -45,36 +117,18 @@ func initIntrinsics() {
 	}
 	all := sys.Archs[:]
 
-	// add adds the intrinsic b for pkg.fn for the given list of architectures.
 	add := func(pkg, fn string, b intrinsicBuilder, archs ...*sys.Arch) {
-		for _, a := range archs {
-			intrinsics[intrinsicKey{a, pkg, fn}] = b
-		}
+		intrinsics.addForArchs(pkg, fn, b, archs...)
 	}
-	// addF does the same as add but operates on architecture families.
 	addF := func(pkg, fn string, b intrinsicBuilder, archFamilies ...sys.ArchFamily) {
-		for _, a := range sys.Archs {
-			if a.InFamily(archFamilies...) {
-				intrinsics[intrinsicKey{a, pkg, fn}] = b
-			}
-		}
+		intrinsics.addForFamilies(pkg, fn, b, archFamilies...)
 	}
-	// alias defines pkg.fn = pkg2.fn2 for all architectures in archs for which pkg2.fn2 exists.
 	alias := func(pkg, fn, pkg2, fn2 string, archs ...*sys.Arch) {
-		aliased := false
-		for _, a := range archs {
-			if b, ok := intrinsics[intrinsicKey{a, pkg2, fn2}]; ok {
-				intrinsics[intrinsicKey{a, pkg, fn}] = b
-				aliased = true
-			}
-		}
-		if !aliased {
-			panic(fmt.Sprintf("attempted to alias undefined intrinsic: %s.%s", pkg, fn))
-		}
+		intrinsics.alias(pkg, fn, pkg2, fn2, archs...)
 	}
 
 	/******** runtime ********/
-	if !base.Flag.Cfg.Instrumenting {
+	if !cfg.instrumenting {
 		add("runtime", "slicebytetostringtmp",
 			func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
 				// Compiler frontend optimizations emit OBYTES2STRTMP nodes
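Note how add, addF and alias survive as one-line wrappers around the new methods: the many registration call sites that follow in initIntrinsics compile unchanged, while the logic they previously duplicated now lives on intrinsicBuilders (and their doc comments move to the methods).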
@@ -125,7 +179,7 @@ func initIntrinsics() {
 		sys.ARM64, sys.PPC64, sys.RISCV64)
 
 	brev_arch := []sys.ArchFamily{sys.AMD64, sys.I386, sys.ARM64, sys.ARM, sys.S390X}
-	if buildcfg.GOPPC64 >= 10 {
+	if cfg.goppc64 >= 10 {
 		// Use only on Power10 as the new byte reverse instructions that Power10 provide
 		// make it worthwhile as an intrinsic
 		brev_arch = append(brev_arch, sys.PPC64)
@@ -258,7 +312,7 @@ func initIntrinsics() {
 	makeAtomicGuardedIntrinsicARM64common := func(op0, op1 ssa.Op, typ types.Kind, emit atomicOpEmitter, needReturn bool) intrinsicBuilder {
 
 		return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
-			if buildcfg.GOARM64.LSE {
+			if cfg.goarm64.LSE {
 				emit(s, n, args, op1, typ, needReturn)
 			} else {
 				// Target Atomic feature is identified by dynamic detection
@@ -565,7 +619,7 @@ func initIntrinsics() {
 				return s.variable(n, types.Types[types.TFLOAT64])
 			}
 
-			if buildcfg.GOAMD64 >= 3 {
+			if cfg.goamd64 >= 3 {
 				return s.newValue3(ssa.OpFMA, types.Types[types.TFLOAT64], args[0], args[1], args[2])
 			}
 
@@ -631,7 +685,7 @@ func initIntrinsics() {
 
 	makeRoundAMD64 := func(op ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
 		return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
-			if buildcfg.GOAMD64 >= 2 {
+			if cfg.goamd64 >= 2 {
 				return s.newValue1(op, types.Types[types.TFLOAT64], args[0])
 			}
 
@@ -732,7 +786,7 @@ func initIntrinsics() {
 	// ReverseBytes inlines correctly, no need to intrinsify it.
 	// Nothing special is needed for targets where ReverseBytes16 lowers to a rotate
 	// On Power10, 16-bit rotate is not available so use BRH instruction
-	if buildcfg.GOPPC64 >= 10 {
+	if cfg.goppc64 >= 10 {
 		addF("math/bits", "ReverseBytes16",
 			func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
 				return s.newValue1(ssa.OpBswap16, types.Types[types.TUINT], args[0])
@@ -847,7 +901,7 @@ func initIntrinsics() {
 
 	makeOnesCountAMD64 := func(op ssa.Op) func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
 		return func(s *state, n *ir.CallExpr, args []*ssa.Value) *ssa.Value {
-			if buildcfg.GOAMD64 >= 2 {
+			if cfg.goamd64 >= 2 {
 				return s.newValue1(op, types.Types[types.TINT], args[0])
 			}
 
@@ -1032,7 +1086,7 @@ func findIntrinsic(sym *types.Sym) intrinsicBuilder {
 			return nil
 		}
 	}
-	return intrinsics[intrinsicKey{Arch.LinkArch.Arch, pkg, fn}]
+	return intrinsics.lookup(Arch.LinkArch.Arch, pkg, fn)
 }
 
 func IsIntrinsicCall(n *ir.CallExpr) bool {
src/cmd/compile/internal/ssagen/intrinsics_test.go

@@ -7,6 +7,8 @@ package ssagen
 import (
+	"internal/buildcfg"
 	"testing"
+
 	"cmd/internal/sys"
 )
 
 type testIntrinsicKey struct {
@@ -1231,7 +1233,7 @@ var wantIntrinsicsPower10 = map[testIntrinsicKey]struct{}{
 }
 
 func TestIntrinsics(t *testing.T) {
-	initIntrinsics()
+	initIntrinsics(nil)
 
 	want := make(map[testIntrinsicKey]struct{})
 	for ik, iv := range wantIntrinsics {
@@ -1258,3 +1260,46 @@ func TestIntrinsics(t *testing.T) {
 		}
 	}
 }
+
+func TestIntrinsicBuilders(t *testing.T) {
+	cfg := &intrinsicBuildConfig{}
+	initIntrinsics(cfg)
+
+	for _, arch := range sys.Archs {
+		if intrinsics.lookup(arch, "runtime", "getcallersp") == nil {
+			t.Errorf("No intrinsic for runtime.getcallersp on arch %v", arch)
+		}
+	}
+
+	if intrinsics.lookup(sys.ArchAMD64, "runtime", "slicebytetostringtmp") == nil {
+		t.Error("No intrinsic for runtime.slicebytetostringtmp")
+	}
+
+	if intrinsics.lookup(sys.ArchRISCV64, "runtime", "publicationBarrier") == nil {
+		t.Errorf("No intrinsic for runtime.publicationBarrier on arch %v", sys.ArchRISCV64)
+	}
+
+	if intrinsics.lookup(sys.ArchAMD64, "internal/runtime/sys", "Bswap32") == nil {
+		t.Errorf("No intrinsic for internal/runtime/sys.Bswap32 on arch %v", sys.ArchAMD64)
+	}
+	if intrinsics.lookup(sys.ArchAMD64, "internal/runtime/sys", "Bswap64") == nil {
+		t.Errorf("No intrinsic for internal/runtime/sys.Bswap64 on arch %v", sys.ArchAMD64)
+	}
+
+	if intrinsics.lookup(sys.ArchPPC64, "internal/runtime/sys", "Bswap64") != nil {
+		t.Errorf("Found intrinsic for internal/runtime/sys.Bswap64 on arch %v", sys.ArchPPC64)
+	}
+
+	cfg.goppc64 = 10
+	cfg.instrumenting = true
+
+	initIntrinsics(cfg)
+
+	if intrinsics.lookup(sys.ArchAMD64, "runtime", "slicebytetostringtmp") != nil {
+		t.Error("Intrinsic incorrectly exists for runtime.slicebytetostringtmp")
+	}
+
+	if intrinsics.lookup(sys.ArchPPC64, "internal/runtime/sys", "Bswap64") == nil {
+		t.Errorf("No intrinsic for internal/runtime/sys.Bswap64 on arch %v", sys.ArchPPC64)
+	}
+}
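Because the config is now injected, this test can flip goppc64 and instrumenting and simply call initIntrinsics again, with no writes to buildcfg's package-level state between runs. From a Go checkout, an invocation along the lines of "go test -run TestIntrinsicBuilders cmd/compile/internal/ssagen" should exercise it (the exact command is an assumption, not part of the commit).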
src/cmd/compile/internal/ssagen/ssa.go

@@ -222,7 +222,7 @@ func InitConfig() {
 }
 
 func InitTables() {
-	initIntrinsics()
+	initIntrinsics(nil)
 }
 
 // AbiForBodylessFuncStackMap returns the ABI for a bodyless function's stack map.
@@ -2037,7 +2037,7 @@ func (s *state) stmt(n ir.Node) {
 	if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Name) {
 		// Note: we can only use the cache if we have the right atomic load instruction.
 		// Double-check that here.
-		if _, ok := intrinsics[intrinsicKey{Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp"}]; !ok {
+		if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
 			s.Fatalf("atomic load not available")
 		}
 		merge = s.f.NewBlock(ssa.BlockPlain)
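Swapping the comma-ok form for a nil check is behavior-preserving here because indexing a map with an absent key yields the value type's zero value, and the zero value of a function type such as intrinsicBuilder is nil. A tiny self-contained illustration of that rule (toy map, not the compiler's):

    package main

    import "fmt"

    func main() {
    	m := map[string]func(){} // an empty registry

    	f, ok := m["missing"] // comma-ok form: ok is false
    	fmt.Println(ok)       // false

    	fmt.Println(f == nil)            // true: zero value of a func type
    	fmt.Println(m["missing"] == nil) // true: the same test without the bool
    }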
@@ -5763,7 +5763,7 @@ func (s *state) dottype1(pos src.XPos, src, dst *types.Type, iface, source, targ
 	if base.Flag.N == 0 && rtabi.UseInterfaceSwitchCache(Arch.LinkArch.Name) {
 		// Note: we can only use the cache if we have the right atomic load instruction.
 		// Double-check that here.
-		if _, ok := intrinsics[intrinsicKey{Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp"}]; !ok {
+		if intrinsics.lookup(Arch.LinkArch.Arch, "internal/runtime/atomic", "Loadp") == nil {
 			s.Fatalf("atomic load not available")
 		}
 		// Pick right size ops.
src/internal/buildcfg/cfg.go

@@ -70,12 +70,12 @@ func goamd64() int {
 	return int(defaultGOAMD64[len("v")] - '0')
 }
 
-type goarmFeatures struct {
+type GoarmFeatures struct {
 	Version   int
 	SoftFloat bool
 }
 
-func (g goarmFeatures) String() string {
+func (g GoarmFeatures) String() string {
 	armStr := strconv.Itoa(g.Version)
 	if g.SoftFloat {
 		armStr += ",softfloat"
@@ -85,7 +85,7 @@ func (g goarmFeatures) String() string {
 	return armStr
 }
 
-func goarm() (g goarmFeatures) {
+func goarm() (g GoarmFeatures) {
 	const (
 		softFloatOpt = ",softfloat"
 		hardFloatOpt = ",hardfloat"
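Renaming goarmFeatures to the exported GoarmFeatures is what allows intrinsicBuildConfig, over in cmd/compile, to declare a field of type buildcfg.GoarmFeatures. Going by the String method shown above, a value formats as the version number with an optional ",softfloat" suffix; a small sketch (illustrative only, since internal/buildcfg can only be imported from within the Go tree):

    package main

    import (
    	"fmt"
    	"internal/buildcfg" // internal: compiles only inside GOROOT/src
    )

    func main() {
    	g := buildcfg.GoarmFeatures{Version: 7, SoftFloat: true}
    	fmt.Println(g) // 7,softfloat
    }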