From 2952fe4f16666e8ebe433f7505cb52e1cfc56f4b Mon Sep 17 00:00:00 2001
From: Wayne Zuo
Date: Wed, 19 Oct 2022 09:10:01 +0800
Subject: [PATCH] cmd/compile: use correct type in riscv64 late lower pass

The SLLI on the right-hand side always contains valid content in its
high 32 bits, so we should use the 64-bit integer type. Using the
wrong type may lead to incorrect optimizations in the CSE pass.

Should fix x/text test failures.

Change-Id: I972dd913b8fb238d180bb12f8b1801adc8503fc0
Reviewed-on: https://go-review.googlesource.com/c/go/+/443875
Reviewed-by: Dmitri Goutnik
Reviewed-by: Cherry Mui
TryBot-Result: Gopher Robot
Run-TryBot: Wayne Zuo
Reviewed-by: David Chase
---
 .../internal/ssa/_gen/RISCV64latelower.rules  | 18 ++---
 .../internal/ssa/rewriteRISCV64latelower.go   | 66 +++++++++----------
 2 files changed, 39 insertions(+), 45 deletions(-)

diff --git a/src/cmd/compile/internal/ssa/_gen/RISCV64latelower.rules b/src/cmd/compile/internal/ssa/_gen/RISCV64latelower.rules
index c44a837bbf..cd55331dfd 100644
--- a/src/cmd/compile/internal/ssa/_gen/RISCV64latelower.rules
+++ b/src/cmd/compile/internal/ssa/_gen/RISCV64latelower.rules
@@ -3,15 +3,15 @@
 // license that can be found in the LICENSE file.
 
 // Fold constant shift with extension.
-(SRAI <t> [c] (MOVBreg  x)) && c <   8 => (SRAI  [56+c] (SLLI <t> [56] x))
-(SRAI <t> [c] (MOVHreg  x)) && c <  16 => (SRAI  [48+c] (SLLI <t> [48] x))
-(SRAI <t> [c] (MOVWreg  x)) && c <  32 => (SRAI  [32+c] (SLLI <t> [32] x))
-(SRLI <t> [c] (MOVBUreg x)) && c <   8 => (SRLI  [56+c] (SLLI <t> [56] x))
-(SRLI <t> [c] (MOVHUreg x)) && c <  16 => (SRLI  [48+c] (SLLI <t> [48] x))
-(SRLI <t> [c] (MOVWUreg x)) && c <  32 => (SRLI  [32+c] (SLLI <t> [32] x))
-(SLLI <t> [c] (MOVBUreg x)) && c <= 56 => (SRLI  [56-c] (SLLI <t> [56] x))
-(SLLI <t> [c] (MOVHUreg x)) && c <= 48 => (SRLI  [48-c] (SLLI <t> [48] x))
-(SLLI <t> [c] (MOVWUreg x)) && c <= 32 => (SRLI  [32-c] (SLLI <t> [32] x))
+(SRAI [c] (MOVBreg  x)) && c <   8 => (SRAI  [56+c] (SLLI <typ.Int64>  [56] x))
+(SRAI [c] (MOVHreg  x)) && c <  16 => (SRAI  [48+c] (SLLI <typ.Int64>  [48] x))
+(SRAI [c] (MOVWreg  x)) && c <  32 => (SRAI  [32+c] (SLLI <typ.Int64>  [32] x))
+(SRLI [c] (MOVBUreg x)) && c <   8 => (SRLI  [56+c] (SLLI <typ.UInt64> [56] x))
+(SRLI [c] (MOVHUreg x)) && c <  16 => (SRLI  [48+c] (SLLI <typ.UInt64> [48] x))
+(SRLI [c] (MOVWUreg x)) && c <  32 => (SRLI  [32+c] (SLLI <typ.UInt64> [32] x))
+(SLLI [c] (MOVBUreg x)) && c <= 56 => (SRLI  [56-c] (SLLI <typ.UInt64> [56] x))
+(SLLI [c] (MOVHUreg x)) && c <= 48 => (SRLI  [48-c] (SLLI <typ.UInt64> [48] x))
+(SLLI [c] (MOVWUreg x)) && c <= 32 => (SRLI  [32-c] (SLLI <typ.UInt64> [32] x))
 
 // Shift by zero.
 (SRAI [0] x) => x
diff --git a/src/cmd/compile/internal/ssa/rewriteRISCV64latelower.go b/src/cmd/compile/internal/ssa/rewriteRISCV64latelower.go
index 2cde073eec..04a9691b02 100644
--- a/src/cmd/compile/internal/ssa/rewriteRISCV64latelower.go
+++ b/src/cmd/compile/internal/ssa/rewriteRISCV64latelower.go
@@ -17,11 +17,11 @@ func rewriteValueRISCV64latelower(v *Value) bool {
 func rewriteValueRISCV64latelower_OpRISCV64SLLI(v *Value) bool {
 	v_0 := v.Args[0]
 	b := v.Block
-	// match: (SLLI <t> [c] (MOVBUreg x))
+	typ := &b.Func.Config.Types
+	// match: (SLLI [c] (MOVBUreg x))
 	// cond: c <= 56
-	// result: (SRLI [56-c] (SLLI <t> [56] x))
+	// result: (SRLI [56-c] (SLLI <typ.UInt64> [56] x))
 	for {
-		t := v.Type
 		c := auxIntToInt64(v.AuxInt)
 		if v_0.Op != OpRISCV64MOVBUreg {
 			break
@@ -32,17 +32,16 @@ func rewriteValueRISCV64latelower_OpRISCV64SLLI(v *Value) bool {
 		}
 		v.reset(OpRISCV64SRLI)
 		v.AuxInt = int64ToAuxInt(56 - c)
-		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
+		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
 		v0.AuxInt = int64ToAuxInt(56)
 		v0.AddArg(x)
 		v.AddArg(v0)
 		return true
 	}
-	// match: (SLLI <t> [c] (MOVHUreg x))
+	// match: (SLLI [c] (MOVHUreg x))
 	// cond: c <= 48
-	// result: (SRLI [48-c] (SLLI <t> [48] x))
+	// result: (SRLI [48-c] (SLLI <typ.UInt64> [48] x))
 	for {
-		t := v.Type
 		c := auxIntToInt64(v.AuxInt)
 		if v_0.Op != OpRISCV64MOVHUreg {
 			break
@@ -53,17 +52,16 @@ func rewriteValueRISCV64latelower_OpRISCV64SLLI(v *Value) bool {
 		}
 		v.reset(OpRISCV64SRLI)
 		v.AuxInt = int64ToAuxInt(48 - c)
-		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
+		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
 		v0.AuxInt = int64ToAuxInt(48)
 		v0.AddArg(x)
 		v.AddArg(v0)
 		return true
 	}
-	// match: (SLLI <t> [c] (MOVWUreg x))
+	// match: (SLLI [c] (MOVWUreg x))
 	// cond: c <= 32
-	// result: (SRLI [32-c] (SLLI <t> [32] x))
+	// result: (SRLI [32-c] (SLLI <typ.UInt64> [32] x))
 	for {
-		t := v.Type
 		c := auxIntToInt64(v.AuxInt)
 		if v_0.Op != OpRISCV64MOVWUreg {
 			break
@@ -74,7 +72,7 @@ func rewriteValueRISCV64latelower_OpRISCV64SLLI(v *Value) bool {
 		}
 		v.reset(OpRISCV64SRLI)
 		v.AuxInt = int64ToAuxInt(32 - c)
-		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
+		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
 		v0.AuxInt = int64ToAuxInt(32)
 		v0.AddArg(x)
 		v.AddArg(v0)
@@ -95,11 +93,11 @@ func rewriteValueRISCV64latelower_OpRISCV64SLLI(v *Value) bool {
 func rewriteValueRISCV64latelower_OpRISCV64SRAI(v *Value) bool {
 	v_0 := v.Args[0]
 	b := v.Block
-	// match: (SRAI <t> [c] (MOVBreg x))
+	typ := &b.Func.Config.Types
+	// match: (SRAI [c] (MOVBreg x))
 	// cond: c < 8
-	// result: (SRAI [56+c] (SLLI <t> [56] x))
+	// result: (SRAI [56+c] (SLLI <typ.Int64> [56] x))
 	for {
-		t := v.Type
 		c := auxIntToInt64(v.AuxInt)
 		if v_0.Op != OpRISCV64MOVBreg {
 			break
@@ -110,17 +108,16 @@ func rewriteValueRISCV64latelower_OpRISCV64SRAI(v *Value) bool {
 		}
 		v.reset(OpRISCV64SRAI)
 		v.AuxInt = int64ToAuxInt(56 + c)
-		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
+		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.Int64)
 		v0.AuxInt = int64ToAuxInt(56)
 		v0.AddArg(x)
 		v.AddArg(v0)
 		return true
 	}
-	// match: (SRAI <t> [c] (MOVHreg x))
+	// match: (SRAI [c] (MOVHreg x))
 	// cond: c < 16
-	// result: (SRAI [48+c] (SLLI <t> [48] x))
+	// result: (SRAI [48+c] (SLLI <typ.Int64> [48] x))
 	for {
-		t := v.Type
 		c := auxIntToInt64(v.AuxInt)
 		if v_0.Op != OpRISCV64MOVHreg {
 			break
@@ -131,17 +128,16 @@ func rewriteValueRISCV64latelower_OpRISCV64SRAI(v *Value) bool {
 		}
 		v.reset(OpRISCV64SRAI)
 		v.AuxInt = int64ToAuxInt(48 + c)
-		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
+		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.Int64)
 		v0.AuxInt = int64ToAuxInt(48)
 		v0.AddArg(x)
 		v.AddArg(v0)
 		return true
 	}
-	// match: (SRAI <t> [c] (MOVWreg x))
+	// match: (SRAI [c] (MOVWreg x))
 	// cond: c < 32
-	// result: (SRAI [32+c] (SLLI <t> [32] x))
+	// result: (SRAI [32+c] (SLLI <typ.Int64> [32] x))
 	for {
-		t := v.Type
 		c := auxIntToInt64(v.AuxInt)
 		if v_0.Op != OpRISCV64MOVWreg {
 			break
@@ -152,7 +148,7 @@ func rewriteValueRISCV64latelower_OpRISCV64SRAI(v *Value) bool {
 		}
 		v.reset(OpRISCV64SRAI)
 		v.AuxInt = int64ToAuxInt(32 + c)
-		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
+		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.Int64)
 		v0.AuxInt = int64ToAuxInt(32)
 		v0.AddArg(x)
 		v.AddArg(v0)
@@ -173,11 +169,11 @@ func rewriteValueRISCV64latelower_OpRISCV64SRAI(v *Value) bool {
 func rewriteValueRISCV64latelower_OpRISCV64SRLI(v *Value) bool {
 	v_0 := v.Args[0]
 	b := v.Block
-	// match: (SRLI <t> [c] (MOVBUreg x))
+	typ := &b.Func.Config.Types
+	// match: (SRLI [c] (MOVBUreg x))
 	// cond: c < 8
-	// result: (SRLI [56+c] (SLLI <t> [56] x))
+	// result: (SRLI [56+c] (SLLI <typ.UInt64> [56] x))
 	for {
-		t := v.Type
 		c := auxIntToInt64(v.AuxInt)
 		if v_0.Op != OpRISCV64MOVBUreg {
 			break
@@ -188,17 +184,16 @@ func rewriteValueRISCV64latelower_OpRISCV64SRLI(v *Value) bool {
 		}
 		v.reset(OpRISCV64SRLI)
 		v.AuxInt = int64ToAuxInt(56 + c)
-		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
+		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
 		v0.AuxInt = int64ToAuxInt(56)
 		v0.AddArg(x)
 		v.AddArg(v0)
 		return true
 	}
-	// match: (SRLI <t> [c] (MOVHUreg x))
+	// match: (SRLI [c] (MOVHUreg x))
 	// cond: c < 16
-	// result: (SRLI [48+c] (SLLI <t> [48] x))
+	// result: (SRLI [48+c] (SLLI <typ.UInt64> [48] x))
 	for {
-		t := v.Type
 		c := auxIntToInt64(v.AuxInt)
 		if v_0.Op != OpRISCV64MOVHUreg {
 			break
@@ -209,17 +204,16 @@ func rewriteValueRISCV64latelower_OpRISCV64SRLI(v *Value) bool {
 		}
 		v.reset(OpRISCV64SRLI)
 		v.AuxInt = int64ToAuxInt(48 + c)
-		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
+		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
 		v0.AuxInt = int64ToAuxInt(48)
 		v0.AddArg(x)
 		v.AddArg(v0)
 		return true
 	}
-	// match: (SRLI <t> [c] (MOVWUreg x))
+	// match: (SRLI [c] (MOVWUreg x))
 	// cond: c < 32
-	// result: (SRLI [32+c] (SLLI <t> [32] x))
+	// result: (SRLI [32+c] (SLLI <typ.UInt64> [32] x))
 	for {
-		t := v.Type
 		c := auxIntToInt64(v.AuxInt)
 		if v_0.Op != OpRISCV64MOVWUreg {
 			break
@@ -230,7 +224,7 @@ func rewriteValueRISCV64latelower_OpRISCV64SRLI(v *Value) bool {
 		}
 		v.reset(OpRISCV64SRLI)
 		v.AuxInt = int64ToAuxInt(32 + c)
-		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
+		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
 		v0.AuxInt = int64ToAuxInt(32)
 		v0.AddArg(x)
 		v.AddArg(v0)
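
Editor's note: the following is a minimal sketch, not part of the CL, illustrating
the shift identity these rules encode and why the inner SLLI must carry a 64-bit
type. The function names foldedSRA and direct are hypothetical, chosen for
illustration only. The rewrite (SRAI [c] (MOVWreg x)) => (SRAI [32+c] (SLLI [32] x))
replaces "sign-extend the low 32 bits, then shift right by c" with "shift left by
32 so the low word fills the high half, then arithmetic-shift right by 32+c". After
that SLLI the intermediate value genuinely occupies all 64 bits, so typing it with
the (possibly 32-bit) type of the original shift misdescribes it to later passes.

	package main

	import "fmt"

	// foldedSRA mirrors the rewritten form: shift x left by 32 (the low
	// word now fills the high half, making a full 64-bit intermediate),
	// then arithmetic-shift right by 32+c.
	func foldedSRA(x uint64, c uint) int64 {
		return int64(x<<32) >> (32 + c)
	}

	// direct is the unfolded form: sign-extend the low 32 bits of x,
	// then arithmetic-shift right by c.
	func direct(x uint64, c uint) int64 {
		return int64(int32(uint32(x))) >> c
	}

	func main() {
		// Check the identity on a few interesting inputs for every
		// shift amount the rule's condition (c < 32) permits.
		inputs := []uint64{0, 1, 0x7fffffff, 0x80000000, 0xdeadbeefcafef00d}
		for _, x := range inputs {
			for c := uint(0); c < 32; c++ {
				if foldedSRA(x, c) != direct(x, c) {
					fmt.Printf("mismatch at x=%#x c=%d\n", x, c)
					return
				}
			}
		}
		fmt.Println("folded and direct forms agree")
	}

In this framing the bug is visible: Go's CSE pass treats values with the same
opcode, type, aux/auxint, and arguments as interchangeable, so an inner SLLI
mislabeled with a narrow type could be merged with a genuinely narrow value whose
upper bits are not meaningful, plausibly producing the x/text failures this CL
addresses.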