
cmd/compile: use correct type in riscv64 late lower pass

The SLLI on the right-hand side always contains valid content in its high
32 bits, so we should use the 64-bit integer type. Using the wrong type may
lead to incorrect optimizations in the CSE pass.

Should fix x/text test failures.

Change-Id: I972dd913b8fb238d180bb12f8b1801adc8503fc0
Reviewed-on: https://go-review.googlesource.com/c/go/+/443875
Reviewed-by: Dmitri Goutnik <dgoutnik@gmail.com>
Reviewed-by: Cherry Mui <cherryyz@google.com>
TryBot-Result: Gopher Robot <gobot@golang.org>
Run-TryBot: Wayne Zuo <wdvxdr@golangcn.org>
Reviewed-by: David Chase <drchase@google.com>
Authored by Wayne Zuo on 2022-10-19 09:10:01 +08:00, committed by Meng Zhuo
parent 85196fc982
commit 2952fe4f16
2 changed files with 39 additions and 45 deletions
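
As background (not part of this change), here is a minimal stand-alone Go sketch of the bit movement the (SRLI [c] (MOVBUreg x)) rule performs for c = 2, using plain integers in place of SSA values. It illustrates why the intermediate SLLI result needs the 64-bit type: its payload sits entirely in the upper bits of the register.

package main

import "fmt"

func main() {
	x := uint64(0x1234_56f0) // MOVBUreg: only the low byte of x is meaningful
	const c = 2

	left := x << 56         // inner SLLI [56]: the byte now occupies bits 56..63
	out := left >> (56 + c) // outer SRLI [56+c]

	fmt.Printf("%#x\n", left)        // 0xf000000000000000: valid content only in the high bits
	fmt.Printf("%#x\n", out)         // 0x3c
	fmt.Printf("%#x\n", (x&0xff)>>c) // 0x3c: same as zero-extending the byte, then shifting
}

If that intermediate value were typed as a narrow integer, later passes could treat it as if only its low bits were significant; that is the mis-typing the commit message refers to.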


@@ -3,15 +3,15 @@
 // license that can be found in the LICENSE file.
 
 // Fold constant shift with extension.
-(SRAI <t> [c] (MOVBreg x)) && c < 8 => (SRAI [56+c] (SLLI <t> [56] x))
-(SRAI <t> [c] (MOVHreg x)) && c < 16 => (SRAI [48+c] (SLLI <t> [48] x))
-(SRAI <t> [c] (MOVWreg x)) && c < 32 => (SRAI [32+c] (SLLI <t> [32] x))
-(SRLI <t> [c] (MOVBUreg x)) && c < 8 => (SRLI [56+c] (SLLI <t> [56] x))
-(SRLI <t> [c] (MOVHUreg x)) && c < 16 => (SRLI [48+c] (SLLI <t> [48] x))
-(SRLI <t> [c] (MOVWUreg x)) && c < 32 => (SRLI [32+c] (SLLI <t> [32] x))
-(SLLI <t> [c] (MOVBUreg x)) && c <= 56 => (SRLI [56-c] (SLLI <t> [56] x))
-(SLLI <t> [c] (MOVHUreg x)) && c <= 48 => (SRLI [48-c] (SLLI <t> [48] x))
-(SLLI <t> [c] (MOVWUreg x)) && c <= 32 => (SRLI [32-c] (SLLI <t> [32] x))
+(SRAI [c] (MOVBreg x)) && c < 8 => (SRAI [56+c] (SLLI <typ.Int64> [56] x))
+(SRAI [c] (MOVHreg x)) && c < 16 => (SRAI [48+c] (SLLI <typ.Int64> [48] x))
+(SRAI [c] (MOVWreg x)) && c < 32 => (SRAI [32+c] (SLLI <typ.Int64> [32] x))
+(SRLI [c] (MOVBUreg x)) && c < 8 => (SRLI [56+c] (SLLI <typ.UInt64> [56] x))
+(SRLI [c] (MOVHUreg x)) && c < 16 => (SRLI [48+c] (SLLI <typ.UInt64> [48] x))
+(SRLI [c] (MOVWUreg x)) && c < 32 => (SRLI [32+c] (SLLI <typ.UInt64> [32] x))
+(SLLI [c] (MOVBUreg x)) && c <= 56 => (SRLI [56-c] (SLLI <typ.UInt64> [56] x))
+(SLLI [c] (MOVHUreg x)) && c <= 48 => (SRLI [48-c] (SLLI <typ.UInt64> [48] x))
+(SLLI [c] (MOVWUreg x)) && c <= 32 => (SRLI [32-c] (SLLI <typ.UInt64> [32] x))
 
 // Shift by zero.
 (SRAI [0] x) => x


@@ -17,11 +17,11 @@ func rewriteValueRISCV64latelower(v *Value) bool {
 func rewriteValueRISCV64latelower_OpRISCV64SLLI(v *Value) bool {
 	v_0 := v.Args[0]
 	b := v.Block
-	// match: (SLLI <t> [c] (MOVBUreg x))
+	typ := &b.Func.Config.Types
+	// match: (SLLI [c] (MOVBUreg x))
 	// cond: c <= 56
-	// result: (SRLI [56-c] (SLLI <t> [56] x))
+	// result: (SRLI [56-c] (SLLI <typ.UInt64> [56] x))
 	for {
-		t := v.Type
 		c := auxIntToInt64(v.AuxInt)
 		if v_0.Op != OpRISCV64MOVBUreg {
 			break
@@ -32,17 +32,16 @@ func rewriteValueRISCV64latelower_OpRISCV64SLLI(v *Value) bool {
 		}
 		v.reset(OpRISCV64SRLI)
 		v.AuxInt = int64ToAuxInt(56 - c)
-		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
+		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
 		v0.AuxInt = int64ToAuxInt(56)
 		v0.AddArg(x)
 		v.AddArg(v0)
 		return true
 	}
-	// match: (SLLI <t> [c] (MOVHUreg x))
+	// match: (SLLI [c] (MOVHUreg x))
 	// cond: c <= 48
-	// result: (SRLI [48-c] (SLLI <t> [48] x))
+	// result: (SRLI [48-c] (SLLI <typ.UInt64> [48] x))
 	for {
-		t := v.Type
 		c := auxIntToInt64(v.AuxInt)
 		if v_0.Op != OpRISCV64MOVHUreg {
 			break
@@ -53,17 +52,16 @@ func rewriteValueRISCV64latelower_OpRISCV64SLLI(v *Value) bool {
 		}
 		v.reset(OpRISCV64SRLI)
 		v.AuxInt = int64ToAuxInt(48 - c)
-		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
+		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
 		v0.AuxInt = int64ToAuxInt(48)
 		v0.AddArg(x)
 		v.AddArg(v0)
 		return true
 	}
-	// match: (SLLI <t> [c] (MOVWUreg x))
+	// match: (SLLI [c] (MOVWUreg x))
 	// cond: c <= 32
-	// result: (SRLI [32-c] (SLLI <t> [32] x))
+	// result: (SRLI [32-c] (SLLI <typ.UInt64> [32] x))
 	for {
-		t := v.Type
 		c := auxIntToInt64(v.AuxInt)
 		if v_0.Op != OpRISCV64MOVWUreg {
 			break
@@ -74,7 +72,7 @@ func rewriteValueRISCV64latelower_OpRISCV64SLLI(v *Value) bool {
 		}
 		v.reset(OpRISCV64SRLI)
 		v.AuxInt = int64ToAuxInt(32 - c)
-		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
+		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
 		v0.AuxInt = int64ToAuxInt(32)
 		v0.AddArg(x)
 		v.AddArg(v0)
@@ -95,11 +93,11 @@ func rewriteValueRISCV64latelower_OpRISCV64SLLI(v *Value) bool {
 func rewriteValueRISCV64latelower_OpRISCV64SRAI(v *Value) bool {
 	v_0 := v.Args[0]
 	b := v.Block
-	// match: (SRAI <t> [c] (MOVBreg x))
+	typ := &b.Func.Config.Types
+	// match: (SRAI [c] (MOVBreg x))
 	// cond: c < 8
-	// result: (SRAI [56+c] (SLLI <t> [56] x))
+	// result: (SRAI [56+c] (SLLI <typ.Int64> [56] x))
 	for {
-		t := v.Type
 		c := auxIntToInt64(v.AuxInt)
 		if v_0.Op != OpRISCV64MOVBreg {
 			break
@@ -110,17 +108,16 @@ func rewriteValueRISCV64latelower_OpRISCV64SRAI(v *Value) bool {
 		}
 		v.reset(OpRISCV64SRAI)
 		v.AuxInt = int64ToAuxInt(56 + c)
-		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
+		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.Int64)
 		v0.AuxInt = int64ToAuxInt(56)
 		v0.AddArg(x)
 		v.AddArg(v0)
 		return true
 	}
-	// match: (SRAI <t> [c] (MOVHreg x))
+	// match: (SRAI [c] (MOVHreg x))
 	// cond: c < 16
-	// result: (SRAI [48+c] (SLLI <t> [48] x))
+	// result: (SRAI [48+c] (SLLI <typ.Int64> [48] x))
 	for {
-		t := v.Type
 		c := auxIntToInt64(v.AuxInt)
 		if v_0.Op != OpRISCV64MOVHreg {
 			break
@@ -131,17 +128,16 @@ func rewriteValueRISCV64latelower_OpRISCV64SRAI(v *Value) bool {
 		}
 		v.reset(OpRISCV64SRAI)
 		v.AuxInt = int64ToAuxInt(48 + c)
-		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
+		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.Int64)
 		v0.AuxInt = int64ToAuxInt(48)
 		v0.AddArg(x)
 		v.AddArg(v0)
 		return true
 	}
-	// match: (SRAI <t> [c] (MOVWreg x))
+	// match: (SRAI [c] (MOVWreg x))
 	// cond: c < 32
-	// result: (SRAI [32+c] (SLLI <t> [32] x))
+	// result: (SRAI [32+c] (SLLI <typ.Int64> [32] x))
 	for {
-		t := v.Type
 		c := auxIntToInt64(v.AuxInt)
 		if v_0.Op != OpRISCV64MOVWreg {
 			break
@@ -152,7 +148,7 @@ func rewriteValueRISCV64latelower_OpRISCV64SRAI(v *Value) bool {
 		}
 		v.reset(OpRISCV64SRAI)
 		v.AuxInt = int64ToAuxInt(32 + c)
-		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
+		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.Int64)
 		v0.AuxInt = int64ToAuxInt(32)
 		v0.AddArg(x)
 		v.AddArg(v0)
@@ -173,11 +169,11 @@ func rewriteValueRISCV64latelower_OpRISCV64SRAI(v *Value) bool {
 func rewriteValueRISCV64latelower_OpRISCV64SRLI(v *Value) bool {
 	v_0 := v.Args[0]
 	b := v.Block
-	// match: (SRLI <t> [c] (MOVBUreg x))
+	typ := &b.Func.Config.Types
+	// match: (SRLI [c] (MOVBUreg x))
 	// cond: c < 8
-	// result: (SRLI [56+c] (SLLI <t> [56] x))
+	// result: (SRLI [56+c] (SLLI <typ.UInt64> [56] x))
 	for {
-		t := v.Type
 		c := auxIntToInt64(v.AuxInt)
 		if v_0.Op != OpRISCV64MOVBUreg {
 			break
@@ -188,17 +184,16 @@ func rewriteValueRISCV64latelower_OpRISCV64SRLI(v *Value) bool {
 		}
 		v.reset(OpRISCV64SRLI)
 		v.AuxInt = int64ToAuxInt(56 + c)
-		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
+		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
 		v0.AuxInt = int64ToAuxInt(56)
 		v0.AddArg(x)
 		v.AddArg(v0)
 		return true
 	}
-	// match: (SRLI <t> [c] (MOVHUreg x))
+	// match: (SRLI [c] (MOVHUreg x))
 	// cond: c < 16
-	// result: (SRLI [48+c] (SLLI <t> [48] x))
+	// result: (SRLI [48+c] (SLLI <typ.UInt64> [48] x))
 	for {
-		t := v.Type
 		c := auxIntToInt64(v.AuxInt)
 		if v_0.Op != OpRISCV64MOVHUreg {
 			break
@@ -209,17 +204,16 @@ func rewriteValueRISCV64latelower_OpRISCV64SRLI(v *Value) bool {
 		}
 		v.reset(OpRISCV64SRLI)
 		v.AuxInt = int64ToAuxInt(48 + c)
-		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
+		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
 		v0.AuxInt = int64ToAuxInt(48)
 		v0.AddArg(x)
 		v.AddArg(v0)
 		return true
 	}
-	// match: (SRLI <t> [c] (MOVWUreg x))
+	// match: (SRLI [c] (MOVWUreg x))
 	// cond: c < 32
-	// result: (SRLI [32+c] (SLLI <t> [32] x))
+	// result: (SRLI [32+c] (SLLI <typ.UInt64> [32] x))
 	for {
-		t := v.Type
 		c := auxIntToInt64(v.AuxInt)
 		if v_0.Op != OpRISCV64MOVWUreg {
 			break
@@ -230,7 +224,7 @@ func rewriteValueRISCV64latelower_OpRISCV64SRLI(v *Value) bool {
 		}
 		v.reset(OpRISCV64SRLI)
 		v.AuxInt = int64ToAuxInt(32 + c)
-		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, t)
+		v0 := b.NewValue0(v.Pos, OpRISCV64SLLI, typ.UInt64)
 		v0.AuxInt = int64ToAuxInt(32)
 		v0.AddArg(x)
 		v.AddArg(v0)