
cmd/compile: optimize unsigned comparisons to 0/1 on amd64

Plus a bonus optimization I noticed while working on this.

There are no functions (besides the rewrite rules) whose text size
increases as a result of this change.
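
To make the effect concrete, here is an illustrative sketch (not code from
this CL) of the kind of always-false/always-true unsigned comparison the new
rules fold; the function names are made up, and whether a particular
comparison survives to these rules depends on earlier rewrites:

	// Illustrative only: an unsigned value is never below zero, so the
	// compiler can decide these comparisons without emitting a test.
	func negative(x uint64) bool {
		return x < 0 // always false: SETB (TESTQ x x) -> ConstBool [0]
	}

	func nonNegative(x uint64) bool {
		return x >= 0 // always true: SETAE (TESTQ x x) -> ConstBool [1]
	}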

Updates #21439

The following per-package text size stats were generated by parsing the
output of compiling with -S and summing the function size reported on the
STEXT line. This gives a far more accurate picture of the impact
on generated code than merely looking at the object file size changes
or the resulting binary size changes. The latter are below, for reference.
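
For concreteness, here is a hedged sketch of that measurement (not the script
actually used for this CL); the exact shape of the -S output and the size=
field are assumptions:

	// sumstext.go: compile a package with -S and sum the per-function
	// sizes reported on STEXT lines. Hypothetical helper, shown only to
	// make the methodology above concrete.
	package main

	import (
		"fmt"
		"os"
		"os/exec"
		"regexp"
		"strconv"
		"strings"
	)

	func main() {
		pkg := os.Args[1] // e.g. "runtime"
		out, err := exec.Command("go", "build", "-gcflags=-S", pkg).CombinedOutput()
		if err != nil {
			fmt.Fprintln(os.Stderr, err)
			os.Exit(1)
		}
		sizeRE := regexp.MustCompile(`size=(\d+)`) // assumed STEXT line format
		total := 0
		for _, line := range strings.Split(string(out), "\n") {
			if !strings.Contains(line, "STEXT") {
				continue
			}
			if m := sizeRE.FindStringSubmatch(line); m != nil {
				n, _ := strconv.Atoi(m[1])
				total += n
			}
		}
		fmt.Printf("%s: %d bytes of function text\n", pkg, total)
	}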

file                                          before  after   Δ       %       
runtime.s                                     477257  476417  -840    -0.176% 
math.s                                        35985   35976   -9      -0.025% 
vendor/golang.org/x/net/dns/dnsmessage.s      87314   87232   -82     -0.094% 
debug/dwarf.s                                 108444  108432  -12     -0.011% 
regexp.s                                      64535   64467   -68     -0.105% 
internal/xcoff.s                              23175   22945   -230    -0.992% 
cmd/vendor/golang.org/x/arch/arm/armasm.s     45263   45260   -3      -0.007% 
cmd/vendor/golang.org/x/arch/arm64/arm64asm.s 118140  118135  -5      -0.004% 
cmd/internal/obj/arm64.s                      151502  151498  -4      -0.003% 
cmd/compile/internal/ssa.s                    6061483 6063120 +1637   +0.027% 
total                                         9321728 9322112 +384    +0.004% 

file      before    after     Δ       %       
go        15188916  15184820  -4096   -0.027% 
addr2line 4315984   4311888   -4096   -0.095% 
cgo       4836088   4831992   -4096   -0.085% 
compile   24506008  24493720  -12288  -0.050% 
doc       4680952   4676856   -4096   -0.088% 
link      6605336   6601240   -4096   -0.062% 
pprof     14776756  14772660  -4096   -0.028% 
total     135250956 135214092 -36864  -0.027% 

Change-Id: I1243a098a08db452f7d1eb0998e241c9b199e2b4
Reviewed-on: https://go-review.googlesource.com/c/go/+/213058
Run-TryBot: Josh Bleecher Snyder <josharian@gmail.com>
TryBot-Result: Gobot Gobot <gobot@golang.org>
Reviewed-by: Keith Randall <khr@golang.org>
Author: Josh Bleecher Snyder
Date:   2019-12-31 23:12:33 -08:00
Parent: 117826903a
Commit: 18053b7131

2 changed files with 232 additions and 0 deletions

src/cmd/compile/internal/ssa/gen/AMD64.rules

@@ -567,6 +567,12 @@
(NE (TESTB (SETAE cmp) (SETAE cmp)) yes no) -> (UGE cmp yes no)
(NE (TESTB (SETO cmp) (SETO cmp)) yes no) -> (OS cmp yes no)
// Unsigned comparisons to 0/1
(ULT (TEST(Q|L|W|B) x x) yes no) -> (First no yes)
(UGE (TEST(Q|L|W|B) x x) yes no) -> (First yes no)
(SETB (TEST(Q|L|W|B) x x)) -> (ConstBool [0])
(SETAE (TEST(Q|L|W|B) x x)) -> (ConstBool [1])
// Recognize bit tests: a&(1<<b) != 0 for b suitably bounded
// Note that BTx instructions use the carry bit, so we need to convert tests for zero flag
// into tests for carry flags.
@@ -1311,6 +1317,8 @@
(CMPWconst (ANDLconst _ [m]) [n]) && 0 <= int16(m) && int16(m) < int16(n) -> (FlagLT_ULT)
(CMPBconst (ANDLconst _ [m]) [n]) && 0 <= int8(m) && int8(m) < int8(n) -> (FlagLT_ULT)
(TEST(Q|L)const [c] (MOV(Q|L)const [c])) -> (FlagEQ)
// TODO: DIVxU also.
// Absorb flag constants into SBB ops.

src/cmd/compile/internal/ssa/rewriteAMD64.go

@@ -27249,6 +27249,62 @@ func rewriteValueAMD64_OpAMD64SETA(v *Value) bool {
}
func rewriteValueAMD64_OpAMD64SETAE(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SETAE (TESTQ x x))
	// result: (ConstBool [1])
	for {
		if v_0.Op != OpAMD64TESTQ {
			break
		}
		x := v_0.Args[1]
		if x != v_0.Args[0] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (SETAE (TESTL x x))
	// result: (ConstBool [1])
	for {
		if v_0.Op != OpAMD64TESTL {
			break
		}
		x := v_0.Args[1]
		if x != v_0.Args[0] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (SETAE (TESTW x x))
	// result: (ConstBool [1])
	for {
		if v_0.Op != OpAMD64TESTW {
			break
		}
		x := v_0.Args[1]
		if x != v_0.Args[0] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (SETAE (TESTB x x))
	// result: (ConstBool [1])
	for {
		if v_0.Op != OpAMD64TESTB {
			break
		}
		x := v_0.Args[1]
		if x != v_0.Args[0] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 1
		return true
	}
	// match: (SETAE (InvertFlags x))
	// result: (SETBE x)
	for {
@@ -27666,6 +27722,62 @@ func rewriteValueAMD64_OpAMD64SETAstore(v *Value) bool {
}
func rewriteValueAMD64_OpAMD64SETB(v *Value) bool {
	v_0 := v.Args[0]
	// match: (SETB (TESTQ x x))
	// result: (ConstBool [0])
	for {
		if v_0.Op != OpAMD64TESTQ {
			break
		}
		x := v_0.Args[1]
		if x != v_0.Args[0] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 0
		return true
	}
	// match: (SETB (TESTL x x))
	// result: (ConstBool [0])
	for {
		if v_0.Op != OpAMD64TESTL {
			break
		}
		x := v_0.Args[1]
		if x != v_0.Args[0] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 0
		return true
	}
	// match: (SETB (TESTW x x))
	// result: (ConstBool [0])
	for {
		if v_0.Op != OpAMD64TESTW {
			break
		}
		x := v_0.Args[1]
		if x != v_0.Args[0] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 0
		return true
	}
	// match: (SETB (TESTB x x))
	// result: (ConstBool [0])
	for {
		if v_0.Op != OpAMD64TESTB {
			break
		}
		x := v_0.Args[1]
		if x != v_0.Args[0] {
			break
		}
		v.reset(OpConstBool)
		v.AuxInt = 0
		return true
	}
	// match: (SETB (InvertFlags x))
	// result: (SETA x)
	for {
@@ -33171,6 +33283,16 @@ func rewriteValueAMD64_OpAMD64TESTL(v *Value) bool {
}
func rewriteValueAMD64_OpAMD64TESTLconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (TESTLconst [c] (MOVLconst [c]))
	// result: (FlagEQ)
	for {
		c := v.AuxInt
		if v_0.Op != OpAMD64MOVLconst || v_0.AuxInt != c {
			break
		}
		v.reset(OpAMD64FlagEQ)
		return true
	}
	// match: (TESTLconst [-1] x)
	// cond: x.Op != OpAMD64MOVLconst
	// result: (TESTL x x)
@@ -33246,6 +33368,16 @@ func rewriteValueAMD64_OpAMD64TESTQ(v *Value) bool {
}
func rewriteValueAMD64_OpAMD64TESTQconst(v *Value) bool {
	v_0 := v.Args[0]
	// match: (TESTQconst [c] (MOVQconst [c]))
	// result: (FlagEQ)
	for {
		c := v.AuxInt
		if v_0.Op != OpAMD64MOVQconst || v_0.AuxInt != c {
			break
		}
		v.reset(OpAMD64FlagEQ)
		return true
	}
	// match: (TESTQconst [-1] x)
	// cond: x.Op != OpAMD64MOVQconst
	// result: (TESTQ x x)
@@ -41959,6 +42091,50 @@ func rewriteBlockAMD64(b *Block) bool {
			return true
		}
	case BlockAMD64UGE:
		// match: (UGE (TESTQ x x) yes no)
		// result: (First yes no)
		for b.Controls[0].Op == OpAMD64TESTQ {
			v_0 := b.Controls[0]
			x := v_0.Args[1]
			if x != v_0.Args[0] {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (UGE (TESTL x x) yes no)
		// result: (First yes no)
		for b.Controls[0].Op == OpAMD64TESTL {
			v_0 := b.Controls[0]
			x := v_0.Args[1]
			if x != v_0.Args[0] {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (UGE (TESTW x x) yes no)
		// result: (First yes no)
		for b.Controls[0].Op == OpAMD64TESTW {
			v_0 := b.Controls[0]
			x := v_0.Args[1]
			if x != v_0.Args[0] {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (UGE (TESTB x x) yes no)
		// result: (First yes no)
		for b.Controls[0].Op == OpAMD64TESTB {
			v_0 := b.Controls[0]
			x := v_0.Args[1]
			if x != v_0.Args[0] {
				break
			}
			b.Reset(BlockFirst)
			return true
		}
		// match: (UGE (InvertFlags cmp) yes no)
		// result: (ULE cmp yes no)
		for b.Controls[0].Op == OpAMD64InvertFlags {
@@ -42086,6 +42262,54 @@ func rewriteBlockAMD64(b *Block) bool {
			return true
		}
	case BlockAMD64ULT:
		// match: (ULT (TESTQ x x) yes no)
		// result: (First no yes)
		for b.Controls[0].Op == OpAMD64TESTQ {
			v_0 := b.Controls[0]
			x := v_0.Args[1]
			if x != v_0.Args[0] {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
		// match: (ULT (TESTL x x) yes no)
		// result: (First no yes)
		for b.Controls[0].Op == OpAMD64TESTL {
			v_0 := b.Controls[0]
			x := v_0.Args[1]
			if x != v_0.Args[0] {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
		// match: (ULT (TESTW x x) yes no)
		// result: (First no yes)
		for b.Controls[0].Op == OpAMD64TESTW {
			v_0 := b.Controls[0]
			x := v_0.Args[1]
			if x != v_0.Args[0] {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
		// match: (ULT (TESTB x x) yes no)
		// result: (First no yes)
		for b.Controls[0].Op == OpAMD64TESTB {
			v_0 := b.Controls[0]
			x := v_0.Args[1]
			if x != v_0.Args[0] {
				break
			}
			b.Reset(BlockFirst)
			b.swapSuccessors()
			return true
		}
		// match: (ULT (InvertFlags cmp) yes no)
		// result: (UGT cmp yes no)
		for b.Controls[0].Op == OpAMD64InvertFlags {