
cmd/compile: simplify prove pass

We don't need noLimit checks in a bunch of places.
Also simplify folding of provable constant results.

At this point in the CL stack, compilebench reports no performance
changes. The only thing of note is that binaries got a bit smaller.

name                      old text-bytes    new text-bytes    delta
HelloSize                       960kB ± 0%        952kB ± 0%  -0.83%  (p=0.000 n=10+10)
CmdGoSize                      12.3MB ± 0%       12.1MB ± 0%  -1.53%  (p=0.000 n=10+10)

Change-Id: Id4be75eec0f8c93f2f3b93a8521ce2278ee2ee2c
Reviewed-on: https://go-review.googlesource.com/c/go/+/599197
Reviewed-by: David Chase <drchase@google.com>
LUCI-TryBot-Result: Go LUCI <golang-scoped@luci-project-accounts.iam.gserviceaccount.com>
Reviewed-by: Michael Knyszek <mknyszek@google.com>
Authored by khr@golang.org 2024-07-11 19:38:33 -07:00, committed by Keith Randall
parent aba16d17c5
commit 9b4268c3df
3 changed files with 76 additions and 74 deletions
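
For readers skimming the diff below: every guard being deleted tests a looked-up limit against the noLimit sentinel before comparing its bounds. A minimal sketch of why that guard is redundant, modeled on prove.go's noLimit spanning the widest representable range so any strict-bound comparison against it is vacuously false; the limit type here is simplified for illustration and is not the exact definition from the compiler.

package main

import (
	"fmt"
	"math"
)

// Simplified stand-in for prove.go's limit: known bounds on a value.
type limit struct {
	min, max   int64  // signed bounds:   min  <= x <= max
	umin, umax uint64 // unsigned bounds: umin <= x <= umax
}

// noLimit means "no information": the full representable range.
var noLimit = limit{math.MinInt64, math.MaxInt64, 0, math.MaxUint64}

func main() {
	lim := noLimit // a value the pass knows nothing about

	// Old style: guard against the sentinel before testing a bound.
	if lim != noLimit && lim.umin > 0 {
		fmt.Println("provably nonzero (guarded)")
	}
	// New style: for noLimit the bound test is simply false,
	// so the sentinel guard can be dropped without changing behavior.
	if lim.umin > 0 {
		fmt.Println("provably nonzero (unguarded)")
	}
	fmt.Println("neither branch fires for an unknown value")
}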


@ -975,13 +975,13 @@ func (ft *factsTable) update(parent *Block, v, w *Value, d domain, r relation) {
//
// Useful for i > 0; s[i-1].
lim := ft.limits[x.ID]
if lim != noLimit && ((d == signed && lim.min > opMin[v.Op]) || (d == unsigned && lim.umin > 0)) {
if (d == signed && lim.min > opMin[v.Op]) || (d == unsigned && lim.umin > 0) {
ft.update(parent, x, w, d, gt)
}
} else if x, delta := isConstDelta(w); x != nil && delta == 1 {
// v >= x+1 && x < max ⇒ v > x
lim := ft.limits[x.ID]
if lim != noLimit && ((d == signed && lim.max < opMax[w.Op]) || (d == unsigned && lim.umax < opUMax[w.Op])) {
if (d == signed && lim.max < opMax[w.Op]) || (d == unsigned && lim.umax < opUMax[w.Op]) {
ft.update(parent, v, x, d, gt)
}
}
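
The comment in this hunk mentions the "i > 0; s[i-1]" idiom; here is a hypothetical example (not part of the CL) of the source pattern it helps, where the derived fact i-1 >= 0 lets later passes drop the bounds check:

// Hypothetical example, not from the CL: inside the branch the pass knows
// i > 0 and i < len(s), hence 0 <= i-1 < len(s), so s[i-1] needs no bounds check.
func prevElem(s []int, i int) int {
	if i > 0 && i < len(s) {
		return s[i-1]
	}
	return 0
}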
@ -995,7 +995,8 @@ func (ft *factsTable) update(parent *Block, v, w *Value, d domain, r relation) {
parent.Func.Warnl(parent.Pos, "x+d %s w; x:%v %v delta:%v w:%v d:%v", r, x, parent.String(), delta, w.AuxInt, d)
}
underflow := true
if l := ft.limits[x.ID]; l != noLimit && delta < 0 {
if delta < 0 {
l := ft.limits[x.ID]
if (x.Type.Size() == 8 && l.min >= math.MinInt64-delta) ||
(x.Type.Size() == 4 && l.min >= math.MinInt32-delta) {
underflow = false
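
A worked instance of the underflow test above, with invented numbers: for a 64-bit x with delta = -5 and a recorded lower bound l.min = -100, the condition l.min >= math.MinInt64 - delta becomes -100 >= math.MinInt64 + 5, which holds, so x + delta cannot wrap below math.MinInt64 and underflow is set to false.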
@ -1061,16 +1062,15 @@ func (ft *factsTable) update(parent *Block, v, w *Value, d domain, r relation) {
if r == gt {
min++
}
if l := ft.limits[x.ID]; l != noLimit {
if l.max <= min {
if r&eq == 0 || l.max < min {
// x>min (x>=min) is impossible, so it must be x<=max
ft.signedMax(x, max)
}
} else if l.min > max {
// x<=max is impossible, so it must be x>min
ft.signedMin(x, min)
l := ft.limits[x.ID]
if l.max <= min {
if r&eq == 0 || l.max < min {
// x>min (x>=min) is impossible, so it must be x<=max
ft.signedMax(x, max)
}
} else if l.min > max {
// x<=max is impossible, so it must be x>min
ft.signedMin(x, min)
}
}
}
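
To make the case analysis above concrete (numbers invented for illustration): suppose the new fact is the disjunction "x <= max or x > min" with max = 3 and min = 100, and the table already records l.max = 50. Since 50 <= 100 (and 50 < 100 in the strict case), x > 100 is impossible, so the pass records x <= 3 via signedMax; symmetrically, if l.min were known to exceed max, x <= max would be impossible and signedMin would record x > min.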
@ -1969,9 +1969,6 @@ func simplifyBlock(sdom SparseTree, ft *factsTable, b *Block) {
// slicemask(x + y)
// if x is larger than -y (y is negative), then slicemask is -1.
lim := ft.limits[x.ID]
if lim == noLimit {
break
}
if lim.umin > uint64(-delta) {
if v.Args[0].Op == OpAdd64 {
v.reset(OpConst64)
@ -1989,9 +1986,6 @@ func simplifyBlock(sdom SparseTree, ft *factsTable, b *Block) {
// Capture that information here for use in arch-specific optimizations.
x := v.Args[0]
lim := ft.limits[x.ID]
if lim == noLimit {
break
}
if lim.umin > 0 || lim.min > 0 || lim.max < 0 {
if b.Func.pass.debug > 0 {
b.Func.Warnl(v.Pos, "Proved %v non-zero", v.Op)
@ -2038,9 +2032,6 @@ func simplifyBlock(sdom SparseTree, ft *factsTable, b *Block) {
// is strictly less than the number of bits in a.
by := v.Args[1]
lim := ft.limits[by.ID]
if lim == noLimit {
break
}
bits := 8 * v.Args[0].Type.Size()
if lim.umax < uint64(bits) || (lim.max < bits && ft.isNonNegative(by)) {
if by.isGenericIntConst() {
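
A hypothetical example of the bounded-shift case this hunk handles (names invented, not from the CL): once the shift amount is proven smaller than the operand width, the backend can emit a plain shift without the masking or clamping otherwise needed for out-of-range amounts.

// Hypothetical example: k < 64 is known inside the branch, so the 64-bit
// shift amount needs no extra bounding on architectures that would
// otherwise have to handle k >= 64 specially.
func shiftBounded(x uint64, k uint) uint64 {
	if k < 64 {
		return x << k
	}
	return 0
}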
@ -2070,8 +2061,7 @@ func simplifyBlock(sdom SparseTree, ft *factsTable, b *Block) {
divrLim := ft.limits[divr.ID]
divd := v.Args[0]
divdLim := ft.limits[divd.ID]
if (divrLim != noLimit && (divrLim.max < -1 || divrLim.min > -1)) ||
(divdLim != noLimit && divdLim.min > mostNegativeDividend[v.Op]) {
if divrLim.max < -1 || divrLim.min > -1 || divdLim.min > mostNegativeDividend[v.Op] {
// See DivisionNeedsFixUp in rewrite.go.
// v.AuxInt = 1 means we have proved both that the divisor is not -1
// and that the dividend is not the most negative integer,
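
For context on the fix-up this hunk refers to: the only problematic signed-division input is the most negative integer divided by -1 (for example math.MinInt64 / -1), where the result wraps and some hardware divide instructions fault, so the compiler normally emits extra guard code; proving either that the divisor is never -1 or that the dividend is never the most negative value rules that input pair out, letting the fix-up be skipped.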
@ -2085,51 +2075,46 @@ func simplifyBlock(sdom SparseTree, ft *factsTable, b *Block) {
// Fold provable constant results.
// Helps in cases where we reuse a value after branching on its equality.
for i, arg := range v.Args {
switch arg.Op {
case OpConst64, OpConst32, OpConst16, OpConst8, OpConstBool, OpConstNil:
continue
}
lim := ft.limits[arg.ID]
if lim == noLimit {
continue
}
var constValue int64
typ := arg.Type
bits := 8 * typ.Size()
switch {
case lim.min == lim.max:
constValue = lim.min
case lim.umin == lim.umax:
// truncate then sign extend
switch bits {
case 64:
constValue = int64(lim.umin)
case 32:
constValue = int64(int32(lim.umin))
case 16:
constValue = int64(int16(lim.umin))
case 8:
constValue = int64(int8(lim.umin))
default:
panic("unexpected integer size")
}
constValue = int64(lim.umin)
default:
continue
}
var c *Value
switch arg.Op {
case OpConst64, OpConst32, OpConst16, OpConst8, OpConstBool, OpConstNil:
continue
}
typ := arg.Type
f := b.Func
switch bits {
case 64:
c = f.ConstInt64(typ, constValue)
case 32:
c = f.ConstInt32(typ, int32(constValue))
case 16:
c = f.ConstInt16(typ, int16(constValue))
case 8:
var c *Value
switch {
case typ.IsBoolean():
c = f.ConstBool(typ, constValue != 0)
case typ.IsInteger() && typ.Size() == 1:
c = f.ConstInt8(typ, int8(constValue))
case typ.IsInteger() && typ.Size() == 2:
c = f.ConstInt16(typ, int16(constValue))
case typ.IsInteger() && typ.Size() == 4:
c = f.ConstInt32(typ, int32(constValue))
case typ.IsInteger() && typ.Size() == 8:
c = f.ConstInt64(typ, constValue)
case typ.IsPtrShaped():
if constValue == 0 {
c = f.ConstNil(typ)
} else {
// Not sure how this might happen, but if it
// does, just skip it.
continue
}
default:
panic("unexpected integer size")
// Not sure how this might happen, but if it
// does, just skip it.
continue
}
v.SetArg(i, c)
if b.Func.pass.debug > 1 {
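
A hypothetical source-level illustration of the constant-result folding in the last hunk above (not from the CL): after branching on equality, uses of the value inside the branch can be replaced by the constant, enabling further constant folding downstream.

// Hypothetical example: inside the branch the prove pass knows x == 7,
// so the use of x can be rewritten to the constant 7, and later
// rewrites can fold 7*3 to 21.
func useAfterEqualityBranch(x int64) int64 {
	if x == 7 {
		return x * 3
	}
	return 0
}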


@ -217,53 +217,53 @@ func TestSetInvGeFp64(x float64, y float64) bool {
}
func TestLogicalCompareZero(x *[64]uint64) {
// ppc64x:"ANDCC",^"AND"
b := x[0]&3
if b!=0 {
b := x[0] & 3
if b != 0 {
x[0] = b
}
// ppc64x:"ANDCC",^"AND"
b = x[1]&x[2]
if b!=0 {
b = x[1] & x[2]
if b != 0 {
x[1] = b
}
// ppc64x:"ANDNCC",^"ANDN"
b = x[1]&^x[2]
if b!=0 {
b = x[1] &^ x[2]
if b != 0 {
x[1] = b
}
// ppc64x:"ORCC",^"OR"
b = x[3]|x[4]
if b!=0 {
b = x[3] | x[4]
if b != 0 {
x[3] = b
}
// ppc64x:"SUBCC",^"SUB"
b = x[5]-x[6]
if b!=0 {
b = x[5] - x[6]
if b != 0 {
x[5] = b
}
// ppc64x:"NORCC",^"NOR"
b = ^(x[5]|x[6])
if b!=0 {
b = ^(x[5] | x[6])
if b != 0 {
x[5] = b
}
// ppc64x:"XORCC",^"XOR"
b = x[7]^x[8]
if b!=0 {
b = x[7] ^ x[8]
if b != 0 {
x[7] = b
}
// ppc64x:"ADDCC",^"ADD"
b = x[9]+x[10]
if b!=0 {
b = x[9] + x[10]
if b != 0 {
x[9] = b
}
// ppc64x:"NEGCC",^"NEG"
b = -x[11]
if b!=0 {
if b != 0 {
x[11] = b
}
// ppc64x:"CNTLZDCC",^"CNTLZD"
b = uint64(bits.LeadingZeros64(x[12]))
if b!=0 {
if b != 0 {
x[12] = b
}
@ -274,3 +274,10 @@ func TestLogicalCompareZero(x *[64]uint64) {
}
}
func constantWrite(b bool, p *bool) {
if b {
// amd64:`MOVB\t[$]1, \(`
*p = b
}
}
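
The new constantWrite test above exercises the same folding at the source level: inside if b, b is provably true, so the store *p = b becomes a store of the constant 1, which is what the amd64 MOVB $1 assertion checks for.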


@ -53,3 +53,13 @@ func combine4slice(p *[4][]byte, a, b, c, d []byte) {
// arm64:-`.*runtime[.]gcWriteBarrier`
p[3] = d
}
func trickyWriteNil(p *int, q **int) {
if p == nil {
// We change "= p" to "= 0" in the prove pass, which
// means we have one less pointer that needs to go
// into the write barrier buffer.
// amd64:`.*runtime[.]gcWriteBarrier1`
*q = p
}
}
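
Likewise, in trickyWriteNil the pass proves p is nil inside the branch, so *q = p becomes a store of constant nil; as the comment notes, only the overwritten pointer then needs to be queued for the write barrier, which is why the test expects the single-entry helper gcWriteBarrier1 rather than a larger one.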