diff --git a/src/cmd/compile/internal/ssa/gen/generic.rules b/src/cmd/compile/internal/ssa/gen/generic.rules
index 6dbe9b47d01..d5cc107fab7 100644
--- a/src/cmd/compile/internal/ssa/gen/generic.rules
+++ b/src/cmd/compile/internal/ssa/gen/generic.rules
@@ -515,15 +515,18 @@
 
 // simplifications
 (Or(64|32|16|8) x x) => x
-(Or(64|32|16|8) (Const(64|32|16|8) [0]) x) => x
+(Or(64|32|16|8) (Const(64|32|16|8) [0]) x) => x
 (Or(64|32|16|8) (Const(64|32|16|8) [-1]) _) => (Const(64|32|16|8) [-1])
+(Or(64|32|16|8) (Com(64|32|16|8) x) x) => (Const(64|32|16|8) [-1])
 
 (And(64|32|16|8) x x) => x
 (And(64|32|16|8) (Const(64|32|16|8) [-1]) x) => x
-(And(64|32|16|8) (Const(64|32|16|8) [0]) _) => (Const(64|32|16|8) [0])
+(And(64|32|16|8) (Const(64|32|16|8) [0]) _) => (Const(64|32|16|8) [0])
+(And(64|32|16|8) (Com(64|32|16|8) x) x) => (Const(64|32|16|8) [0])
 
 (Xor(64|32|16|8) x x) => (Const(64|32|16|8) [0])
 (Xor(64|32|16|8) (Const(64|32|16|8) [0]) x) => x
+(Xor(64|32|16|8) (Com(64|32|16|8) x) x) => (Const(64|32|16|8) [-1])
 
 (Add(64|32|16|8) (Const(64|32|16|8) [0]) x) => x
 (Sub(64|32|16|8) x x) => (Const(64|32|16|8) [0])
@@ -533,6 +536,13 @@
 
 (Com(64|32|16|8) (Const(64|32|16|8) [c])) => (Const(64|32|16|8) [^c])
 (Neg(64|32|16|8) (Sub(64|32|16|8) x y)) => (Sub(64|32|16|8) y x)
+(Add(64|32|16|8) x (Neg(64|32|16|8) y)) => (Sub(64|32|16|8) x y)
+
+(Xor(64|32|16|8) (Const(64|32|16|8) [-1]) x) => (Com(64|32|16|8) x)
+
+(Sub(64|32|16|8) (Neg(64|32|16|8) x) (Com(64|32|16|8) x)) => (Const(64|32|16|8) [1])
+(Sub(64|32|16|8) (Com(64|32|16|8) x) (Neg(64|32|16|8) x)) => (Const(64|32|16|8) [-1])
+(Add(64|32|16|8) (Com(64|32|16|8) x) x) => (Const(64|32|16|8) [-1])
 
 // ^(x-1) == ^x+1 == -x
 (Add(64|32|16|8) (Const(64|32|16|8) [1]) (Com(64|32|16|8) x)) => (Neg(64|32|16|8) x)
diff --git a/src/cmd/compile/internal/ssa/rewritegeneric.go b/src/cmd/compile/internal/ssa/rewritegeneric.go
index fbf227562a9..f61b6ca3ece 100644
--- a/src/cmd/compile/internal/ssa/rewritegeneric.go
+++ b/src/cmd/compile/internal/ssa/rewritegeneric.go
@@ -519,6 +519,38 @@ func rewriteValuegeneric_OpAdd16(v *Value) bool {
 		}
 		break
 	}
+	// match: (Add16 x (Neg16 y))
+	// result: (Sub16 x y)
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			x := v_0
+			if v_1.Op != OpNeg16 {
+				continue
+			}
+			y := v_1.Args[0]
+			v.reset(OpSub16)
+			v.AddArg2(x, y)
+			return true
+		}
+		break
+	}
+	// match: (Add16 (Com16 x) x)
+	// result: (Const16 [-1])
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpCom16 {
+				continue
+			}
+			x := v_0.Args[0]
+			if x != v_1 {
+				continue
+			}
+			v.reset(OpConst16)
+			v.AuxInt = int16ToAuxInt(-1)
+			return true
+		}
+		break
+	}
 	// match: (Add16 (Const16 [1]) (Com16 x))
 	// result: (Neg16 x)
 	for {
@@ -764,6 +796,38 @@ func rewriteValuegeneric_OpAdd32(v *Value) bool {
 		}
 		break
 	}
+	// match: (Add32 x (Neg32 y))
+	// result: (Sub32 x y)
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			x := v_0
+			if v_1.Op != OpNeg32 {
+				continue
+			}
+			y := v_1.Args[0]
+			v.reset(OpSub32)
+			v.AddArg2(x, y)
+			return true
+		}
+		break
+	}
+	// match: (Add32 (Com32 x) x)
+	// result: (Const32 [-1])
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpCom32 {
+				continue
+			}
+			x := v_0.Args[0]
+			if x != v_1 {
+				continue
+			}
+			v.reset(OpConst32)
+			v.AuxInt = int32ToAuxInt(-1)
+			return true
+		}
+		break
+	}
 	// match: (Add32 (Const32 [1]) (Com32 x))
 	// result: (Neg32 x)
 	for {
@@ -1036,6 +1100,38 @@ func rewriteValuegeneric_OpAdd64(v *Value) bool {
 		}
 		break
 	}
+	// match: (Add64 x (Neg64 y))
+	// result: (Sub64 x y)
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			x := v_0
+			if v_1.Op != OpNeg64 {
+				continue
+			}
+			y := v_1.Args[0]
+			v.reset(OpSub64)
+			v.AddArg2(x, y)
+			return true
+		}
+		break
+	}
+	// match: (Add64 (Com64 x) x)
+	// result: (Const64 [-1])
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpCom64 {
+				continue
+			}
+			x := v_0.Args[0]
+			if x != v_1 {
+				continue
+			}
+			v.reset(OpConst64)
+			v.AuxInt = int64ToAuxInt(-1)
+			return true
+		}
+		break
+	}
 	// match: (Add64 (Const64 [1]) (Com64 x))
 	// result: (Neg64 x)
 	for {
@@ -1308,6 +1404,38 @@ func rewriteValuegeneric_OpAdd8(v *Value) bool {
 		}
 		break
 	}
+	// match: (Add8 x (Neg8 y))
+	// result: (Sub8 x y)
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			x := v_0
+			if v_1.Op != OpNeg8 {
+				continue
+			}
+			y := v_1.Args[0]
+			v.reset(OpSub8)
+			v.AddArg2(x, y)
+			return true
+		}
+		break
+	}
+	// match: (Add8 (Com8 x) x)
+	// result: (Const8 [-1])
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpCom8 {
+				continue
+			}
+			x := v_0.Args[0]
+			if x != v_1 {
+				continue
+			}
+			v.reset(OpConst8)
+			v.AuxInt = int8ToAuxInt(-1)
+			return true
+		}
+		break
+	}
 	// match: (Add8 (Const8 [1]) (Com8 x))
 	// result: (Neg8 x)
 	for {
@@ -1630,6 +1758,23 @@ func rewriteValuegeneric_OpAnd16(v *Value) bool {
 		}
 		break
 	}
+	// match: (And16 (Com16 x) x)
+	// result: (Const16 [0])
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpCom16 {
+				continue
+			}
+			x := v_0.Args[0]
+			if x != v_1 {
+				continue
+			}
+			v.reset(OpConst16)
+			v.AuxInt = int16ToAuxInt(0)
+			return true
+		}
+		break
+	}
 	// match: (And16 x (And16 x y))
 	// result: (And16 x y)
 	for {
@@ -1828,6 +1973,23 @@ func rewriteValuegeneric_OpAnd32(v *Value) bool {
 		}
 		break
 	}
+	// match: (And32 (Com32 x) x)
+	// result: (Const32 [0])
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpCom32 {
+				continue
+			}
+			x := v_0.Args[0]
+			if x != v_1 {
+				continue
+			}
+			v.reset(OpConst32)
+			v.AuxInt = int32ToAuxInt(0)
+			return true
+		}
+		break
+	}
 	// match: (And32 x (And32 x y))
 	// result: (And32 x y)
 	for {
@@ -2026,6 +2188,23 @@ func rewriteValuegeneric_OpAnd64(v *Value) bool {
 		}
 		break
 	}
+	// match: (And64 (Com64 x) x)
+	// result: (Const64 [0])
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpCom64 {
+				continue
+			}
+			x := v_0.Args[0]
+			if x != v_1 {
+				continue
+			}
+			v.reset(OpConst64)
+			v.AuxInt = int64ToAuxInt(0)
+			return true
+		}
+		break
+	}
 	// match: (And64 x (And64 x y))
 	// result: (And64 x y)
 	for {
@@ -2224,6 +2403,23 @@ func rewriteValuegeneric_OpAnd8(v *Value) bool {
 		}
 		break
 	}
+	// match: (And8 (Com8 x) x)
+	// result: (Const8 [0])
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpCom8 {
+				continue
+			}
+			x := v_0.Args[0]
+			if x != v_1 {
+				continue
+			}
+			v.reset(OpConst8)
+			v.AuxInt = int8ToAuxInt(0)
+			return true
+		}
+		break
+	}
 	// match: (And8 x (And8 x y))
 	// result: (And8 x y)
 	for {
@@ -16964,6 +17160,23 @@ func rewriteValuegeneric_OpOr16(v *Value) bool {
 		}
 		break
 	}
+	// match: (Or16 (Com16 x) x)
+	// result: (Const16 [-1])
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpCom16 {
+				continue
+			}
+			x := v_0.Args[0]
+			if x != v_1 {
+				continue
+			}
+			v.reset(OpConst16)
+			v.AuxInt = int16ToAuxInt(-1)
+			return true
+		}
+		break
+	}
 	// match: (Or16 x (Or16 x y))
 	// result: (Or16 x y)
 	for {
@@ -17142,6 +17355,23 @@ func rewriteValuegeneric_OpOr32(v *Value) bool {
 		}
 		break
 	}
+	// match: (Or32 (Com32 x) x)
+	// result: (Const32 [-1])
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpCom32 {
+				continue
+			}
+			x := v_0.Args[0]
+			if x != v_1 {
+				continue
+			}
+			v.reset(OpConst32)
+			v.AuxInt = int32ToAuxInt(-1)
+			return true
+		}
+		break
+	}
 	// match: (Or32 x (Or32 x y))
 	// result: (Or32 x y)
 	for {
@@ -17320,6 +17550,23 @@ func rewriteValuegeneric_OpOr64(v *Value) bool {
 		}
 		break
 	}
+	// match: (Or64 (Com64 x) x)
+	// result: (Const64 [-1])
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpCom64 {
+				continue
+			}
+			x := v_0.Args[0]
+			if x != v_1 {
+				continue
+			}
+			v.reset(OpConst64)
+			v.AuxInt = int64ToAuxInt(-1)
+			return true
+		}
+		break
+	}
 	// match: (Or64 x (Or64 x y))
 	// result: (Or64 x y)
 	for {
@@ -17498,6 +17745,23 @@ func rewriteValuegeneric_OpOr8(v *Value) bool {
 		}
 		break
 	}
+	// match: (Or8 (Com8 x) x)
+	// result: (Const8 [-1])
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpCom8 {
+				continue
+			}
+			x := v_0.Args[0]
+			if x != v_1 {
+				continue
+			}
+			v.reset(OpConst8)
+			v.AuxInt = int8ToAuxInt(-1)
+			return true
+		}
+		break
+	}
 	// match: (Or8 x (Or8 x y))
 	// result: (Or8 x y)
 	for {
@@ -22994,6 +23258,34 @@ func rewriteValuegeneric_OpSub16(v *Value) bool {
 		v.AuxInt = int16ToAuxInt(0)
 		return true
 	}
+	// match: (Sub16 (Neg16 x) (Com16 x))
+	// result: (Const16 [1])
+	for {
+		if v_0.Op != OpNeg16 {
+			break
+		}
+		x := v_0.Args[0]
+		if v_1.Op != OpCom16 || x != v_1.Args[0] {
+			break
+		}
+		v.reset(OpConst16)
+		v.AuxInt = int16ToAuxInt(1)
+		return true
+	}
+	// match: (Sub16 (Com16 x) (Neg16 x))
+	// result: (Const16 [-1])
+	for {
+		if v_0.Op != OpCom16 {
+			break
+		}
+		x := v_0.Args[0]
+		if v_1.Op != OpNeg16 || x != v_1.Args[0] {
+			break
+		}
+		v.reset(OpConst16)
+		v.AuxInt = int16ToAuxInt(-1)
+		return true
+	}
 	// match: (Sub16 (Add16 x y) x)
 	// result: y
 	for {
@@ -23309,6 +23601,34 @@ func rewriteValuegeneric_OpSub32(v *Value) bool {
 		v.AuxInt = int32ToAuxInt(0)
 		return true
 	}
+	// match: (Sub32 (Neg32 x) (Com32 x))
+	// result: (Const32 [1])
+	for {
+		if v_0.Op != OpNeg32 {
+			break
+		}
+		x := v_0.Args[0]
+		if v_1.Op != OpCom32 || x != v_1.Args[0] {
+			break
+		}
+		v.reset(OpConst32)
+		v.AuxInt = int32ToAuxInt(1)
+		return true
+	}
+	// match: (Sub32 (Com32 x) (Neg32 x))
+	// result: (Const32 [-1])
+	for {
+		if v_0.Op != OpCom32 {
+			break
+		}
+		x := v_0.Args[0]
+		if v_1.Op != OpNeg32 || x != v_1.Args[0] {
+			break
+		}
+		v.reset(OpConst32)
+		v.AuxInt = int32ToAuxInt(-1)
+		return true
+	}
 	// match: (Sub32 (Add32 x y) x)
 	// result: y
 	for {
@@ -23648,6 +23968,34 @@ func rewriteValuegeneric_OpSub64(v *Value) bool {
 		v.AuxInt = int64ToAuxInt(0)
 		return true
 	}
+	// match: (Sub64 (Neg64 x) (Com64 x))
+	// result: (Const64 [1])
+	for {
+		if v_0.Op != OpNeg64 {
+			break
+		}
+		x := v_0.Args[0]
+		if v_1.Op != OpCom64 || x != v_1.Args[0] {
+			break
+		}
+		v.reset(OpConst64)
+		v.AuxInt = int64ToAuxInt(1)
+		return true
+	}
+	// match: (Sub64 (Com64 x) (Neg64 x))
+	// result: (Const64 [-1])
+	for {
+		if v_0.Op != OpCom64 {
+			break
+		}
+		x := v_0.Args[0]
+		if v_1.Op != OpNeg64 || x != v_1.Args[0] {
+			break
+		}
+		v.reset(OpConst64)
+		v.AuxInt = int64ToAuxInt(-1)
+		return true
+	}
 	// match: (Sub64 (Add64 x y) x)
 	// result: y
 	for {
@@ -23987,6 +24335,34 @@ func rewriteValuegeneric_OpSub8(v *Value) bool {
 		v.AuxInt = int8ToAuxInt(0)
 		return true
 	}
+	// match: (Sub8 (Neg8 x) (Com8 x))
+	// result: (Const8 [1])
+	for {
+		if v_0.Op != OpNeg8 {
+			break
+		}
+		x := v_0.Args[0]
+		if v_1.Op != OpCom8 || x != v_1.Args[0] {
+			break
+		}
+		v.reset(OpConst8)
+		v.AuxInt = int8ToAuxInt(1)
+		return true
+	}
+	// match: (Sub8 (Com8 x) (Neg8 x))
+	// result: (Const8 [-1])
+	for {
+		if v_0.Op != OpCom8 {
+			break
+		}
+		x := v_0.Args[0]
+		if v_1.Op != OpNeg8 || x != v_1.Args[0] {
+			break
+		}
+		v.reset(OpConst8)
+		v.AuxInt = int8ToAuxInt(-1)
+		return true
+	}
 	// match: (Sub8 (Add8 x y) x)
 	// result: y
 	for {
@@ -24714,6 +25090,37 @@ func rewriteValuegeneric_OpXor16(v *Value) bool {
 		}
 		break
 	}
+	// match: (Xor16 (Com16 x) x)
+	// result: (Const16 [-1])
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpCom16 {
+				continue
+			}
+			x := v_0.Args[0]
+			if x != v_1 {
+				continue
+			}
+			v.reset(OpConst16)
+			v.AuxInt = int16ToAuxInt(-1)
+			return true
+		}
+		break
+	}
+	// match: (Xor16 (Const16 [-1]) x)
+	// result: (Com16 x)
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != -1 {
+				continue
+			}
+			x := v_1
+			v.reset(OpCom16)
+			v.AddArg(x)
+			return true
+		}
+		break
+	}
 	// match: (Xor16 x (Xor16 x y))
 	// result: y
 	for {
@@ -24845,6 +25252,37 @@ func rewriteValuegeneric_OpXor32(v *Value) bool {
 		}
 		break
 	}
+	// match: (Xor32 (Com32 x) x)
+	// result: (Const32 [-1])
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpCom32 {
+				continue
+			}
+			x := v_0.Args[0]
+			if x != v_1 {
+				continue
+			}
+			v.reset(OpConst32)
+			v.AuxInt = int32ToAuxInt(-1)
+			return true
+		}
+		break
+	}
+	// match: (Xor32 (Const32 [-1]) x)
+	// result: (Com32 x)
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != -1 {
+				continue
+			}
+			x := v_1
+			v.reset(OpCom32)
+			v.AddArg(x)
+			return true
+		}
+		break
+	}
 	// match: (Xor32 x (Xor32 x y))
 	// result: y
 	for {
@@ -24976,6 +25414,37 @@ func rewriteValuegeneric_OpXor64(v *Value) bool {
 		}
 		break
 	}
+	// match: (Xor64 (Com64 x) x)
+	// result: (Const64 [-1])
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpCom64 {
+				continue
+			}
+			x := v_0.Args[0]
+			if x != v_1 {
+				continue
+			}
+			v.reset(OpConst64)
+			v.AuxInt = int64ToAuxInt(-1)
+			return true
+		}
+		break
+	}
+	// match: (Xor64 (Const64 [-1]) x)
+	// result: (Com64 x)
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != -1 {
+				continue
+			}
+			x := v_1
+			v.reset(OpCom64)
+			v.AddArg(x)
+			return true
+		}
+		break
+	}
 	// match: (Xor64 x (Xor64 x y))
 	// result: y
 	for {
@@ -25107,6 +25576,37 @@ func rewriteValuegeneric_OpXor8(v *Value) bool {
 		}
 		break
 	}
+	// match: (Xor8 (Com8 x) x)
+	// result: (Const8 [-1])
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpCom8 {
+				continue
+			}
+			x := v_0.Args[0]
+			if x != v_1 {
+				continue
+			}
+			v.reset(OpConst8)
+			v.AuxInt = int8ToAuxInt(-1)
+			return true
+		}
+		break
+	}
+	// match: (Xor8 (Const8 [-1]) x)
+	// result: (Com8 x)
+	for {
+		for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 {
+			if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != -1 {
+				continue
+			}
+			x := v_1
+			v.reset(OpCom8)
+			v.AddArg(x)
+			return true
+		}
+		break
+	}
 	// match: (Xor8 x (Xor8 x y))
 	// result: y
 	for {
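Note: the new rewrites are justified by standard two's-complement identities: x | ^x = -1, x & ^x = 0, x ^ ^x = -1, ^x + x = -1, -x - ^x = 1, ^x - (-x) = -1, -1 ^ x = ^x, and x + (-y) = x - y. Below is a minimal standalone Go sketch that spot-checks these identities at runtime; it is illustrative only and not part of this change (the file name, the check helper, and the chosen test values are made up for the example).

// identities.go: illustrative sketch only, not part of this CL.
// It spot-checks the two's-complement identities that the new
// generic.rules rewrites rely on.
package main

import "fmt"

func check(name string, ok bool) {
	if !ok {
		fmt.Println("FAIL:", name)
	}
}

func main() {
	vals := []int64{0, 1, -1, 42, -1 << 63, 1<<63 - 1}
	for _, x := range vals {
		check("x | ^x == -1", x|^x == -1)       // (Or (Com x) x) => (Const [-1])
		check("x & ^x == 0", x & ^x == 0)       // (And (Com x) x) => (Const [0]); note the space: x &^ x is Go's AND NOT operator
		check("x ^ ^x == -1", x^^x == -1)       // (Xor (Com x) x) => (Const [-1])
		check("^x + x == -1", ^x+x == -1)       // (Add (Com x) x) => (Const [-1])
		check("-x - ^x == 1", -x-^x == 1)       // (Sub (Neg x) (Com x)) => (Const [1])
		check("^x - (-x) == -1", ^x-(-x) == -1) // (Sub (Com x) (Neg x)) => (Const [-1])
		check("-1 ^ x == ^x", -1^x == ^x)       // (Xor (Const [-1]) x) => (Com x)
		for _, y := range vals {
			check("x + (-y) == x - y", x+(-y) == x-y) // (Add x (Neg y)) => (Sub x y)
		}
	}
	fmt.Println("identity checks done")
}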