diff --git a/src/cmd/compile/internal/ssa/gen/generic.rules b/src/cmd/compile/internal/ssa/gen/generic.rules
index dacc2007c8..3270ec1534 100644
--- a/src/cmd/compile/internal/ssa/gen/generic.rules
+++ b/src/cmd/compile/internal/ssa/gen/generic.rules
@@ -414,6 +414,55 @@
 (Neg32 (Sub32 x y)) -> (Sub32 y x)
 (Neg64 (Sub64 x y)) -> (Sub64 y x)
 
+(And64 x (And64 x y)) -> (And64 x y)
+(And32 x (And32 x y)) -> (And32 x y)
+(And16 x (And16 x y)) -> (And16 x y)
+(And8 x (And8 x y)) -> (And8 x y)
+(And64 x (And64 y x)) -> (And64 x y)
+(And32 x (And32 y x)) -> (And32 x y)
+(And16 x (And16 y x)) -> (And16 x y)
+(And8 x (And8 y x)) -> (And8 x y)
+(And64 (And64 x y) x) -> (And64 x y)
+(And32 (And32 x y) x) -> (And32 x y)
+(And16 (And16 x y) x) -> (And16 x y)
+(And8 (And8 x y) x) -> (And8 x y)
+(And64 (And64 x y) y) -> (And64 x y)
+(And32 (And32 x y) y) -> (And32 x y)
+(And16 (And16 x y) y) -> (And16 x y)
+(And8 (And8 x y) y) -> (And8 x y)
+(Or64 x (Or64 x y)) -> (Or64 x y)
+(Or32 x (Or32 x y)) -> (Or32 x y)
+(Or16 x (Or16 x y)) -> (Or16 x y)
+(Or8 x (Or8 x y)) -> (Or8 x y)
+(Or64 x (Or64 y x)) -> (Or64 x y)
+(Or32 x (Or32 y x)) -> (Or32 x y)
+(Or16 x (Or16 y x)) -> (Or16 x y)
+(Or8 x (Or8 y x)) -> (Or8 x y)
+(Or64 (Or64 x y) x) -> (Or64 x y)
+(Or32 (Or32 x y) x) -> (Or32 x y)
+(Or16 (Or16 x y) x) -> (Or16 x y)
+(Or8 (Or8 x y) x) -> (Or8 x y)
+(Or64 (Or64 x y) y) -> (Or64 x y)
+(Or32 (Or32 x y) y) -> (Or32 x y)
+(Or16 (Or16 x y) y) -> (Or16 x y)
+(Or8 (Or8 x y) y) -> (Or8 x y)
+(Xor64 x (Xor64 x y)) -> y
+(Xor32 x (Xor32 x y)) -> y
+(Xor16 x (Xor16 x y)) -> y
+(Xor8 x (Xor8 x y)) -> y
+(Xor64 x (Xor64 y x)) -> y
+(Xor32 x (Xor32 y x)) -> y
+(Xor16 x (Xor16 y x)) -> y
+(Xor8 x (Xor8 y x)) -> y
+(Xor64 (Xor64 x y) x) -> y
+(Xor32 (Xor32 x y) x) -> y
+(Xor16 (Xor16 x y) x) -> y
+(Xor8 (Xor8 x y) x) -> y
+(Xor64 (Xor64 x y) y) -> x
+(Xor32 (Xor32 x y) y) -> x
+(Xor16 (Xor16 x y) y) -> x
+(Xor8 (Xor8 x y) y) -> x
+
 (Trunc64to8 (And64 (Const64 [y]) x)) && y&0xFF == 0xFF -> (Trunc64to8 x)
 (Trunc64to16 (And64 (Const64 [y]) x)) && y&0xFFFF == 0xFFFF -> (Trunc64to16 x)
 (Trunc64to32 (And64 (Const64 [y]) x)) && y&0xFFFFFFFF == 0xFFFFFFFF -> (Trunc64to32 x)
diff --git a/src/cmd/compile/internal/ssa/rewritegeneric.go b/src/cmd/compile/internal/ssa/rewritegeneric.go
index 9b0f43c414..54a6815c93 100644
--- a/src/cmd/compile/internal/ssa/rewritegeneric.go
+++ b/src/cmd/compile/internal/ssa/rewritegeneric.go
@@ -732,6 +732,78 @@ func rewriteValuegeneric_OpAnd16(v *Value, config *Config) bool {
 		v.AuxInt = 0
 		return true
 	}
+	// match: (And16 x (And16 x y))
+	// cond:
+	// result: (And16 x y)
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpAnd16 {
+			break
+		}
+		if x != v_1.Args[0] {
+			break
+		}
+		y := v_1.Args[1]
+		v.reset(OpAnd16)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (And16 x (And16 y x))
+	// cond:
+	// result: (And16 x y)
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpAnd16 {
+			break
+		}
+		y := v_1.Args[0]
+		if x != v_1.Args[1] {
+			break
+		}
+		v.reset(OpAnd16)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (And16 (And16 x y) x)
+	// cond:
+	// result: (And16 x y)
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpAnd16 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if x != v.Args[1] {
+			break
+		}
+		v.reset(OpAnd16)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (And16 (And16 x y) y)
+	// cond:
+	// result: (And16 x y)
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpAnd16 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if y != v.Args[1] {
+			break
+		}
+		v.reset(OpAnd16)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
 	return false
 }
 func rewriteValuegeneric_OpAnd32(v *Value, config *Config) bool {
@@ -803,6 +875,78 @@ func rewriteValuegeneric_OpAnd32(v *Value, config *Config) bool {
 		v.AuxInt = 0
 		return true
 	}
+	// match: (And32 x (And32 x y))
+	// cond:
+	// result: (And32 x y)
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpAnd32 {
+			break
+		}
+		if x != v_1.Args[0] {
+			break
+		}
+		y := v_1.Args[1]
+		v.reset(OpAnd32)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (And32 x (And32 y x))
+	// cond:
+	// result: (And32 x y)
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpAnd32 {
+			break
+		}
+		y := v_1.Args[0]
+		if x != v_1.Args[1] {
+			break
+		}
+		v.reset(OpAnd32)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (And32 (And32 x y) x)
+	// cond:
+	// result: (And32 x y)
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpAnd32 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if x != v.Args[1] {
+			break
+		}
+		v.reset(OpAnd32)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (And32 (And32 x y) y)
+	// cond:
+	// result: (And32 x y)
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpAnd32 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if y != v.Args[1] {
+			break
+		}
+		v.reset(OpAnd32)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
 	return false
 }
 func rewriteValuegeneric_OpAnd64(v *Value, config *Config) bool {
@@ -874,6 +1018,78 @@ func rewriteValuegeneric_OpAnd64(v *Value, config *Config) bool {
 		v.AuxInt = 0
 		return true
 	}
+	// match: (And64 x (And64 x y))
+	// cond:
+	// result: (And64 x y)
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpAnd64 {
+			break
+		}
+		if x != v_1.Args[0] {
+			break
+		}
+		y := v_1.Args[1]
+		v.reset(OpAnd64)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (And64 x (And64 y x))
+	// cond:
+	// result: (And64 x y)
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpAnd64 {
+			break
+		}
+		y := v_1.Args[0]
+		if x != v_1.Args[1] {
+			break
+		}
+		v.reset(OpAnd64)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (And64 (And64 x y) x)
+	// cond:
+	// result: (And64 x y)
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpAnd64 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if x != v.Args[1] {
+			break
+		}
+		v.reset(OpAnd64)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (And64 (And64 x y) y)
+	// cond:
+	// result: (And64 x y)
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpAnd64 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if y != v.Args[1] {
+			break
+		}
+		v.reset(OpAnd64)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
 	// match: (And64 (Const64 [y]) x)
 	// cond: nlz(y) + nto(y) == 64 && nto(y) >= 32
 	// result: (Rsh64Ux64 (Lsh64x64 x (Const64 [nlz(y)])) (Const64 [nlz(y)]))
@@ -997,6 +1213,78 @@ func rewriteValuegeneric_OpAnd8(v *Value, config *Config) bool {
 		v.AuxInt = 0
 		return true
 	}
+	// match: (And8 x (And8 x y))
+	// cond:
+	// result: (And8 x y)
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpAnd8 {
+			break
+		}
+		if x != v_1.Args[0] {
+			break
+		}
+		y := v_1.Args[1]
+		v.reset(OpAnd8)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (And8 x (And8 y x))
+	// cond:
+	// result: (And8 x y)
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpAnd8 {
+			break
+		}
+		y := v_1.Args[0]
+		if x != v_1.Args[1] {
+			break
+		}
+		v.reset(OpAnd8)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (And8 (And8 x y) x)
+	// cond:
+	// result: (And8 x y)
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpAnd8 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if x != v.Args[1] {
+			break
+		}
+		v.reset(OpAnd8)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (And8 (And8 x y) y)
+	// cond:
+	// result: (And8 x y)
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpAnd8 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if y != v.Args[1] {
+			break
+		}
+		v.reset(OpAnd8)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
 	return false
 }
 func rewriteValuegeneric_OpArg(v *Value, config *Config) bool {
@@ -5739,6 +6027,78 @@ func rewriteValuegeneric_OpOr16(v *Value, config *Config) bool {
 		v.AuxInt = -1
 		return true
 	}
+	// match: (Or16 x (Or16 x y))
+	// cond:
+	// result: (Or16 x y)
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpOr16 {
+			break
+		}
+		if x != v_1.Args[0] {
+			break
+		}
+		y := v_1.Args[1]
+		v.reset(OpOr16)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (Or16 x (Or16 y x))
+	// cond:
+	// result: (Or16 x y)
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpOr16 {
+			break
+		}
+		y := v_1.Args[0]
+		if x != v_1.Args[1] {
+			break
+		}
+		v.reset(OpOr16)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (Or16 (Or16 x y) x)
+	// cond:
+	// result: (Or16 x y)
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpOr16 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if x != v.Args[1] {
+			break
+		}
+		v.reset(OpOr16)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (Or16 (Or16 x y) y)
+	// cond:
+	// result: (Or16 x y)
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpOr16 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if y != v.Args[1] {
+			break
+		}
+		v.reset(OpOr16)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
 	return false
 }
 func rewriteValuegeneric_OpOr32(v *Value, config *Config) bool {
@@ -5810,6 +6170,78 @@ func rewriteValuegeneric_OpOr32(v *Value, config *Config) bool {
 		v.AuxInt = -1
 		return true
 	}
+	// match: (Or32 x (Or32 x y))
+	// cond:
+	// result: (Or32 x y)
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpOr32 {
+			break
+		}
+		if x != v_1.Args[0] {
+			break
+		}
+		y := v_1.Args[1]
+		v.reset(OpOr32)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (Or32 x (Or32 y x))
+	// cond:
+	// result: (Or32 x y)
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpOr32 {
+			break
+		}
+		y := v_1.Args[0]
+		if x != v_1.Args[1] {
+			break
+		}
+		v.reset(OpOr32)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (Or32 (Or32 x y) x)
+	// cond:
+	// result: (Or32 x y)
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpOr32 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if x != v.Args[1] {
+			break
+		}
+		v.reset(OpOr32)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (Or32 (Or32 x y) y)
+	// cond:
+	// result: (Or32 x y)
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpOr32 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if y != v.Args[1] {
+			break
+		}
+		v.reset(OpOr32)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
 	return false
 }
 func rewriteValuegeneric_OpOr64(v *Value, config *Config) bool {
@@ -5881,6 +6313,78 @@ func rewriteValuegeneric_OpOr64(v *Value, config *Config) bool {
 		v.AuxInt = -1
 		return true
 	}
+	// match: (Or64 x (Or64 x y))
+	// cond:
+	// result: (Or64 x y)
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpOr64 {
+			break
+		}
+		if x != v_1.Args[0] {
+			break
+		}
+		y := v_1.Args[1]
+		v.reset(OpOr64)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (Or64 x (Or64 y x))
+	// cond:
+	// result: (Or64 x y)
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpOr64 {
+			break
+		}
+		y := v_1.Args[0]
+		if x != v_1.Args[1] {
+			break
+		}
+		v.reset(OpOr64)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (Or64 (Or64 x y) x)
+	// cond:
+	// result: (Or64 x y)
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpOr64 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if x != v.Args[1] {
+			break
+		}
+		v.reset(OpOr64)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (Or64 (Or64 x y) y)
+	// cond:
+	// result: (Or64 x y)
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpOr64 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if y != v.Args[1] {
+			break
+		}
+		v.reset(OpOr64)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
 	return false
 }
 func rewriteValuegeneric_OpOr8(v *Value, config *Config) bool {
@@ -5952,6 +6456,78 @@ func rewriteValuegeneric_OpOr8(v *Value, config *Config) bool {
 		v.AuxInt = -1
 		return true
 	}
+	// match: (Or8 x (Or8 x y))
+	// cond:
+	// result: (Or8 x y)
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpOr8 {
+			break
+		}
+		if x != v_1.Args[0] {
+			break
+		}
+		y := v_1.Args[1]
+		v.reset(OpOr8)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (Or8 x (Or8 y x))
+	// cond:
+	// result: (Or8 x y)
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpOr8 {
+			break
+		}
+		y := v_1.Args[0]
+		if x != v_1.Args[1] {
+			break
+		}
+		v.reset(OpOr8)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (Or8 (Or8 x y) x)
+	// cond:
+	// result: (Or8 x y)
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpOr8 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if x != v.Args[1] {
+			break
+		}
+		v.reset(OpOr8)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
+	// match: (Or8 (Or8 x y) y)
+	// cond:
+	// result: (Or8 x y)
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpOr8 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if y != v.Args[1] {
+			break
+		}
+		v.reset(OpOr8)
+		v.AddArg(x)
+		v.AddArg(y)
+		return true
+	}
 	return false
 }
 func rewriteValuegeneric_OpPhi(v *Value, config *Config) bool {
@@ -8941,6 +9517,78 @@ func rewriteValuegeneric_OpXor16(v *Value, config *Config) bool {
 		v.AddArg(x)
 		return true
 	}
+	// match: (Xor16 x (Xor16 x y))
+	// cond:
+	// result: y
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpXor16 {
+			break
+		}
+		if x != v_1.Args[0] {
+			break
+		}
+		y := v_1.Args[1]
+		v.reset(OpCopy)
+		v.Type = y.Type
+		v.AddArg(y)
+		return true
+	}
+	// match: (Xor16 x (Xor16 y x))
+	// cond:
+	// result: y
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpXor16 {
+			break
+		}
+		y := v_1.Args[0]
+		if x != v_1.Args[1] {
+			break
+		}
+		v.reset(OpCopy)
+		v.Type = y.Type
+		v.AddArg(y)
+		return true
+	}
+	// match: (Xor16 (Xor16 x y) x)
+	// cond:
+	// result: y
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpXor16 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if x != v.Args[1] {
+			break
+		}
+		v.reset(OpCopy)
+		v.Type = y.Type
+		v.AddArg(y)
+		return true
+	}
+	// match: (Xor16 (Xor16 x y) y)
+	// cond:
+	// result: x
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpXor16 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if y != v.Args[1] {
+			break
+		}
+		v.reset(OpCopy)
+		v.Type = x.Type
+		v.AddArg(x)
+		return true
+	}
 	return false
 }
 func rewriteValuegeneric_OpXor32(v *Value, config *Config) bool {
@@ -8996,6 +9644,78 @@ func rewriteValuegeneric_OpXor32(v *Value, config *Config) bool {
 		v.AddArg(x)
 		return true
 	}
+	// match: (Xor32 x (Xor32 x y))
+	// cond:
+	// result: y
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpXor32 {
+			break
+		}
+		if x != v_1.Args[0] {
+			break
+		}
+		y := v_1.Args[1]
+		v.reset(OpCopy)
+		v.Type = y.Type
+		v.AddArg(y)
+		return true
+	}
+	// match: (Xor32 x (Xor32 y x))
+	// cond:
+	// result: y
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpXor32 {
+			break
+		}
+		y := v_1.Args[0]
+		if x != v_1.Args[1] {
+			break
+		}
+		v.reset(OpCopy)
+		v.Type = y.Type
+		v.AddArg(y)
+		return true
+	}
+	// match: (Xor32 (Xor32 x y) x)
+	// cond:
+	// result: y
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpXor32 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if x != v.Args[1] {
+			break
+		}
+		v.reset(OpCopy)
+		v.Type = y.Type
+		v.AddArg(y)
+		return true
+	}
+	// match: (Xor32 (Xor32 x y) y)
+	// cond:
+	// result: x
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpXor32 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if y != v.Args[1] {
+			break
+		}
+		v.reset(OpCopy)
+		v.Type = x.Type
+		v.AddArg(x)
+		return true
+	}
 	return false
 }
 func rewriteValuegeneric_OpXor64(v *Value, config *Config) bool {
@@ -9051,6 +9771,78 @@ func rewriteValuegeneric_OpXor64(v *Value, config *Config) bool {
 		v.AddArg(x)
 		return true
 	}
+	// match: (Xor64 x (Xor64 x y))
+	// cond:
+	// result: y
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpXor64 {
+			break
+		}
+		if x != v_1.Args[0] {
+			break
+		}
+		y := v_1.Args[1]
+		v.reset(OpCopy)
+		v.Type = y.Type
+		v.AddArg(y)
+		return true
+	}
+	// match: (Xor64 x (Xor64 y x))
+	// cond:
+	// result: y
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpXor64 {
+			break
+		}
+		y := v_1.Args[0]
+		if x != v_1.Args[1] {
+			break
+		}
+		v.reset(OpCopy)
+		v.Type = y.Type
+		v.AddArg(y)
+		return true
+	}
+	// match: (Xor64 (Xor64 x y) x)
+	// cond:
+	// result: y
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpXor64 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if x != v.Args[1] {
+			break
+		}
+		v.reset(OpCopy)
+		v.Type = y.Type
+		v.AddArg(y)
+		return true
+	}
+	// match: (Xor64 (Xor64 x y) y)
+	// cond:
+	// result: x
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpXor64 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if y != v.Args[1] {
+			break
+		}
+		v.reset(OpCopy)
+		v.Type = x.Type
+		v.AddArg(x)
+		return true
+	}
 	return false
 }
 func rewriteValuegeneric_OpXor8(v *Value, config *Config) bool {
@@ -9106,6 +9898,78 @@ func rewriteValuegeneric_OpXor8(v *Value, config *Config) bool {
 		v.AddArg(x)
 		return true
 	}
+	// match: (Xor8 x (Xor8 x y))
+	// cond:
+	// result: y
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpXor8 {
+			break
+		}
+		if x != v_1.Args[0] {
+			break
+		}
+		y := v_1.Args[1]
+		v.reset(OpCopy)
+		v.Type = y.Type
+		v.AddArg(y)
+		return true
+	}
+	// match: (Xor8 x (Xor8 y x))
+	// cond:
+	// result: y
+	for {
+		x := v.Args[0]
+		v_1 := v.Args[1]
+		if v_1.Op != OpXor8 {
+			break
+		}
+		y := v_1.Args[0]
+		if x != v_1.Args[1] {
+			break
+		}
+		v.reset(OpCopy)
+		v.Type = y.Type
+		v.AddArg(y)
+		return true
+	}
+	// match: (Xor8 (Xor8 x y) x)
+	// cond:
+	// result: y
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpXor8 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if x != v.Args[1] {
+			break
+		}
+		v.reset(OpCopy)
+		v.Type = y.Type
+		v.AddArg(y)
+		return true
+	}
+	// match: (Xor8 (Xor8 x y) y)
+	// cond:
+	// result: x
+	for {
+		v_0 := v.Args[0]
+		if v_0.Op != OpXor8 {
+			break
+		}
+		x := v_0.Args[0]
+		y := v_0.Args[1]
+		if y != v.Args[1] {
+			break
+		}
+		v.reset(OpCopy)
+		v.Type = x.Type
+		v.AddArg(x)
+		return true
+	}
 	return false
 }
 func rewriteBlockgeneric(b *Block) bool {