diff --git a/src/cmd/compile/internal/ssa/gen/generic.rules b/src/cmd/compile/internal/ssa/gen/generic.rules
index 9a66312c90..edd8259de3 100644
--- a/src/cmd/compile/internal/ssa/gen/generic.rules
+++ b/src/cmd/compile/internal/ssa/gen/generic.rules
@@ -343,136 +343,136 @@
 // rewrite shifts of 8/16/32 bit consts into 64 bit consts to reduce
 // the number of the other rewrite rules for const shifts
-(Lsh64x32 x (Const32 [c])) -> (Lsh64x64 x (Const64 [int64(uint32(c))]))
-(Lsh64x16 x (Const16 [c])) -> (Lsh64x64 x (Const64 [int64(uint16(c))]))
-(Lsh64x8 x (Const8 [c])) -> (Lsh64x64 x (Const64 [int64(uint8(c))]))
-(Rsh64x32 x (Const32 [c])) -> (Rsh64x64 x (Const64 [int64(uint32(c))]))
-(Rsh64x16 x (Const16 [c])) -> (Rsh64x64 x (Const64 [int64(uint16(c))]))
-(Rsh64x8 x (Const8 [c])) -> (Rsh64x64 x (Const64 [int64(uint8(c))]))
-(Rsh64Ux32 x (Const32 [c])) -> (Rsh64Ux64 x (Const64 [int64(uint32(c))]))
-(Rsh64Ux16 x (Const16 [c])) -> (Rsh64Ux64 x (Const64 [int64(uint16(c))]))
-(Rsh64Ux8 x (Const8 [c])) -> (Rsh64Ux64 x (Const64 [int64(uint8(c))]))
+(Lsh64x32 x (Const32 [c])) => (Lsh64x64 x (Const64 [int64(uint32(c))]))
+(Lsh64x16 x (Const16 [c])) => (Lsh64x64 x (Const64 [int64(uint16(c))]))
+(Lsh64x8 x (Const8 [c])) => (Lsh64x64 x (Const64 [int64(uint8(c))]))
+(Rsh64x32 x (Const32 [c])) => (Rsh64x64 x (Const64 [int64(uint32(c))]))
+(Rsh64x16 x (Const16 [c])) => (Rsh64x64 x (Const64 [int64(uint16(c))]))
+(Rsh64x8 x (Const8 [c])) => (Rsh64x64 x (Const64 [int64(uint8(c))]))
+(Rsh64Ux32 x (Const32 [c])) => (Rsh64Ux64 x (Const64 [int64(uint32(c))]))
+(Rsh64Ux16 x (Const16 [c])) => (Rsh64Ux64 x (Const64 [int64(uint16(c))]))
+(Rsh64Ux8 x (Const8 [c])) => (Rsh64Ux64 x (Const64 [int64(uint8(c))]))

-(Lsh32x32 x (Const32 [c])) -> (Lsh32x64 x (Const64 [int64(uint32(c))]))
-(Lsh32x16 x (Const16 [c])) -> (Lsh32x64 x (Const64 [int64(uint16(c))]))
-(Lsh32x8 x (Const8 [c])) -> (Lsh32x64 x (Const64 [int64(uint8(c))]))
-(Rsh32x32 x (Const32 [c])) -> (Rsh32x64 x (Const64 [int64(uint32(c))]))
-(Rsh32x16 x (Const16 [c])) -> (Rsh32x64 x (Const64 [int64(uint16(c))]))
-(Rsh32x8 x (Const8 [c])) -> (Rsh32x64 x (Const64 [int64(uint8(c))]))
-(Rsh32Ux32 x (Const32 [c])) -> (Rsh32Ux64 x (Const64 [int64(uint32(c))]))
-(Rsh32Ux16 x (Const16 [c])) -> (Rsh32Ux64 x (Const64 [int64(uint16(c))]))
-(Rsh32Ux8 x (Const8 [c])) -> (Rsh32Ux64 x (Const64 [int64(uint8(c))]))
+(Lsh32x32 x (Const32 [c])) => (Lsh32x64 x (Const64 [int64(uint32(c))]))
+(Lsh32x16 x (Const16 [c])) => (Lsh32x64 x (Const64 [int64(uint16(c))]))
+(Lsh32x8 x (Const8 [c])) => (Lsh32x64 x (Const64 [int64(uint8(c))]))
+(Rsh32x32 x (Const32 [c])) => (Rsh32x64 x (Const64 [int64(uint32(c))]))
+(Rsh32x16 x (Const16 [c])) => (Rsh32x64 x (Const64 [int64(uint16(c))]))
+(Rsh32x8 x (Const8 [c])) => (Rsh32x64 x (Const64 [int64(uint8(c))]))
+(Rsh32Ux32 x (Const32 [c])) => (Rsh32Ux64 x (Const64 [int64(uint32(c))]))
+(Rsh32Ux16 x (Const16 [c])) => (Rsh32Ux64 x (Const64 [int64(uint16(c))]))
+(Rsh32Ux8 x (Const8 [c])) => (Rsh32Ux64 x (Const64 [int64(uint8(c))]))
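Why the replacement count is written as int64(uintN(c)) and not int64(c): the narrow shift count must be zero-extended, because a count whose high bit is set denotes a very large unsigned shift, not a negative one. A minimal standalone sketch of the difference, not part of the CL:

	package main

	import "fmt"

	func main() {
		c := int8(-1)                // shift-count bit pattern 0xff
		fmt.Println(int64(c))        // -1: sign extension would fabricate a negative count
		fmt.Println(int64(uint8(c))) // 255: zero extension; the "large shift" rules below
		                             // can then fold such an oversized count to zero
	}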
-(Lsh16x32 x (Const32 [c])) -> (Lsh16x64 x (Const64 [int64(uint32(c))]))
-(Lsh16x16 x (Const16 [c])) -> (Lsh16x64 x (Const64 [int64(uint16(c))]))
-(Lsh16x8 x (Const8 [c])) -> (Lsh16x64 x (Const64 [int64(uint8(c))]))
-(Rsh16x32 x (Const32 [c])) -> (Rsh16x64 x (Const64 [int64(uint32(c))]))
-(Rsh16x16 x (Const16 [c])) -> (Rsh16x64 x (Const64 [int64(uint16(c))]))
-(Rsh16x8 x (Const8 [c])) -> (Rsh16x64 x (Const64 [int64(uint8(c))]))
-(Rsh16Ux32 x (Const32 [c])) -> (Rsh16Ux64 x (Const64 [int64(uint32(c))]))
-(Rsh16Ux16 x (Const16 [c])) -> (Rsh16Ux64 x (Const64 [int64(uint16(c))]))
-(Rsh16Ux8 x (Const8 [c])) -> (Rsh16Ux64 x (Const64 [int64(uint8(c))]))
+(Lsh16x32 x (Const32 [c])) => (Lsh16x64 x (Const64 [int64(uint32(c))]))
+(Lsh16x16 x (Const16 [c])) => (Lsh16x64 x (Const64 [int64(uint16(c))]))
+(Lsh16x8 x (Const8 [c])) => (Lsh16x64 x (Const64 [int64(uint8(c))]))
+(Rsh16x32 x (Const32 [c])) => (Rsh16x64 x (Const64 [int64(uint32(c))]))
+(Rsh16x16 x (Const16 [c])) => (Rsh16x64 x (Const64 [int64(uint16(c))]))
+(Rsh16x8 x (Const8 [c])) => (Rsh16x64 x (Const64 [int64(uint8(c))]))
+(Rsh16Ux32 x (Const32 [c])) => (Rsh16Ux64 x (Const64 [int64(uint32(c))]))
+(Rsh16Ux16 x (Const16 [c])) => (Rsh16Ux64 x (Const64 [int64(uint16(c))]))
+(Rsh16Ux8 x (Const8 [c])) => (Rsh16Ux64 x (Const64 [int64(uint8(c))]))

-(Lsh8x32 x (Const32 [c])) -> (Lsh8x64 x (Const64 [int64(uint32(c))]))
-(Lsh8x16 x (Const16 [c])) -> (Lsh8x64 x (Const64 [int64(uint16(c))]))
-(Lsh8x8 x (Const8 [c])) -> (Lsh8x64 x (Const64 [int64(uint8(c))]))
-(Rsh8x32 x (Const32 [c])) -> (Rsh8x64 x (Const64 [int64(uint32(c))]))
-(Rsh8x16 x (Const16 [c])) -> (Rsh8x64 x (Const64 [int64(uint16(c))]))
-(Rsh8x8 x (Const8 [c])) -> (Rsh8x64 x (Const64 [int64(uint8(c))]))
-(Rsh8Ux32 x (Const32 [c])) -> (Rsh8Ux64 x (Const64 [int64(uint32(c))]))
-(Rsh8Ux16 x (Const16 [c])) -> (Rsh8Ux64 x (Const64 [int64(uint16(c))]))
-(Rsh8Ux8 x (Const8 [c])) -> (Rsh8Ux64 x (Const64 [int64(uint8(c))]))
+(Lsh8x32 x (Const32 [c])) => (Lsh8x64 x (Const64 [int64(uint32(c))]))
+(Lsh8x16 x (Const16 [c])) => (Lsh8x64 x (Const64 [int64(uint16(c))]))
+(Lsh8x8 x (Const8 [c])) => (Lsh8x64 x (Const64 [int64(uint8(c))]))
+(Rsh8x32 x (Const32 [c])) => (Rsh8x64 x (Const64 [int64(uint32(c))]))
+(Rsh8x16 x (Const16 [c])) => (Rsh8x64 x (Const64 [int64(uint16(c))]))
+(Rsh8x8 x (Const8 [c])) => (Rsh8x64 x (Const64 [int64(uint8(c))]))
+(Rsh8Ux32 x (Const32 [c])) => (Rsh8Ux64 x (Const64 [int64(uint32(c))]))
+(Rsh8Ux16 x (Const16 [c])) => (Rsh8Ux64 x (Const64 [int64(uint16(c))]))
+(Rsh8Ux8 x (Const8 [c])) => (Rsh8Ux64 x (Const64 [int64(uint8(c))]))

 // shifts by zero
-(Lsh(64|32|16|8)x64 x (Const64 [0])) -> x
-(Rsh(64|32|16|8)x64 x (Const64 [0])) -> x
-(Rsh(64|32|16|8)Ux64 x (Const64 [0])) -> x
+(Lsh(64|32|16|8)x64 x (Const64 [0])) => x
+(Rsh(64|32|16|8)x64 x (Const64 [0])) => x
+(Rsh(64|32|16|8)Ux64 x (Const64 [0])) => x

 // rotates by multiples of register width
-(RotateLeft64 x (Const64 [c])) && c%64 == 0 -> x
-(RotateLeft32 x (Const32 [c])) && c%32 == 0 -> x
-(RotateLeft16 x (Const16 [c])) && c%16 == 0 -> x
-(RotateLeft8 x (Const8 [c])) && c%8 == 0 -> x
+(RotateLeft64 x (Const64 [c])) && c%64 == 0 => x
+(RotateLeft32 x (Const32 [c])) && c%32 == 0 => x
+(RotateLeft16 x (Const16 [c])) && c%16 == 0 => x
+(RotateLeft8 x (Const8 [c])) && c%8 == 0 => x
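The rotate rules above rely on rotation by any multiple of the register width being the identity, which is easy to check with math/bits; a small sketch, not part of the CL:

	package main

	import (
		"fmt"
		"math/bits"
	)

	func main() {
		x := uint16(0xabcd)
		fmt.Println(bits.RotateLeft16(x, 16) == x) // true
		fmt.Println(bits.RotateLeft16(x, 32) == x) // true
	}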
 // zero shifted
-(Lsh64x(64|32|16|8) (Const64 [0]) _) -> (Const64 [0])
-(Rsh64x(64|32|16|8) (Const64 [0]) _) -> (Const64 [0])
-(Rsh64Ux(64|32|16|8) (Const64 [0]) _) -> (Const64 [0])
-(Lsh32x(64|32|16|8) (Const32 [0]) _) -> (Const32 [0])
-(Rsh32x(64|32|16|8) (Const32 [0]) _) -> (Const32 [0])
-(Rsh32Ux(64|32|16|8) (Const32 [0]) _) -> (Const32 [0])
-(Lsh16x(64|32|16|8) (Const16 [0]) _) -> (Const16 [0])
-(Rsh16x(64|32|16|8) (Const16 [0]) _) -> (Const16 [0])
-(Rsh16Ux(64|32|16|8) (Const16 [0]) _) -> (Const16 [0])
-(Lsh8x(64|32|16|8) (Const8 [0]) _) -> (Const8 [0])
-(Rsh8x(64|32|16|8) (Const8 [0]) _) -> (Const8 [0])
-(Rsh8Ux(64|32|16|8) (Const8 [0]) _) -> (Const8 [0])
+(Lsh64x(64|32|16|8) (Const64 [0]) _) => (Const64 [0])
+(Rsh64x(64|32|16|8) (Const64 [0]) _) => (Const64 [0])
+(Rsh64Ux(64|32|16|8) (Const64 [0]) _) => (Const64 [0])
+(Lsh32x(64|32|16|8) (Const32 [0]) _) => (Const32 [0])
+(Rsh32x(64|32|16|8) (Const32 [0]) _) => (Const32 [0])
+(Rsh32Ux(64|32|16|8) (Const32 [0]) _) => (Const32 [0])
+(Lsh16x(64|32|16|8) (Const16 [0]) _) => (Const16 [0])
+(Rsh16x(64|32|16|8) (Const16 [0]) _) => (Const16 [0])
+(Rsh16Ux(64|32|16|8) (Const16 [0]) _) => (Const16 [0])
+(Lsh8x(64|32|16|8) (Const8 [0]) _) => (Const8 [0])
+(Rsh8x(64|32|16|8) (Const8 [0]) _) => (Const8 [0])
+(Rsh8Ux(64|32|16|8) (Const8 [0]) _) => (Const8 [0])

 // large left shifts of all values, and right shifts of unsigned values
-((Lsh64|Rsh64U)x64 _ (Const64 [c])) && uint64(c) >= 64 -> (Const64 [0])
-((Lsh32|Rsh32U)x64 _ (Const64 [c])) && uint64(c) >= 32 -> (Const32 [0])
-((Lsh16|Rsh16U)x64 _ (Const64 [c])) && uint64(c) >= 16 -> (Const16 [0])
-((Lsh8|Rsh8U)x64 _ (Const64 [c])) && uint64(c) >= 8 -> (Const8 [0])
+((Lsh64|Rsh64U)x64 _ (Const64 [c])) && uint64(c) >= 64 => (Const64 [0])
+((Lsh32|Rsh32U)x64 _ (Const64 [c])) && uint64(c) >= 32 => (Const32 [0])
+((Lsh16|Rsh16U)x64 _ (Const64 [c])) && uint64(c) >= 16 => (Const16 [0])
+((Lsh8|Rsh8U)x64 _ (Const64 [c])) && uint64(c) >= 8 => (Const8 [0])

 // combine const shifts
-(Lsh64x64 (Lsh64x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Lsh64x64 x (Const64 [c+d]))
-(Lsh32x64 (Lsh32x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Lsh32x64 x (Const64 [c+d]))
-(Lsh16x64 (Lsh16x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Lsh16x64 x (Const64 [c+d]))
-(Lsh8x64 (Lsh8x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Lsh8x64 x (Const64 [c+d]))
+(Lsh64x64 (Lsh64x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) => (Lsh64x64 x (Const64 [c+d]))
+(Lsh32x64 (Lsh32x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) => (Lsh32x64 x (Const64 [c+d]))
+(Lsh16x64 (Lsh16x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) => (Lsh16x64 x (Const64 [c+d]))
+(Lsh8x64 (Lsh8x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) => (Lsh8x64 x (Const64 [c+d]))

-(Rsh64x64 (Rsh64x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh64x64 x (Const64 [c+d]))
-(Rsh32x64 (Rsh32x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh32x64 x (Const64 [c+d]))
-(Rsh16x64 (Rsh16x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh16x64 x (Const64 [c+d]))
-(Rsh8x64 (Rsh8x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh8x64 x (Const64 [c+d]))
+(Rsh64x64 (Rsh64x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) => (Rsh64x64 x (Const64 [c+d]))
+(Rsh32x64 (Rsh32x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) => (Rsh32x64 x (Const64 [c+d]))
+(Rsh16x64 (Rsh16x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) => (Rsh16x64 x (Const64 [c+d]))
+(Rsh8x64 (Rsh8x64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) => (Rsh8x64 x (Const64 [c+d]))
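The !uaddOvf(c,d) guard keeps these folds sound: two successive shifts equal one shift by c+d only if the unsigned sum of the counts does not wrap around. The helper is defined in rewrite.go essentially as:

	// uaddOvf reports whether unsigned a+b would overflow.
	func uaddOvf(a, b int64) bool {
		return uint64(a)+uint64(b) < uint64(a)
	}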
-(Rsh64Ux64 (Rsh64Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh64Ux64 x (Const64 [c+d]))
-(Rsh32Ux64 (Rsh32Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh32Ux64 x (Const64 [c+d]))
-(Rsh16Ux64 (Rsh16Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh16Ux64 x (Const64 [c+d]))
-(Rsh8Ux64 (Rsh8Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) -> (Rsh8Ux64 x (Const64 [c+d]))
+(Rsh64Ux64 (Rsh64Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) => (Rsh64Ux64 x (Const64 [c+d]))
+(Rsh32Ux64 (Rsh32Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) => (Rsh32Ux64 x (Const64 [c+d]))
+(Rsh16Ux64 (Rsh16Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) => (Rsh16Ux64 x (Const64 [c+d]))
+(Rsh8Ux64 (Rsh8Ux64 x (Const64 [c])) (Const64 [d])) && !uaddOvf(c,d) => (Rsh8Ux64 x (Const64 [c+d]))

 // Remove signed right shift before an unsigned right shift that extracts the sign bit.
-(Rsh8Ux64 (Rsh8x64 x _) (Const64 [7] )) -> (Rsh8Ux64 x (Const64 [7] ))
-(Rsh16Ux64 (Rsh16x64 x _) (Const64 [15])) -> (Rsh16Ux64 x (Const64 [15]))
-(Rsh32Ux64 (Rsh32x64 x _) (Const64 [31])) -> (Rsh32Ux64 x (Const64 [31]))
-(Rsh64Ux64 (Rsh64x64 x _) (Const64 [63])) -> (Rsh64Ux64 x (Const64 [63]))
+(Rsh8Ux64 (Rsh8x64 x _) (Const64 [7] )) => (Rsh8Ux64 x (Const64 [7] ))
+(Rsh16Ux64 (Rsh16x64 x _) (Const64 [15])) => (Rsh16Ux64 x (Const64 [15]))
+(Rsh32Ux64 (Rsh32x64 x _) (Const64 [31])) => (Rsh32Ux64 x (Const64 [31]))
+(Rsh64Ux64 (Rsh64x64 x _) (Const64 [63])) => (Rsh64Ux64 x (Const64 [63]))

 // ((x >> c1) << c2) >> c3
 (Rsh(64|32|16|8)Ux64 (Lsh(64|32|16|8)x64 (Rsh(64|32|16|8)Ux64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
   && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
-  -> (Rsh(64|32|16|8)Ux64 x (Const64 [c1-c2+c3]))
+  => (Rsh(64|32|16|8)Ux64 x (Const64 [c1-c2+c3]))

 // ((x << c1) >> c2) << c3
 (Lsh(64|32|16|8)x64 (Rsh(64|32|16|8)Ux64 (Lsh(64|32|16|8)x64 x (Const64 [c1])) (Const64 [c2])) (Const64 [c3]))
   && uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)
-  -> (Lsh(64|32|16|8)x64 x (Const64 [c1-c2+c3]))
+  => (Lsh(64|32|16|8)x64 x (Const64 [c1-c2+c3]))

 // (x >> c) & uppermask = 0
-(And64 (Const64 [m]) (Rsh64Ux64 _ (Const64 [c]))) && c >= 64-ntz(m) -> (Const64 [0])
-(And32 (Const32 [m]) (Rsh32Ux64 _ (Const64 [c]))) && c >= 64-ntz(m) -> (Const32 [0])
-(And16 (Const16 [m]) (Rsh16Ux64 _ (Const64 [c]))) && c >= 64-ntz(m) -> (Const16 [0])
-(And8 (Const8 [m]) (Rsh8Ux64 _ (Const64 [c]))) && c >= 64-ntz(m) -> (Const8 [0])
+(And64 (Const64 [m]) (Rsh64Ux64 _ (Const64 [c]))) && c >= int64(64-ntz64(m)) => (Const64 [0])
+(And32 (Const32 [m]) (Rsh32Ux64 _ (Const64 [c]))) && c >= int64(64-ntz32(m)) => (Const32 [0])
+(And16 (Const16 [m]) (Rsh16Ux64 _ (Const64 [c]))) && c >= int64(64-ntz16(m)) => (Const16 [0])
+(And8 (Const8 [m]) (Rsh8Ux64 _ (Const64 [c]))) && c >= int64(64-ntz8(m)) => (Const8 [0])

 // (x << c) & lowermask = 0
-(And64 (Const64 [m]) (Lsh64x64 _ (Const64 [c]))) && c >= 64-nlz(m) -> (Const64 [0])
-(And32 (Const32 [m]) (Lsh32x64 _ (Const64 [c]))) && c >= 64-nlz(m) -> (Const32 [0])
-(And16 (Const16 [m]) (Lsh16x64 _ (Const64 [c]))) && c >= 64-nlz(m) -> (Const16 [0])
-(And8 (Const8 [m]) (Lsh8x64 _ (Const64 [c]))) && c >= 64-nlz(m) -> (Const8 [0])
+(And64 (Const64 [m]) (Lsh64x64 _ (Const64 [c]))) && c >= int64(64-nlz64(m)) => (Const64 [0])
+(And32 (Const32 [m]) (Lsh32x64 _ (Const64 [c]))) && c >= int64(32-nlz32(m)) => (Const32 [0])
+(And16 (Const16 [m]) (Lsh16x64 _ (Const64 [c]))) && c >= int64(16-nlz16(m)) => (Const16 [0])
+(And8 (Const8 [m]) (Lsh8x64 _ (Const64 [c]))) && c >= int64(8-nlz8(m)) => (Const8 [0])
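The arithmetic behind the two mask groups above: after x>>c the nonzero bits of an N-bit value sit below bit N-c, so if the mask's lowest set bit (its ntz) lies at or above that point the AND is always zero; x<<c and the mask's nlz are the mirror image. A sketch of the boundary case, not part of the CL:

	package main

	import (
		"fmt"
		"math/bits"
	)

	func main() {
		m := uint64(0xff00000000000000)         // uppermask, ntz64(m) = 56
		c := uint(64 - bits.TrailingZeros64(m)) // smallest count the rule accepts: 8
		x := uint64(0x123456789abcdef0)
		fmt.Println((x>>c)&m == 0) // true for every x: the mask only covers shifted-out bits
	}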
 // replace shifts with zero extensions
-(Rsh16Ux64 (Lsh16x64 x (Const64 [8])) (Const64 [8])) -> (ZeroExt8to16 (Trunc16to8 x))
-(Rsh32Ux64 (Lsh32x64 x (Const64 [24])) (Const64 [24])) -> (ZeroExt8to32 (Trunc32to8 x))
-(Rsh64Ux64 (Lsh64x64 x (Const64 [56])) (Const64 [56])) -> (ZeroExt8to64 (Trunc64to8 x))
-(Rsh32Ux64 (Lsh32x64 x (Const64 [16])) (Const64 [16])) -> (ZeroExt16to32 (Trunc32to16 x))
-(Rsh64Ux64 (Lsh64x64 x (Const64 [48])) (Const64 [48])) -> (ZeroExt16to64 (Trunc64to16 x))
-(Rsh64Ux64 (Lsh64x64 x (Const64 [32])) (Const64 [32])) -> (ZeroExt32to64 (Trunc64to32 x))
+(Rsh16Ux64 (Lsh16x64 x (Const64 [8])) (Const64 [8])) => (ZeroExt8to16 (Trunc16to8 x))
+(Rsh32Ux64 (Lsh32x64 x (Const64 [24])) (Const64 [24])) => (ZeroExt8to32 (Trunc32to8 x))
+(Rsh64Ux64 (Lsh64x64 x (Const64 [56])) (Const64 [56])) => (ZeroExt8to64 (Trunc64to8 x))
+(Rsh32Ux64 (Lsh32x64 x (Const64 [16])) (Const64 [16])) => (ZeroExt16to32 (Trunc32to16 x))
+(Rsh64Ux64 (Lsh64x64 x (Const64 [48])) (Const64 [48])) => (ZeroExt16to64 (Trunc64to16 x))
+(Rsh64Ux64 (Lsh64x64 x (Const64 [32])) (Const64 [32])) => (ZeroExt32to64 (Trunc64to32 x))

 // replace shifts with sign extensions
-(Rsh16x64 (Lsh16x64 x (Const64 [8])) (Const64 [8])) -> (SignExt8to16 (Trunc16to8 x))
-(Rsh32x64 (Lsh32x64 x (Const64 [24])) (Const64 [24])) -> (SignExt8to32 (Trunc32to8 x))
-(Rsh64x64 (Lsh64x64 x (Const64 [56])) (Const64 [56])) -> (SignExt8to64 (Trunc64to8 x))
-(Rsh32x64 (Lsh32x64 x (Const64 [16])) (Const64 [16])) -> (SignExt16to32 (Trunc32to16 x))
-(Rsh64x64 (Lsh64x64 x (Const64 [48])) (Const64 [48])) -> (SignExt16to64 (Trunc64to16 x))
-(Rsh64x64 (Lsh64x64 x (Const64 [32])) (Const64 [32])) -> (SignExt32to64 (Trunc64to32 x))
+(Rsh16x64 (Lsh16x64 x (Const64 [8])) (Const64 [8])) => (SignExt8to16 (Trunc16to8 x))
+(Rsh32x64 (Lsh32x64 x (Const64 [24])) (Const64 [24])) => (SignExt8to32 (Trunc32to8 x))
+(Rsh64x64 (Lsh64x64 x (Const64 [56])) (Const64 [56])) => (SignExt8to64 (Trunc64to8 x))
+(Rsh32x64 (Lsh32x64 x (Const64 [16])) (Const64 [16])) => (SignExt16to32 (Trunc32to16 x))
+(Rsh64x64 (Lsh64x64 x (Const64 [48])) (Const64 [48])) => (SignExt16to64 (Trunc64to16 x))
+(Rsh64x64 (Lsh64x64 x (Const64 [32])) (Const64 [32])) => (SignExt32to64 (Trunc64to32 x))

 // constant comparisons
 (Eq(64|32|16|8) (Const(64|32|16|8) [c]) (Const(64|32|16|8) [d])) -> (ConstBool [b2i(c == d)])
diff --git a/src/cmd/compile/internal/ssa/rewrite.go b/src/cmd/compile/internal/ssa/rewrite.go
index 9644042fe6..62580bceb8 100644
--- a/src/cmd/compile/internal/ssa/rewrite.go
+++ b/src/cmd/compile/internal/ssa/rewrite.go
@@ -384,9 +384,11 @@ func isSameSym(sym interface{}, name string) bool {
 }

 // nlz returns the number of leading zeros.
-func nlz(x int64) int64 {
-	return int64(bits.LeadingZeros64(uint64(x)))
-}
+func nlz(x int64) int64 { return int64(bits.LeadingZeros64(uint64(x))) } // TODO: remove when no longer used
+func nlz64(x int64) int { return bits.LeadingZeros64(uint64(x)) }
+func nlz32(x int32) int { return bits.LeadingZeros32(uint32(x)) }
+func nlz16(x int16) int { return bits.LeadingZeros16(uint16(x)) }
+func nlz8(x int8) int { return bits.LeadingZeros8(uint8(x)) }

 // ntzX returns the number of trailing zeros.
func ntz(x int64) int64 { return int64(bits.TrailingZeros64(uint64(x))) } // TODO: remove when no longer used diff --git a/src/cmd/compile/internal/ssa/rewritegeneric.go b/src/cmd/compile/internal/ssa/rewritegeneric.go index 1fabe738d4..214de6448d 100644 --- a/src/cmd/compile/internal/ssa/rewritegeneric.go +++ b/src/cmd/compile/internal/ssa/rewritegeneric.go @@ -1571,14 +1571,14 @@ func rewriteValuegeneric_OpAnd16(v *Value) bool { break } // match: (And16 (Const16 [m]) (Rsh16Ux64 _ (Const64 [c]))) - // cond: c >= 64-ntz(m) + // cond: c >= int64(64-ntz16(m)) // result: (Const16 [0]) for { for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { if v_0.Op != OpConst16 { continue } - m := v_0.AuxInt + m := auxIntToInt16(v_0.AuxInt) if v_1.Op != OpRsh16Ux64 { continue } @@ -1587,25 +1587,25 @@ func rewriteValuegeneric_OpAnd16(v *Value) bool { if v_1_1.Op != OpConst64 { continue } - c := v_1_1.AuxInt - if !(c >= 64-ntz(m)) { + c := auxIntToInt64(v_1_1.AuxInt) + if !(c >= int64(64-ntz16(m))) { continue } v.reset(OpConst16) - v.AuxInt = 0 + v.AuxInt = int16ToAuxInt(0) return true } break } // match: (And16 (Const16 [m]) (Lsh16x64 _ (Const64 [c]))) - // cond: c >= 64-nlz(m) + // cond: c >= int64(16-nlz16(m)) // result: (Const16 [0]) for { for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { if v_0.Op != OpConst16 { continue } - m := v_0.AuxInt + m := auxIntToInt16(v_0.AuxInt) if v_1.Op != OpLsh16x64 { continue } @@ -1614,12 +1614,12 @@ func rewriteValuegeneric_OpAnd16(v *Value) bool { if v_1_1.Op != OpConst64 { continue } - c := v_1_1.AuxInt - if !(c >= 64-nlz(m)) { + c := auxIntToInt64(v_1_1.AuxInt) + if !(c >= int64(16-nlz16(m))) { continue } v.reset(OpConst16) - v.AuxInt = 0 + v.AuxInt = int16ToAuxInt(0) return true } break @@ -1769,14 +1769,14 @@ func rewriteValuegeneric_OpAnd32(v *Value) bool { break } // match: (And32 (Const32 [m]) (Rsh32Ux64 _ (Const64 [c]))) - // cond: c >= 64-ntz(m) + // cond: c >= int64(64-ntz32(m)) // result: (Const32 [0]) for { for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { if v_0.Op != OpConst32 { continue } - m := v_0.AuxInt + m := auxIntToInt32(v_0.AuxInt) if v_1.Op != OpRsh32Ux64 { continue } @@ -1785,25 +1785,25 @@ func rewriteValuegeneric_OpAnd32(v *Value) bool { if v_1_1.Op != OpConst64 { continue } - c := v_1_1.AuxInt - if !(c >= 64-ntz(m)) { + c := auxIntToInt64(v_1_1.AuxInt) + if !(c >= int64(64-ntz32(m))) { continue } v.reset(OpConst32) - v.AuxInt = 0 + v.AuxInt = int32ToAuxInt(0) return true } break } // match: (And32 (Const32 [m]) (Lsh32x64 _ (Const64 [c]))) - // cond: c >= 64-nlz(m) + // cond: c >= int64(32-nlz32(m)) // result: (Const32 [0]) for { for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { if v_0.Op != OpConst32 { continue } - m := v_0.AuxInt + m := auxIntToInt32(v_0.AuxInt) if v_1.Op != OpLsh32x64 { continue } @@ -1812,12 +1812,12 @@ func rewriteValuegeneric_OpAnd32(v *Value) bool { if v_1_1.Op != OpConst64 { continue } - c := v_1_1.AuxInt - if !(c >= 64-nlz(m)) { + c := auxIntToInt64(v_1_1.AuxInt) + if !(c >= int64(32-nlz32(m))) { continue } v.reset(OpConst32) - v.AuxInt = 0 + v.AuxInt = int32ToAuxInt(0) return true } break @@ -1967,14 +1967,14 @@ func rewriteValuegeneric_OpAnd64(v *Value) bool { break } // match: (And64 (Const64 [m]) (Rsh64Ux64 _ (Const64 [c]))) - // cond: c >= 64-ntz(m) + // cond: c >= int64(64-ntz64(m)) // result: (Const64 [0]) for { for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { if v_0.Op != OpConst64 { continue } - m := v_0.AuxInt + m := auxIntToInt64(v_0.AuxInt) if v_1.Op != OpRsh64Ux64 { 
continue } @@ -1983,25 +1983,25 @@ func rewriteValuegeneric_OpAnd64(v *Value) bool { if v_1_1.Op != OpConst64 { continue } - c := v_1_1.AuxInt - if !(c >= 64-ntz(m)) { + c := auxIntToInt64(v_1_1.AuxInt) + if !(c >= int64(64-ntz64(m))) { continue } v.reset(OpConst64) - v.AuxInt = 0 + v.AuxInt = int64ToAuxInt(0) return true } break } // match: (And64 (Const64 [m]) (Lsh64x64 _ (Const64 [c]))) - // cond: c >= 64-nlz(m) + // cond: c >= int64(64-nlz64(m)) // result: (Const64 [0]) for { for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { if v_0.Op != OpConst64 { continue } - m := v_0.AuxInt + m := auxIntToInt64(v_0.AuxInt) if v_1.Op != OpLsh64x64 { continue } @@ -2010,12 +2010,12 @@ func rewriteValuegeneric_OpAnd64(v *Value) bool { if v_1_1.Op != OpConst64 { continue } - c := v_1_1.AuxInt - if !(c >= 64-nlz(m)) { + c := auxIntToInt64(v_1_1.AuxInt) + if !(c >= int64(64-nlz64(m))) { continue } v.reset(OpConst64) - v.AuxInt = 0 + v.AuxInt = int64ToAuxInt(0) return true } break @@ -2165,14 +2165,14 @@ func rewriteValuegeneric_OpAnd8(v *Value) bool { break } // match: (And8 (Const8 [m]) (Rsh8Ux64 _ (Const64 [c]))) - // cond: c >= 64-ntz(m) + // cond: c >= int64(64-ntz8(m)) // result: (Const8 [0]) for { for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { if v_0.Op != OpConst8 { continue } - m := v_0.AuxInt + m := auxIntToInt8(v_0.AuxInt) if v_1.Op != OpRsh8Ux64 { continue } @@ -2181,25 +2181,25 @@ func rewriteValuegeneric_OpAnd8(v *Value) bool { if v_1_1.Op != OpConst64 { continue } - c := v_1_1.AuxInt - if !(c >= 64-ntz(m)) { + c := auxIntToInt64(v_1_1.AuxInt) + if !(c >= int64(64-ntz8(m))) { continue } v.reset(OpConst8) - v.AuxInt = 0 + v.AuxInt = int8ToAuxInt(0) return true } break } // match: (And8 (Const8 [m]) (Lsh8x64 _ (Const64 [c]))) - // cond: c >= 64-nlz(m) + // cond: c >= int64(8-nlz8(m)) // result: (Const8 [0]) for { for _i0 := 0; _i0 <= 1; _i0, v_0, v_1 = _i0+1, v_1, v_0 { if v_0.Op != OpConst8 { continue } - m := v_0.AuxInt + m := auxIntToInt8(v_0.AuxInt) if v_1.Op != OpLsh8x64 { continue } @@ -2208,12 +2208,12 @@ func rewriteValuegeneric_OpAnd8(v *Value) bool { if v_1_1.Op != OpConst64 { continue } - c := v_1_1.AuxInt - if !(c >= 64-nlz(m)) { + c := auxIntToInt64(v_1_1.AuxInt) + if !(c >= int64(8-nlz8(m))) { continue } v.reset(OpConst8) - v.AuxInt = 0 + v.AuxInt = int8ToAuxInt(0) return true } break @@ -10918,21 +10918,21 @@ func rewriteValuegeneric_OpLsh16x16(v *Value) bool { if v_1.Op != OpConst16 { break } - c := v_1.AuxInt + c := auxIntToInt16(v_1.AuxInt) v.reset(OpLsh16x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint16(c)) + v0.AuxInt = int64ToAuxInt(int64(uint16(c))) v.AddArg2(x, v0) return true } // match: (Lsh16x16 (Const16 [0]) _) // result: (Const16 [0]) for { - if v_0.Op != OpConst16 || v_0.AuxInt != 0 { + if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 { break } v.reset(OpConst16) - v.AuxInt = 0 + v.AuxInt = int16ToAuxInt(0) return true } return false @@ -10949,21 +10949,21 @@ func rewriteValuegeneric_OpLsh16x32(v *Value) bool { if v_1.Op != OpConst32 { break } - c := v_1.AuxInt + c := auxIntToInt32(v_1.AuxInt) v.reset(OpLsh16x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint32(c)) + v0.AuxInt = int64ToAuxInt(int64(uint32(c))) v.AddArg2(x, v0) return true } // match: (Lsh16x32 (Const16 [0]) _) // result: (Const16 [0]) for { - if v_0.Op != OpConst16 || v_0.AuxInt != 0 { + if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 { break } v.reset(OpConst16) - v.AuxInt = 0 + v.AuxInt = int16ToAuxInt(0) return true } 
return false @@ -10992,7 +10992,7 @@ func rewriteValuegeneric_OpLsh16x64(v *Value) bool { // result: x for { x := v_0 - if v_1.Op != OpConst64 || v_1.AuxInt != 0 { + if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 { break } v.copyOf(x) @@ -11001,11 +11001,11 @@ func rewriteValuegeneric_OpLsh16x64(v *Value) bool { // match: (Lsh16x64 (Const16 [0]) _) // result: (Const16 [0]) for { - if v_0.Op != OpConst16 || v_0.AuxInt != 0 { + if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 { break } v.reset(OpConst16) - v.AuxInt = 0 + v.AuxInt = int16ToAuxInt(0) return true } // match: (Lsh16x64 _ (Const64 [c])) @@ -11015,12 +11015,12 @@ func rewriteValuegeneric_OpLsh16x64(v *Value) bool { if v_1.Op != OpConst64 { break } - c := v_1.AuxInt + c := auxIntToInt64(v_1.AuxInt) if !(uint64(c) >= 16) { break } v.reset(OpConst16) - v.AuxInt = 0 + v.AuxInt = int16ToAuxInt(0) return true } // match: (Lsh16x64 (Lsh16x64 x (Const64 [c])) (Const64 [d])) @@ -11037,17 +11037,17 @@ func rewriteValuegeneric_OpLsh16x64(v *Value) bool { if v_0_1.Op != OpConst64 { break } - c := v_0_1.AuxInt + c := auxIntToInt64(v_0_1.AuxInt) if v_1.Op != OpConst64 { break } - d := v_1.AuxInt + d := auxIntToInt64(v_1.AuxInt) if !(!uaddOvf(c, d)) { break } v.reset(OpLsh16x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = c + d + v0.AuxInt = int64ToAuxInt(c + d) v.AddArg2(x, v0) return true } @@ -11069,22 +11069,22 @@ func rewriteValuegeneric_OpLsh16x64(v *Value) bool { if v_0_0_1.Op != OpConst64 { break } - c1 := v_0_0_1.AuxInt + c1 := auxIntToInt64(v_0_0_1.AuxInt) v_0_1 := v_0.Args[1] if v_0_1.Op != OpConst64 { break } - c2 := v_0_1.AuxInt + c2 := auxIntToInt64(v_0_1.AuxInt) if v_1.Op != OpConst64 { break } - c3 := v_1.AuxInt + c3 := auxIntToInt64(v_1.AuxInt) if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { break } v.reset(OpLsh16x64) v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) - v0.AuxInt = c1 - c2 + c3 + v0.AuxInt = int64ToAuxInt(c1 - c2 + c3) v.AddArg2(x, v0) return true } @@ -11102,21 +11102,21 @@ func rewriteValuegeneric_OpLsh16x8(v *Value) bool { if v_1.Op != OpConst8 { break } - c := v_1.AuxInt + c := auxIntToInt8(v_1.AuxInt) v.reset(OpLsh16x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint8(c)) + v0.AuxInt = int64ToAuxInt(int64(uint8(c))) v.AddArg2(x, v0) return true } // match: (Lsh16x8 (Const16 [0]) _) // result: (Const16 [0]) for { - if v_0.Op != OpConst16 || v_0.AuxInt != 0 { + if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 { break } v.reset(OpConst16) - v.AuxInt = 0 + v.AuxInt = int16ToAuxInt(0) return true } return false @@ -11133,21 +11133,21 @@ func rewriteValuegeneric_OpLsh32x16(v *Value) bool { if v_1.Op != OpConst16 { break } - c := v_1.AuxInt + c := auxIntToInt16(v_1.AuxInt) v.reset(OpLsh32x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint16(c)) + v0.AuxInt = int64ToAuxInt(int64(uint16(c))) v.AddArg2(x, v0) return true } // match: (Lsh32x16 (Const32 [0]) _) // result: (Const32 [0]) for { - if v_0.Op != OpConst32 || v_0.AuxInt != 0 { + if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 { break } v.reset(OpConst32) - v.AuxInt = 0 + v.AuxInt = int32ToAuxInt(0) return true } return false @@ -11164,21 +11164,21 @@ func rewriteValuegeneric_OpLsh32x32(v *Value) bool { if v_1.Op != OpConst32 { break } - c := v_1.AuxInt + c := auxIntToInt32(v_1.AuxInt) v.reset(OpLsh32x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint32(c)) + v0.AuxInt = int64ToAuxInt(int64(uint32(c))) v.AddArg2(x, v0) return true } 
// match: (Lsh32x32 (Const32 [0]) _) // result: (Const32 [0]) for { - if v_0.Op != OpConst32 || v_0.AuxInt != 0 { + if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 { break } v.reset(OpConst32) - v.AuxInt = 0 + v.AuxInt = int32ToAuxInt(0) return true } return false @@ -11207,7 +11207,7 @@ func rewriteValuegeneric_OpLsh32x64(v *Value) bool { // result: x for { x := v_0 - if v_1.Op != OpConst64 || v_1.AuxInt != 0 { + if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 { break } v.copyOf(x) @@ -11216,11 +11216,11 @@ func rewriteValuegeneric_OpLsh32x64(v *Value) bool { // match: (Lsh32x64 (Const32 [0]) _) // result: (Const32 [0]) for { - if v_0.Op != OpConst32 || v_0.AuxInt != 0 { + if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 { break } v.reset(OpConst32) - v.AuxInt = 0 + v.AuxInt = int32ToAuxInt(0) return true } // match: (Lsh32x64 _ (Const64 [c])) @@ -11230,12 +11230,12 @@ func rewriteValuegeneric_OpLsh32x64(v *Value) bool { if v_1.Op != OpConst64 { break } - c := v_1.AuxInt + c := auxIntToInt64(v_1.AuxInt) if !(uint64(c) >= 32) { break } v.reset(OpConst32) - v.AuxInt = 0 + v.AuxInt = int32ToAuxInt(0) return true } // match: (Lsh32x64 (Lsh32x64 x (Const64 [c])) (Const64 [d])) @@ -11252,17 +11252,17 @@ func rewriteValuegeneric_OpLsh32x64(v *Value) bool { if v_0_1.Op != OpConst64 { break } - c := v_0_1.AuxInt + c := auxIntToInt64(v_0_1.AuxInt) if v_1.Op != OpConst64 { break } - d := v_1.AuxInt + d := auxIntToInt64(v_1.AuxInt) if !(!uaddOvf(c, d)) { break } v.reset(OpLsh32x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = c + d + v0.AuxInt = int64ToAuxInt(c + d) v.AddArg2(x, v0) return true } @@ -11284,22 +11284,22 @@ func rewriteValuegeneric_OpLsh32x64(v *Value) bool { if v_0_0_1.Op != OpConst64 { break } - c1 := v_0_0_1.AuxInt + c1 := auxIntToInt64(v_0_0_1.AuxInt) v_0_1 := v_0.Args[1] if v_0_1.Op != OpConst64 { break } - c2 := v_0_1.AuxInt + c2 := auxIntToInt64(v_0_1.AuxInt) if v_1.Op != OpConst64 { break } - c3 := v_1.AuxInt + c3 := auxIntToInt64(v_1.AuxInt) if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { break } v.reset(OpLsh32x64) v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) - v0.AuxInt = c1 - c2 + c3 + v0.AuxInt = int64ToAuxInt(c1 - c2 + c3) v.AddArg2(x, v0) return true } @@ -11317,21 +11317,21 @@ func rewriteValuegeneric_OpLsh32x8(v *Value) bool { if v_1.Op != OpConst8 { break } - c := v_1.AuxInt + c := auxIntToInt8(v_1.AuxInt) v.reset(OpLsh32x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint8(c)) + v0.AuxInt = int64ToAuxInt(int64(uint8(c))) v.AddArg2(x, v0) return true } // match: (Lsh32x8 (Const32 [0]) _) // result: (Const32 [0]) for { - if v_0.Op != OpConst32 || v_0.AuxInt != 0 { + if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 { break } v.reset(OpConst32) - v.AuxInt = 0 + v.AuxInt = int32ToAuxInt(0) return true } return false @@ -11348,21 +11348,21 @@ func rewriteValuegeneric_OpLsh64x16(v *Value) bool { if v_1.Op != OpConst16 { break } - c := v_1.AuxInt + c := auxIntToInt16(v_1.AuxInt) v.reset(OpLsh64x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint16(c)) + v0.AuxInt = int64ToAuxInt(int64(uint16(c))) v.AddArg2(x, v0) return true } // match: (Lsh64x16 (Const64 [0]) _) // result: (Const64 [0]) for { - if v_0.Op != OpConst64 || v_0.AuxInt != 0 { + if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 { break } v.reset(OpConst64) - v.AuxInt = 0 + v.AuxInt = int64ToAuxInt(0) return true } return false @@ -11379,21 +11379,21 @@ func rewriteValuegeneric_OpLsh64x32(v 
*Value) bool { if v_1.Op != OpConst32 { break } - c := v_1.AuxInt + c := auxIntToInt32(v_1.AuxInt) v.reset(OpLsh64x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint32(c)) + v0.AuxInt = int64ToAuxInt(int64(uint32(c))) v.AddArg2(x, v0) return true } // match: (Lsh64x32 (Const64 [0]) _) // result: (Const64 [0]) for { - if v_0.Op != OpConst64 || v_0.AuxInt != 0 { + if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 { break } v.reset(OpConst64) - v.AuxInt = 0 + v.AuxInt = int64ToAuxInt(0) return true } return false @@ -11422,7 +11422,7 @@ func rewriteValuegeneric_OpLsh64x64(v *Value) bool { // result: x for { x := v_0 - if v_1.Op != OpConst64 || v_1.AuxInt != 0 { + if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 { break } v.copyOf(x) @@ -11431,11 +11431,11 @@ func rewriteValuegeneric_OpLsh64x64(v *Value) bool { // match: (Lsh64x64 (Const64 [0]) _) // result: (Const64 [0]) for { - if v_0.Op != OpConst64 || v_0.AuxInt != 0 { + if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 { break } v.reset(OpConst64) - v.AuxInt = 0 + v.AuxInt = int64ToAuxInt(0) return true } // match: (Lsh64x64 _ (Const64 [c])) @@ -11445,12 +11445,12 @@ func rewriteValuegeneric_OpLsh64x64(v *Value) bool { if v_1.Op != OpConst64 { break } - c := v_1.AuxInt + c := auxIntToInt64(v_1.AuxInt) if !(uint64(c) >= 64) { break } v.reset(OpConst64) - v.AuxInt = 0 + v.AuxInt = int64ToAuxInt(0) return true } // match: (Lsh64x64 (Lsh64x64 x (Const64 [c])) (Const64 [d])) @@ -11467,17 +11467,17 @@ func rewriteValuegeneric_OpLsh64x64(v *Value) bool { if v_0_1.Op != OpConst64 { break } - c := v_0_1.AuxInt + c := auxIntToInt64(v_0_1.AuxInt) if v_1.Op != OpConst64 { break } - d := v_1.AuxInt + d := auxIntToInt64(v_1.AuxInt) if !(!uaddOvf(c, d)) { break } v.reset(OpLsh64x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = c + d + v0.AuxInt = int64ToAuxInt(c + d) v.AddArg2(x, v0) return true } @@ -11499,22 +11499,22 @@ func rewriteValuegeneric_OpLsh64x64(v *Value) bool { if v_0_0_1.Op != OpConst64 { break } - c1 := v_0_0_1.AuxInt + c1 := auxIntToInt64(v_0_0_1.AuxInt) v_0_1 := v_0.Args[1] if v_0_1.Op != OpConst64 { break } - c2 := v_0_1.AuxInt + c2 := auxIntToInt64(v_0_1.AuxInt) if v_1.Op != OpConst64 { break } - c3 := v_1.AuxInt + c3 := auxIntToInt64(v_1.AuxInt) if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { break } v.reset(OpLsh64x64) v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) - v0.AuxInt = c1 - c2 + c3 + v0.AuxInt = int64ToAuxInt(c1 - c2 + c3) v.AddArg2(x, v0) return true } @@ -11532,21 +11532,21 @@ func rewriteValuegeneric_OpLsh64x8(v *Value) bool { if v_1.Op != OpConst8 { break } - c := v_1.AuxInt + c := auxIntToInt8(v_1.AuxInt) v.reset(OpLsh64x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint8(c)) + v0.AuxInt = int64ToAuxInt(int64(uint8(c))) v.AddArg2(x, v0) return true } // match: (Lsh64x8 (Const64 [0]) _) // result: (Const64 [0]) for { - if v_0.Op != OpConst64 || v_0.AuxInt != 0 { + if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 { break } v.reset(OpConst64) - v.AuxInt = 0 + v.AuxInt = int64ToAuxInt(0) return true } return false @@ -11563,21 +11563,21 @@ func rewriteValuegeneric_OpLsh8x16(v *Value) bool { if v_1.Op != OpConst16 { break } - c := v_1.AuxInt + c := auxIntToInt16(v_1.AuxInt) v.reset(OpLsh8x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint16(c)) + v0.AuxInt = int64ToAuxInt(int64(uint16(c))) v.AddArg2(x, v0) return true } // match: (Lsh8x16 (Const8 [0]) _) // result: (Const8 [0]) for { - if v_0.Op != 
OpConst8 || v_0.AuxInt != 0 { + if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 { break } v.reset(OpConst8) - v.AuxInt = 0 + v.AuxInt = int8ToAuxInt(0) return true } return false @@ -11594,21 +11594,21 @@ func rewriteValuegeneric_OpLsh8x32(v *Value) bool { if v_1.Op != OpConst32 { break } - c := v_1.AuxInt + c := auxIntToInt32(v_1.AuxInt) v.reset(OpLsh8x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint32(c)) + v0.AuxInt = int64ToAuxInt(int64(uint32(c))) v.AddArg2(x, v0) return true } // match: (Lsh8x32 (Const8 [0]) _) // result: (Const8 [0]) for { - if v_0.Op != OpConst8 || v_0.AuxInt != 0 { + if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 { break } v.reset(OpConst8) - v.AuxInt = 0 + v.AuxInt = int8ToAuxInt(0) return true } return false @@ -11637,7 +11637,7 @@ func rewriteValuegeneric_OpLsh8x64(v *Value) bool { // result: x for { x := v_0 - if v_1.Op != OpConst64 || v_1.AuxInt != 0 { + if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 { break } v.copyOf(x) @@ -11646,11 +11646,11 @@ func rewriteValuegeneric_OpLsh8x64(v *Value) bool { // match: (Lsh8x64 (Const8 [0]) _) // result: (Const8 [0]) for { - if v_0.Op != OpConst8 || v_0.AuxInt != 0 { + if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 { break } v.reset(OpConst8) - v.AuxInt = 0 + v.AuxInt = int8ToAuxInt(0) return true } // match: (Lsh8x64 _ (Const64 [c])) @@ -11660,12 +11660,12 @@ func rewriteValuegeneric_OpLsh8x64(v *Value) bool { if v_1.Op != OpConst64 { break } - c := v_1.AuxInt + c := auxIntToInt64(v_1.AuxInt) if !(uint64(c) >= 8) { break } v.reset(OpConst8) - v.AuxInt = 0 + v.AuxInt = int8ToAuxInt(0) return true } // match: (Lsh8x64 (Lsh8x64 x (Const64 [c])) (Const64 [d])) @@ -11682,17 +11682,17 @@ func rewriteValuegeneric_OpLsh8x64(v *Value) bool { if v_0_1.Op != OpConst64 { break } - c := v_0_1.AuxInt + c := auxIntToInt64(v_0_1.AuxInt) if v_1.Op != OpConst64 { break } - d := v_1.AuxInt + d := auxIntToInt64(v_1.AuxInt) if !(!uaddOvf(c, d)) { break } v.reset(OpLsh8x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = c + d + v0.AuxInt = int64ToAuxInt(c + d) v.AddArg2(x, v0) return true } @@ -11714,22 +11714,22 @@ func rewriteValuegeneric_OpLsh8x64(v *Value) bool { if v_0_0_1.Op != OpConst64 { break } - c1 := v_0_0_1.AuxInt + c1 := auxIntToInt64(v_0_0_1.AuxInt) v_0_1 := v_0.Args[1] if v_0_1.Op != OpConst64 { break } - c2 := v_0_1.AuxInt + c2 := auxIntToInt64(v_0_1.AuxInt) if v_1.Op != OpConst64 { break } - c3 := v_1.AuxInt + c3 := auxIntToInt64(v_1.AuxInt) if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { break } v.reset(OpLsh8x64) v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) - v0.AuxInt = c1 - c2 + c3 + v0.AuxInt = int64ToAuxInt(c1 - c2 + c3) v.AddArg2(x, v0) return true } @@ -11747,21 +11747,21 @@ func rewriteValuegeneric_OpLsh8x8(v *Value) bool { if v_1.Op != OpConst8 { break } - c := v_1.AuxInt + c := auxIntToInt8(v_1.AuxInt) v.reset(OpLsh8x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint8(c)) + v0.AuxInt = int64ToAuxInt(int64(uint8(c))) v.AddArg2(x, v0) return true } // match: (Lsh8x8 (Const8 [0]) _) // result: (Const8 [0]) for { - if v_0.Op != OpConst8 || v_0.AuxInt != 0 { + if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 { break } v.reset(OpConst8) - v.AuxInt = 0 + v.AuxInt = int8ToAuxInt(0) return true } return false @@ -18687,7 +18687,7 @@ func rewriteValuegeneric_OpRotateLeft16(v *Value) bool { if v_1.Op != OpConst16 { break } - c := v_1.AuxInt + c := auxIntToInt16(v_1.AuxInt) if !(c%16 == 0) { break } @@ 
-18707,7 +18707,7 @@ func rewriteValuegeneric_OpRotateLeft32(v *Value) bool { if v_1.Op != OpConst32 { break } - c := v_1.AuxInt + c := auxIntToInt32(v_1.AuxInt) if !(c%32 == 0) { break } @@ -18727,7 +18727,7 @@ func rewriteValuegeneric_OpRotateLeft64(v *Value) bool { if v_1.Op != OpConst64 { break } - c := v_1.AuxInt + c := auxIntToInt64(v_1.AuxInt) if !(c%64 == 0) { break } @@ -18747,7 +18747,7 @@ func rewriteValuegeneric_OpRotateLeft8(v *Value) bool { if v_1.Op != OpConst8 { break } - c := v_1.AuxInt + c := auxIntToInt8(v_1.AuxInt) if !(c%8 == 0) { break } @@ -18796,21 +18796,21 @@ func rewriteValuegeneric_OpRsh16Ux16(v *Value) bool { if v_1.Op != OpConst16 { break } - c := v_1.AuxInt + c := auxIntToInt16(v_1.AuxInt) v.reset(OpRsh16Ux64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint16(c)) + v0.AuxInt = int64ToAuxInt(int64(uint16(c))) v.AddArg2(x, v0) return true } // match: (Rsh16Ux16 (Const16 [0]) _) // result: (Const16 [0]) for { - if v_0.Op != OpConst16 || v_0.AuxInt != 0 { + if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 { break } v.reset(OpConst16) - v.AuxInt = 0 + v.AuxInt = int16ToAuxInt(0) return true } return false @@ -18827,21 +18827,21 @@ func rewriteValuegeneric_OpRsh16Ux32(v *Value) bool { if v_1.Op != OpConst32 { break } - c := v_1.AuxInt + c := auxIntToInt32(v_1.AuxInt) v.reset(OpRsh16Ux64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint32(c)) + v0.AuxInt = int64ToAuxInt(int64(uint32(c))) v.AddArg2(x, v0) return true } // match: (Rsh16Ux32 (Const16 [0]) _) // result: (Const16 [0]) for { - if v_0.Op != OpConst16 || v_0.AuxInt != 0 { + if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 { break } v.reset(OpConst16) - v.AuxInt = 0 + v.AuxInt = int16ToAuxInt(0) return true } return false @@ -18870,7 +18870,7 @@ func rewriteValuegeneric_OpRsh16Ux64(v *Value) bool { // result: x for { x := v_0 - if v_1.Op != OpConst64 || v_1.AuxInt != 0 { + if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 { break } v.copyOf(x) @@ -18879,11 +18879,11 @@ func rewriteValuegeneric_OpRsh16Ux64(v *Value) bool { // match: (Rsh16Ux64 (Const16 [0]) _) // result: (Const16 [0]) for { - if v_0.Op != OpConst16 || v_0.AuxInt != 0 { + if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 { break } v.reset(OpConst16) - v.AuxInt = 0 + v.AuxInt = int16ToAuxInt(0) return true } // match: (Rsh16Ux64 _ (Const64 [c])) @@ -18893,12 +18893,12 @@ func rewriteValuegeneric_OpRsh16Ux64(v *Value) bool { if v_1.Op != OpConst64 { break } - c := v_1.AuxInt + c := auxIntToInt64(v_1.AuxInt) if !(uint64(c) >= 16) { break } v.reset(OpConst16) - v.AuxInt = 0 + v.AuxInt = int16ToAuxInt(0) return true } // match: (Rsh16Ux64 (Rsh16Ux64 x (Const64 [c])) (Const64 [d])) @@ -18915,17 +18915,17 @@ func rewriteValuegeneric_OpRsh16Ux64(v *Value) bool { if v_0_1.Op != OpConst64 { break } - c := v_0_1.AuxInt + c := auxIntToInt64(v_0_1.AuxInt) if v_1.Op != OpConst64 { break } - d := v_1.AuxInt + d := auxIntToInt64(v_1.AuxInt) if !(!uaddOvf(c, d)) { break } v.reset(OpRsh16Ux64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = c + d + v0.AuxInt = int64ToAuxInt(c + d) v.AddArg2(x, v0) return true } @@ -18940,12 +18940,12 @@ func rewriteValuegeneric_OpRsh16Ux64(v *Value) bool { break } t := v_1.Type - if v_1.AuxInt != 15 { + if auxIntToInt64(v_1.AuxInt) != 15 { break } v.reset(OpRsh16Ux64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = 15 + v0.AuxInt = int64ToAuxInt(15) v.AddArg2(x, v0) return true } @@ -18967,22 +18967,22 @@ func rewriteValuegeneric_OpRsh16Ux64(v *Value) bool { 
if v_0_0_1.Op != OpConst64 { break } - c1 := v_0_0_1.AuxInt + c1 := auxIntToInt64(v_0_0_1.AuxInt) v_0_1 := v_0.Args[1] if v_0_1.Op != OpConst64 { break } - c2 := v_0_1.AuxInt + c2 := auxIntToInt64(v_0_1.AuxInt) if v_1.Op != OpConst64 { break } - c3 := v_1.AuxInt + c3 := auxIntToInt64(v_1.AuxInt) if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { break } v.reset(OpRsh16Ux64) v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) - v0.AuxInt = c1 - c2 + c3 + v0.AuxInt = int64ToAuxInt(c1 - c2 + c3) v.AddArg2(x, v0) return true } @@ -18995,7 +18995,7 @@ func rewriteValuegeneric_OpRsh16Ux64(v *Value) bool { _ = v_0.Args[1] x := v_0.Args[0] v_0_1 := v_0.Args[1] - if v_0_1.Op != OpConst64 || v_0_1.AuxInt != 8 || v_1.Op != OpConst64 || v_1.AuxInt != 8 { + if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 8 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 8 { break } v.reset(OpZeroExt8to16) @@ -19018,21 +19018,21 @@ func rewriteValuegeneric_OpRsh16Ux8(v *Value) bool { if v_1.Op != OpConst8 { break } - c := v_1.AuxInt + c := auxIntToInt8(v_1.AuxInt) v.reset(OpRsh16Ux64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint8(c)) + v0.AuxInt = int64ToAuxInt(int64(uint8(c))) v.AddArg2(x, v0) return true } // match: (Rsh16Ux8 (Const16 [0]) _) // result: (Const16 [0]) for { - if v_0.Op != OpConst16 || v_0.AuxInt != 0 { + if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 { break } v.reset(OpConst16) - v.AuxInt = 0 + v.AuxInt = int16ToAuxInt(0) return true } return false @@ -19049,21 +19049,21 @@ func rewriteValuegeneric_OpRsh16x16(v *Value) bool { if v_1.Op != OpConst16 { break } - c := v_1.AuxInt + c := auxIntToInt16(v_1.AuxInt) v.reset(OpRsh16x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint16(c)) + v0.AuxInt = int64ToAuxInt(int64(uint16(c))) v.AddArg2(x, v0) return true } // match: (Rsh16x16 (Const16 [0]) _) // result: (Const16 [0]) for { - if v_0.Op != OpConst16 || v_0.AuxInt != 0 { + if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 { break } v.reset(OpConst16) - v.AuxInt = 0 + v.AuxInt = int16ToAuxInt(0) return true } return false @@ -19080,21 +19080,21 @@ func rewriteValuegeneric_OpRsh16x32(v *Value) bool { if v_1.Op != OpConst32 { break } - c := v_1.AuxInt + c := auxIntToInt32(v_1.AuxInt) v.reset(OpRsh16x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint32(c)) + v0.AuxInt = int64ToAuxInt(int64(uint32(c))) v.AddArg2(x, v0) return true } // match: (Rsh16x32 (Const16 [0]) _) // result: (Const16 [0]) for { - if v_0.Op != OpConst16 || v_0.AuxInt != 0 { + if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 { break } v.reset(OpConst16) - v.AuxInt = 0 + v.AuxInt = int16ToAuxInt(0) return true } return false @@ -19123,7 +19123,7 @@ func rewriteValuegeneric_OpRsh16x64(v *Value) bool { // result: x for { x := v_0 - if v_1.Op != OpConst64 || v_1.AuxInt != 0 { + if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 { break } v.copyOf(x) @@ -19132,11 +19132,11 @@ func rewriteValuegeneric_OpRsh16x64(v *Value) bool { // match: (Rsh16x64 (Const16 [0]) _) // result: (Const16 [0]) for { - if v_0.Op != OpConst16 || v_0.AuxInt != 0 { + if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 { break } v.reset(OpConst16) - v.AuxInt = 0 + v.AuxInt = int16ToAuxInt(0) return true } // match: (Rsh16x64 (Rsh16x64 x (Const64 [c])) (Const64 [d])) @@ -19153,17 +19153,17 @@ func rewriteValuegeneric_OpRsh16x64(v *Value) bool { if v_0_1.Op != OpConst64 { break } - c := v_0_1.AuxInt + c := auxIntToInt64(v_0_1.AuxInt) if 
v_1.Op != OpConst64 { break } - d := v_1.AuxInt + d := auxIntToInt64(v_1.AuxInt) if !(!uaddOvf(c, d)) { break } v.reset(OpRsh16x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = c + d + v0.AuxInt = int64ToAuxInt(c + d) v.AddArg2(x, v0) return true } @@ -19176,7 +19176,7 @@ func rewriteValuegeneric_OpRsh16x64(v *Value) bool { _ = v_0.Args[1] x := v_0.Args[0] v_0_1 := v_0.Args[1] - if v_0_1.Op != OpConst64 || v_0_1.AuxInt != 8 || v_1.Op != OpConst64 || v_1.AuxInt != 8 { + if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 8 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 8 { break } v.reset(OpSignExt8to16) @@ -19199,21 +19199,21 @@ func rewriteValuegeneric_OpRsh16x8(v *Value) bool { if v_1.Op != OpConst8 { break } - c := v_1.AuxInt + c := auxIntToInt8(v_1.AuxInt) v.reset(OpRsh16x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint8(c)) + v0.AuxInt = int64ToAuxInt(int64(uint8(c))) v.AddArg2(x, v0) return true } // match: (Rsh16x8 (Const16 [0]) _) // result: (Const16 [0]) for { - if v_0.Op != OpConst16 || v_0.AuxInt != 0 { + if v_0.Op != OpConst16 || auxIntToInt16(v_0.AuxInt) != 0 { break } v.reset(OpConst16) - v.AuxInt = 0 + v.AuxInt = int16ToAuxInt(0) return true } return false @@ -19230,21 +19230,21 @@ func rewriteValuegeneric_OpRsh32Ux16(v *Value) bool { if v_1.Op != OpConst16 { break } - c := v_1.AuxInt + c := auxIntToInt16(v_1.AuxInt) v.reset(OpRsh32Ux64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint16(c)) + v0.AuxInt = int64ToAuxInt(int64(uint16(c))) v.AddArg2(x, v0) return true } // match: (Rsh32Ux16 (Const32 [0]) _) // result: (Const32 [0]) for { - if v_0.Op != OpConst32 || v_0.AuxInt != 0 { + if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 { break } v.reset(OpConst32) - v.AuxInt = 0 + v.AuxInt = int32ToAuxInt(0) return true } return false @@ -19261,21 +19261,21 @@ func rewriteValuegeneric_OpRsh32Ux32(v *Value) bool { if v_1.Op != OpConst32 { break } - c := v_1.AuxInt + c := auxIntToInt32(v_1.AuxInt) v.reset(OpRsh32Ux64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint32(c)) + v0.AuxInt = int64ToAuxInt(int64(uint32(c))) v.AddArg2(x, v0) return true } // match: (Rsh32Ux32 (Const32 [0]) _) // result: (Const32 [0]) for { - if v_0.Op != OpConst32 || v_0.AuxInt != 0 { + if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 { break } v.reset(OpConst32) - v.AuxInt = 0 + v.AuxInt = int32ToAuxInt(0) return true } return false @@ -19304,7 +19304,7 @@ func rewriteValuegeneric_OpRsh32Ux64(v *Value) bool { // result: x for { x := v_0 - if v_1.Op != OpConst64 || v_1.AuxInt != 0 { + if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 { break } v.copyOf(x) @@ -19313,11 +19313,11 @@ func rewriteValuegeneric_OpRsh32Ux64(v *Value) bool { // match: (Rsh32Ux64 (Const32 [0]) _) // result: (Const32 [0]) for { - if v_0.Op != OpConst32 || v_0.AuxInt != 0 { + if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 { break } v.reset(OpConst32) - v.AuxInt = 0 + v.AuxInt = int32ToAuxInt(0) return true } // match: (Rsh32Ux64 _ (Const64 [c])) @@ -19327,12 +19327,12 @@ func rewriteValuegeneric_OpRsh32Ux64(v *Value) bool { if v_1.Op != OpConst64 { break } - c := v_1.AuxInt + c := auxIntToInt64(v_1.AuxInt) if !(uint64(c) >= 32) { break } v.reset(OpConst32) - v.AuxInt = 0 + v.AuxInt = int32ToAuxInt(0) return true } // match: (Rsh32Ux64 (Rsh32Ux64 x (Const64 [c])) (Const64 [d])) @@ -19349,17 +19349,17 @@ func rewriteValuegeneric_OpRsh32Ux64(v *Value) bool { if v_0_1.Op != OpConst64 { break } - c := v_0_1.AuxInt + c := 
auxIntToInt64(v_0_1.AuxInt) if v_1.Op != OpConst64 { break } - d := v_1.AuxInt + d := auxIntToInt64(v_1.AuxInt) if !(!uaddOvf(c, d)) { break } v.reset(OpRsh32Ux64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = c + d + v0.AuxInt = int64ToAuxInt(c + d) v.AddArg2(x, v0) return true } @@ -19374,12 +19374,12 @@ func rewriteValuegeneric_OpRsh32Ux64(v *Value) bool { break } t := v_1.Type - if v_1.AuxInt != 31 { + if auxIntToInt64(v_1.AuxInt) != 31 { break } v.reset(OpRsh32Ux64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = 31 + v0.AuxInt = int64ToAuxInt(31) v.AddArg2(x, v0) return true } @@ -19401,22 +19401,22 @@ func rewriteValuegeneric_OpRsh32Ux64(v *Value) bool { if v_0_0_1.Op != OpConst64 { break } - c1 := v_0_0_1.AuxInt + c1 := auxIntToInt64(v_0_0_1.AuxInt) v_0_1 := v_0.Args[1] if v_0_1.Op != OpConst64 { break } - c2 := v_0_1.AuxInt + c2 := auxIntToInt64(v_0_1.AuxInt) if v_1.Op != OpConst64 { break } - c3 := v_1.AuxInt + c3 := auxIntToInt64(v_1.AuxInt) if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) { break } v.reset(OpRsh32Ux64) v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64) - v0.AuxInt = c1 - c2 + c3 + v0.AuxInt = int64ToAuxInt(c1 - c2 + c3) v.AddArg2(x, v0) return true } @@ -19429,7 +19429,7 @@ func rewriteValuegeneric_OpRsh32Ux64(v *Value) bool { _ = v_0.Args[1] x := v_0.Args[0] v_0_1 := v_0.Args[1] - if v_0_1.Op != OpConst64 || v_0_1.AuxInt != 24 || v_1.Op != OpConst64 || v_1.AuxInt != 24 { + if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 24 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 24 { break } v.reset(OpZeroExt8to32) @@ -19447,7 +19447,7 @@ func rewriteValuegeneric_OpRsh32Ux64(v *Value) bool { _ = v_0.Args[1] x := v_0.Args[0] v_0_1 := v_0.Args[1] - if v_0_1.Op != OpConst64 || v_0_1.AuxInt != 16 || v_1.Op != OpConst64 || v_1.AuxInt != 16 { + if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 16 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 16 { break } v.reset(OpZeroExt16to32) @@ -19470,21 +19470,21 @@ func rewriteValuegeneric_OpRsh32Ux8(v *Value) bool { if v_1.Op != OpConst8 { break } - c := v_1.AuxInt + c := auxIntToInt8(v_1.AuxInt) v.reset(OpRsh32Ux64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint8(c)) + v0.AuxInt = int64ToAuxInt(int64(uint8(c))) v.AddArg2(x, v0) return true } // match: (Rsh32Ux8 (Const32 [0]) _) // result: (Const32 [0]) for { - if v_0.Op != OpConst32 || v_0.AuxInt != 0 { + if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 { break } v.reset(OpConst32) - v.AuxInt = 0 + v.AuxInt = int32ToAuxInt(0) return true } return false @@ -19501,21 +19501,21 @@ func rewriteValuegeneric_OpRsh32x16(v *Value) bool { if v_1.Op != OpConst16 { break } - c := v_1.AuxInt + c := auxIntToInt16(v_1.AuxInt) v.reset(OpRsh32x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint16(c)) + v0.AuxInt = int64ToAuxInt(int64(uint16(c))) v.AddArg2(x, v0) return true } // match: (Rsh32x16 (Const32 [0]) _) // result: (Const32 [0]) for { - if v_0.Op != OpConst32 || v_0.AuxInt != 0 { + if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 { break } v.reset(OpConst32) - v.AuxInt = 0 + v.AuxInt = int32ToAuxInt(0) return true } return false @@ -19532,21 +19532,21 @@ func rewriteValuegeneric_OpRsh32x32(v *Value) bool { if v_1.Op != OpConst32 { break } - c := v_1.AuxInt + c := auxIntToInt32(v_1.AuxInt) v.reset(OpRsh32x64) v0 := b.NewValue0(v.Pos, OpConst64, t) - v0.AuxInt = int64(uint32(c)) + v0.AuxInt = int64ToAuxInt(int64(uint32(c))) v.AddArg2(x, v0) return true } // match: 
 	// match: (Rsh32x32 (Const32 [0]) _)
 	// result: (Const32 [0])
 	for {
-		if v_0.Op != OpConst32 || v_0.AuxInt != 0 {
+		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 			break
 		}
 		v.reset(OpConst32)
-		v.AuxInt = 0
+		v.AuxInt = int32ToAuxInt(0)
 		return true
 	}
 	return false
@@ -19575,7 +19575,7 @@ func rewriteValuegeneric_OpRsh32x64(v *Value) bool {
 	// result: x
 	for {
 		x := v_0
-		if v_1.Op != OpConst64 || v_1.AuxInt != 0 {
+		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 			break
 		}
 		v.copyOf(x)
@@ -19584,11 +19584,11 @@ func rewriteValuegeneric_OpRsh32x64(v *Value) bool {
 	// match: (Rsh32x64 (Const32 [0]) _)
 	// result: (Const32 [0])
 	for {
-		if v_0.Op != OpConst32 || v_0.AuxInt != 0 {
+		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 			break
 		}
 		v.reset(OpConst32)
-		v.AuxInt = 0
+		v.AuxInt = int32ToAuxInt(0)
 		return true
 	}
 	// match: (Rsh32x64 (Rsh32x64 x (Const64 [c])) (Const64 [d]))
@@ -19605,17 +19605,17 @@ func rewriteValuegeneric_OpRsh32x64(v *Value) bool {
 		if v_0_1.Op != OpConst64 {
 			break
 		}
-		c := v_0_1.AuxInt
+		c := auxIntToInt64(v_0_1.AuxInt)
 		if v_1.Op != OpConst64 {
 			break
 		}
-		d := v_1.AuxInt
+		d := auxIntToInt64(v_1.AuxInt)
 		if !(!uaddOvf(c, d)) {
 			break
 		}
 		v.reset(OpRsh32x64)
 		v0 := b.NewValue0(v.Pos, OpConst64, t)
-		v0.AuxInt = c + d
+		v0.AuxInt = int64ToAuxInt(c + d)
 		v.AddArg2(x, v0)
 		return true
 	}
@@ -19628,7 +19628,7 @@ func rewriteValuegeneric_OpRsh32x64(v *Value) bool {
 		_ = v_0.Args[1]
 		x := v_0.Args[0]
 		v_0_1 := v_0.Args[1]
-		if v_0_1.Op != OpConst64 || v_0_1.AuxInt != 24 || v_1.Op != OpConst64 || v_1.AuxInt != 24 {
+		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 24 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 24 {
 			break
 		}
 		v.reset(OpSignExt8to32)
@@ -19646,7 +19646,7 @@ func rewriteValuegeneric_OpRsh32x64(v *Value) bool {
 		_ = v_0.Args[1]
 		x := v_0.Args[0]
 		v_0_1 := v_0.Args[1]
-		if v_0_1.Op != OpConst64 || v_0_1.AuxInt != 16 || v_1.Op != OpConst64 || v_1.AuxInt != 16 {
+		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 16 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 16 {
 			break
 		}
 		v.reset(OpSignExt16to32)
@@ -19669,21 +19669,21 @@ func rewriteValuegeneric_OpRsh32x8(v *Value) bool {
 		if v_1.Op != OpConst8 {
 			break
 		}
-		c := v_1.AuxInt
+		c := auxIntToInt8(v_1.AuxInt)
 		v.reset(OpRsh32x64)
 		v0 := b.NewValue0(v.Pos, OpConst64, t)
-		v0.AuxInt = int64(uint8(c))
+		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 		v.AddArg2(x, v0)
 		return true
 	}
 	// match: (Rsh32x8 (Const32 [0]) _)
 	// result: (Const32 [0])
 	for {
-		if v_0.Op != OpConst32 || v_0.AuxInt != 0 {
+		if v_0.Op != OpConst32 || auxIntToInt32(v_0.AuxInt) != 0 {
 			break
 		}
 		v.reset(OpConst32)
-		v.AuxInt = 0
+		v.AuxInt = int32ToAuxInt(0)
 		return true
 	}
 	return false
@@ -19700,21 +19700,21 @@ func rewriteValuegeneric_OpRsh64Ux16(v *Value) bool {
 		if v_1.Op != OpConst16 {
 			break
 		}
-		c := v_1.AuxInt
+		c := auxIntToInt16(v_1.AuxInt)
 		v.reset(OpRsh64Ux64)
 		v0 := b.NewValue0(v.Pos, OpConst64, t)
-		v0.AuxInt = int64(uint16(c))
+		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 		v.AddArg2(x, v0)
 		return true
 	}
 	// match: (Rsh64Ux16 (Const64 [0]) _)
 	// result: (Const64 [0])
 	for {
-		if v_0.Op != OpConst64 || v_0.AuxInt != 0 {
+		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 			break
 		}
 		v.reset(OpConst64)
-		v.AuxInt = 0
+		v.AuxInt = int64ToAuxInt(0)
 		return true
 	}
 	return false
@@ -19731,21 +19731,21 @@ func rewriteValuegeneric_OpRsh64Ux32(v *Value) bool {
 		if v_1.Op != OpConst32 {
 			break
 		}
-		c := v_1.AuxInt
+		c := auxIntToInt32(v_1.AuxInt)
 		v.reset(OpRsh64Ux64)
 		v0 := b.NewValue0(v.Pos, OpConst64, t)
-		v0.AuxInt = int64(uint32(c))
+		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 		v.AddArg2(x, v0)
 		return true
 	}
 	// match: (Rsh64Ux32 (Const64 [0]) _)
 	// result: (Const64 [0])
 	for {
-		if v_0.Op != OpConst64 || v_0.AuxInt != 0 {
+		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 			break
 		}
 		v.reset(OpConst64)
-		v.AuxInt = 0
+		v.AuxInt = int64ToAuxInt(0)
 		return true
 	}
 	return false
@@ -19774,7 +19774,7 @@ func rewriteValuegeneric_OpRsh64Ux64(v *Value) bool {
 	// result: x
 	for {
 		x := v_0
-		if v_1.Op != OpConst64 || v_1.AuxInt != 0 {
+		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 			break
 		}
 		v.copyOf(x)
@@ -19783,11 +19783,11 @@ func rewriteValuegeneric_OpRsh64Ux64(v *Value) bool {
 	// match: (Rsh64Ux64 (Const64 [0]) _)
 	// result: (Const64 [0])
 	for {
-		if v_0.Op != OpConst64 || v_0.AuxInt != 0 {
+		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 			break
 		}
 		v.reset(OpConst64)
-		v.AuxInt = 0
+		v.AuxInt = int64ToAuxInt(0)
 		return true
 	}
 	// match: (Rsh64Ux64 _ (Const64 [c]))
@@ -19797,12 +19797,12 @@ func rewriteValuegeneric_OpRsh64Ux64(v *Value) bool {
 		if v_1.Op != OpConst64 {
 			break
 		}
-		c := v_1.AuxInt
+		c := auxIntToInt64(v_1.AuxInt)
 		if !(uint64(c) >= 64) {
 			break
 		}
 		v.reset(OpConst64)
-		v.AuxInt = 0
+		v.AuxInt = int64ToAuxInt(0)
 		return true
 	}
 	// match: (Rsh64Ux64 (Rsh64Ux64 x (Const64 [c])) (Const64 [d]))
@@ -19819,17 +19819,17 @@ func rewriteValuegeneric_OpRsh64Ux64(v *Value) bool {
 		if v_0_1.Op != OpConst64 {
 			break
 		}
-		c := v_0_1.AuxInt
+		c := auxIntToInt64(v_0_1.AuxInt)
 		if v_1.Op != OpConst64 {
 			break
 		}
-		d := v_1.AuxInt
+		d := auxIntToInt64(v_1.AuxInt)
 		if !(!uaddOvf(c, d)) {
 			break
 		}
 		v.reset(OpRsh64Ux64)
 		v0 := b.NewValue0(v.Pos, OpConst64, t)
-		v0.AuxInt = c + d
+		v0.AuxInt = int64ToAuxInt(c + d)
 		v.AddArg2(x, v0)
 		return true
 	}
@@ -19844,12 +19844,12 @@ func rewriteValuegeneric_OpRsh64Ux64(v *Value) bool {
 			break
 		}
 		t := v_1.Type
-		if v_1.AuxInt != 63 {
+		if auxIntToInt64(v_1.AuxInt) != 63 {
 			break
 		}
 		v.reset(OpRsh64Ux64)
 		v0 := b.NewValue0(v.Pos, OpConst64, t)
-		v0.AuxInt = 63
+		v0.AuxInt = int64ToAuxInt(63)
 		v.AddArg2(x, v0)
 		return true
 	}
@@ -19871,22 +19871,22 @@ func rewriteValuegeneric_OpRsh64Ux64(v *Value) bool {
 		if v_0_0_1.Op != OpConst64 {
 			break
 		}
-		c1 := v_0_0_1.AuxInt
+		c1 := auxIntToInt64(v_0_0_1.AuxInt)
 		v_0_1 := v_0.Args[1]
 		if v_0_1.Op != OpConst64 {
 			break
 		}
-		c2 := v_0_1.AuxInt
+		c2 := auxIntToInt64(v_0_1.AuxInt)
 		if v_1.Op != OpConst64 {
 			break
 		}
-		c3 := v_1.AuxInt
+		c3 := auxIntToInt64(v_1.AuxInt)
 		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
 			break
 		}
 		v.reset(OpRsh64Ux64)
 		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-		v0.AuxInt = c1 - c2 + c3
+		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
 		v.AddArg2(x, v0)
 		return true
 	}
@@ -19899,7 +19899,7 @@ func rewriteValuegeneric_OpRsh64Ux64(v *Value) bool {
 		_ = v_0.Args[1]
 		x := v_0.Args[0]
 		v_0_1 := v_0.Args[1]
-		if v_0_1.Op != OpConst64 || v_0_1.AuxInt != 56 || v_1.Op != OpConst64 || v_1.AuxInt != 56 {
+		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 56 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 56 {
 			break
 		}
 		v.reset(OpZeroExt8to64)
@@ -19917,7 +19917,7 @@ func rewriteValuegeneric_OpRsh64Ux64(v *Value) bool {
 		_ = v_0.Args[1]
 		x := v_0.Args[0]
 		v_0_1 := v_0.Args[1]
-		if v_0_1.Op != OpConst64 || v_0_1.AuxInt != 48 || v_1.Op != OpConst64 || v_1.AuxInt != 48 {
+		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 48 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 48 {
 			break
 		}
 		v.reset(OpZeroExt16to64)
@@ -19935,7 +19935,7 @@ func rewriteValuegeneric_OpRsh64Ux64(v *Value) bool {
 		_ = v_0.Args[1]
 		x := v_0.Args[0]
 		v_0_1 := v_0.Args[1]
-		if v_0_1.Op != OpConst64 || v_0_1.AuxInt != 32 || v_1.Op != OpConst64 || v_1.AuxInt != 32 {
+		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 32 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 32 {
 			break
 		}
 		v.reset(OpZeroExt32to64)
@@ -19958,21 +19958,21 @@ func rewriteValuegeneric_OpRsh64Ux8(v *Value) bool {
 		if v_1.Op != OpConst8 {
 			break
 		}
-		c := v_1.AuxInt
+		c := auxIntToInt8(v_1.AuxInt)
 		v.reset(OpRsh64Ux64)
 		v0 := b.NewValue0(v.Pos, OpConst64, t)
-		v0.AuxInt = int64(uint8(c))
+		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 		v.AddArg2(x, v0)
 		return true
 	}
 	// match: (Rsh64Ux8 (Const64 [0]) _)
 	// result: (Const64 [0])
 	for {
-		if v_0.Op != OpConst64 || v_0.AuxInt != 0 {
+		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 			break
 		}
 		v.reset(OpConst64)
-		v.AuxInt = 0
+		v.AuxInt = int64ToAuxInt(0)
 		return true
 	}
 	return false
@@ -19989,21 +19989,21 @@ func rewriteValuegeneric_OpRsh64x16(v *Value) bool {
 		if v_1.Op != OpConst16 {
 			break
 		}
-		c := v_1.AuxInt
+		c := auxIntToInt16(v_1.AuxInt)
 		v.reset(OpRsh64x64)
 		v0 := b.NewValue0(v.Pos, OpConst64, t)
-		v0.AuxInt = int64(uint16(c))
+		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 		v.AddArg2(x, v0)
 		return true
 	}
 	// match: (Rsh64x16 (Const64 [0]) _)
 	// result: (Const64 [0])
 	for {
-		if v_0.Op != OpConst64 || v_0.AuxInt != 0 {
+		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 			break
 		}
 		v.reset(OpConst64)
-		v.AuxInt = 0
+		v.AuxInt = int64ToAuxInt(0)
 		return true
 	}
 	return false
@@ -20020,21 +20020,21 @@ func rewriteValuegeneric_OpRsh64x32(v *Value) bool {
 		if v_1.Op != OpConst32 {
 			break
 		}
-		c := v_1.AuxInt
+		c := auxIntToInt32(v_1.AuxInt)
 		v.reset(OpRsh64x64)
 		v0 := b.NewValue0(v.Pos, OpConst64, t)
-		v0.AuxInt = int64(uint32(c))
+		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 		v.AddArg2(x, v0)
 		return true
 	}
 	// match: (Rsh64x32 (Const64 [0]) _)
 	// result: (Const64 [0])
 	for {
-		if v_0.Op != OpConst64 || v_0.AuxInt != 0 {
+		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 			break
 		}
 		v.reset(OpConst64)
-		v.AuxInt = 0
+		v.AuxInt = int64ToAuxInt(0)
 		return true
 	}
 	return false
@@ -20063,7 +20063,7 @@ func rewriteValuegeneric_OpRsh64x64(v *Value) bool {
 	// result: x
 	for {
 		x := v_0
-		if v_1.Op != OpConst64 || v_1.AuxInt != 0 {
+		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 			break
 		}
 		v.copyOf(x)
@@ -20072,11 +20072,11 @@ func rewriteValuegeneric_OpRsh64x64(v *Value) bool {
 	// match: (Rsh64x64 (Const64 [0]) _)
 	// result: (Const64 [0])
 	for {
-		if v_0.Op != OpConst64 || v_0.AuxInt != 0 {
+		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 			break
 		}
 		v.reset(OpConst64)
-		v.AuxInt = 0
+		v.AuxInt = int64ToAuxInt(0)
 		return true
 	}
 	// match: (Rsh64x64 (Rsh64x64 x (Const64 [c])) (Const64 [d]))
@@ -20093,17 +20093,17 @@ func rewriteValuegeneric_OpRsh64x64(v *Value) bool {
 		if v_0_1.Op != OpConst64 {
 			break
 		}
-		c := v_0_1.AuxInt
+		c := auxIntToInt64(v_0_1.AuxInt)
 		if v_1.Op != OpConst64 {
 			break
 		}
-		d := v_1.AuxInt
+		d := auxIntToInt64(v_1.AuxInt)
 		if !(!uaddOvf(c, d)) {
 			break
 		}
 		v.reset(OpRsh64x64)
 		v0 := b.NewValue0(v.Pos, OpConst64, t)
-		v0.AuxInt = c + d
+		v0.AuxInt = int64ToAuxInt(c + d)
 		v.AddArg2(x, v0)
 		return true
 	}
@@ -20116,7 +20116,7 @@ func rewriteValuegeneric_OpRsh64x64(v *Value) bool {
 		_ = v_0.Args[1]
 		x := v_0.Args[0]
 		v_0_1 := v_0.Args[1]
-		if v_0_1.Op != OpConst64 || v_0_1.AuxInt != 56 || v_1.Op != OpConst64 || v_1.AuxInt != 56 {
+		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 56 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 56 {
 			break
 		}
 		v.reset(OpSignExt8to64)
@@ -20134,7 +20134,7 @@ func rewriteValuegeneric_OpRsh64x64(v *Value) bool {
 		_ = v_0.Args[1]
 		x := v_0.Args[0]
 		v_0_1 := v_0.Args[1]
-		if v_0_1.Op != OpConst64 || v_0_1.AuxInt != 48 || v_1.Op != OpConst64 || v_1.AuxInt != 48 {
+		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 48 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 48 {
 			break
 		}
 		v.reset(OpSignExt16to64)
@@ -20152,7 +20152,7 @@ func rewriteValuegeneric_OpRsh64x64(v *Value) bool {
 		_ = v_0.Args[1]
 		x := v_0.Args[0]
 		v_0_1 := v_0.Args[1]
-		if v_0_1.Op != OpConst64 || v_0_1.AuxInt != 32 || v_1.Op != OpConst64 || v_1.AuxInt != 32 {
+		if v_0_1.Op != OpConst64 || auxIntToInt64(v_0_1.AuxInt) != 32 || v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 32 {
 			break
 		}
 		v.reset(OpSignExt32to64)
@@ -20175,21 +20175,21 @@ func rewriteValuegeneric_OpRsh64x8(v *Value) bool {
 		if v_1.Op != OpConst8 {
 			break
 		}
-		c := v_1.AuxInt
+		c := auxIntToInt8(v_1.AuxInt)
 		v.reset(OpRsh64x64)
 		v0 := b.NewValue0(v.Pos, OpConst64, t)
-		v0.AuxInt = int64(uint8(c))
+		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 		v.AddArg2(x, v0)
 		return true
 	}
 	// match: (Rsh64x8 (Const64 [0]) _)
 	// result: (Const64 [0])
 	for {
-		if v_0.Op != OpConst64 || v_0.AuxInt != 0 {
+		if v_0.Op != OpConst64 || auxIntToInt64(v_0.AuxInt) != 0 {
 			break
 		}
 		v.reset(OpConst64)
-		v.AuxInt = 0
+		v.AuxInt = int64ToAuxInt(0)
 		return true
 	}
 	return false
@@ -20206,21 +20206,21 @@ func rewriteValuegeneric_OpRsh8Ux16(v *Value) bool {
 		if v_1.Op != OpConst16 {
 			break
 		}
-		c := v_1.AuxInt
+		c := auxIntToInt16(v_1.AuxInt)
 		v.reset(OpRsh8Ux64)
 		v0 := b.NewValue0(v.Pos, OpConst64, t)
-		v0.AuxInt = int64(uint16(c))
+		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 		v.AddArg2(x, v0)
 		return true
 	}
 	// match: (Rsh8Ux16 (Const8 [0]) _)
 	// result: (Const8 [0])
 	for {
-		if v_0.Op != OpConst8 || v_0.AuxInt != 0 {
+		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 			break
 		}
 		v.reset(OpConst8)
-		v.AuxInt = 0
+		v.AuxInt = int8ToAuxInt(0)
 		return true
 	}
 	return false
@@ -20237,21 +20237,21 @@ func rewriteValuegeneric_OpRsh8Ux32(v *Value) bool {
 		if v_1.Op != OpConst32 {
 			break
 		}
-		c := v_1.AuxInt
+		c := auxIntToInt32(v_1.AuxInt)
 		v.reset(OpRsh8Ux64)
 		v0 := b.NewValue0(v.Pos, OpConst64, t)
-		v0.AuxInt = int64(uint32(c))
+		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 		v.AddArg2(x, v0)
 		return true
 	}
 	// match: (Rsh8Ux32 (Const8 [0]) _)
 	// result: (Const8 [0])
 	for {
-		if v_0.Op != OpConst8 || v_0.AuxInt != 0 {
+		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 			break
 		}
 		v.reset(OpConst8)
-		v.AuxInt = 0
+		v.AuxInt = int8ToAuxInt(0)
 		return true
 	}
 	return false
@@ -20280,7 +20280,7 @@ func rewriteValuegeneric_OpRsh8Ux64(v *Value) bool {
 	// result: x
 	for {
 		x := v_0
-		if v_1.Op != OpConst64 || v_1.AuxInt != 0 {
+		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 			break
 		}
 		v.copyOf(x)
@@ -20289,11 +20289,11 @@ func rewriteValuegeneric_OpRsh8Ux64(v *Value) bool {
 	// match: (Rsh8Ux64 (Const8 [0]) _)
 	// result: (Const8 [0])
 	for {
-		if v_0.Op != OpConst8 || v_0.AuxInt != 0 {
+		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 			break
 		}
 		v.reset(OpConst8)
-		v.AuxInt = 0
+		v.AuxInt = int8ToAuxInt(0)
 		return true
 	}
 	// match: (Rsh8Ux64 _ (Const64 [c]))
@@ -20303,12 +20303,12 @@ func rewriteValuegeneric_OpRsh8Ux64(v *Value) bool {
 		if v_1.Op != OpConst64 {
 			break
 		}
-		c := v_1.AuxInt
+		c := auxIntToInt64(v_1.AuxInt)
 		if !(uint64(c) >= 8) {
 			break
 		}
 		v.reset(OpConst8)
-		v.AuxInt = 0
+		v.AuxInt = int8ToAuxInt(0)
 		return true
 	}
 	// match: (Rsh8Ux64 (Rsh8Ux64 x (Const64 [c])) (Const64 [d]))
@@ -20325,17 +20325,17 @@ func rewriteValuegeneric_OpRsh8Ux64(v *Value) bool {
 		if v_0_1.Op != OpConst64 {
 			break
 		}
-		c := v_0_1.AuxInt
+		c := auxIntToInt64(v_0_1.AuxInt)
 		if v_1.Op != OpConst64 {
 			break
 		}
-		d := v_1.AuxInt
+		d := auxIntToInt64(v_1.AuxInt)
 		if !(!uaddOvf(c, d)) {
 			break
 		}
 		v.reset(OpRsh8Ux64)
 		v0 := b.NewValue0(v.Pos, OpConst64, t)
-		v0.AuxInt = c + d
+		v0.AuxInt = int64ToAuxInt(c + d)
 		v.AddArg2(x, v0)
 		return true
 	}
@@ -20350,12 +20350,12 @@ func rewriteValuegeneric_OpRsh8Ux64(v *Value) bool {
 			break
 		}
 		t := v_1.Type
-		if v_1.AuxInt != 7 {
+		if auxIntToInt64(v_1.AuxInt) != 7 {
 			break
 		}
 		v.reset(OpRsh8Ux64)
 		v0 := b.NewValue0(v.Pos, OpConst64, t)
-		v0.AuxInt = 7
+		v0.AuxInt = int64ToAuxInt(7)
 		v.AddArg2(x, v0)
 		return true
 	}
@@ -20377,22 +20377,22 @@ func rewriteValuegeneric_OpRsh8Ux64(v *Value) bool {
 		if v_0_0_1.Op != OpConst64 {
 			break
 		}
-		c1 := v_0_0_1.AuxInt
+		c1 := auxIntToInt64(v_0_0_1.AuxInt)
 		v_0_1 := v_0.Args[1]
 		if v_0_1.Op != OpConst64 {
 			break
 		}
-		c2 := v_0_1.AuxInt
+		c2 := auxIntToInt64(v_0_1.AuxInt)
 		if v_1.Op != OpConst64 {
 			break
 		}
-		c3 := v_1.AuxInt
+		c3 := auxIntToInt64(v_1.AuxInt)
 		if !(uint64(c1) >= uint64(c2) && uint64(c3) >= uint64(c2) && !uaddOvf(c1-c2, c3)) {
 			break
 		}
 		v.reset(OpRsh8Ux64)
 		v0 := b.NewValue0(v.Pos, OpConst64, typ.UInt64)
-		v0.AuxInt = c1 - c2 + c3
+		v0.AuxInt = int64ToAuxInt(c1 - c2 + c3)
 		v.AddArg2(x, v0)
 		return true
 	}
@@ -20410,21 +20410,21 @@ func rewriteValuegeneric_OpRsh8Ux8(v *Value) bool {
 		if v_1.Op != OpConst8 {
 			break
 		}
-		c := v_1.AuxInt
+		c := auxIntToInt8(v_1.AuxInt)
 		v.reset(OpRsh8Ux64)
 		v0 := b.NewValue0(v.Pos, OpConst64, t)
-		v0.AuxInt = int64(uint8(c))
+		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 		v.AddArg2(x, v0)
 		return true
 	}
 	// match: (Rsh8Ux8 (Const8 [0]) _)
 	// result: (Const8 [0])
 	for {
-		if v_0.Op != OpConst8 || v_0.AuxInt != 0 {
+		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 			break
 		}
 		v.reset(OpConst8)
-		v.AuxInt = 0
+		v.AuxInt = int8ToAuxInt(0)
 		return true
 	}
 	return false
@@ -20441,21 +20441,21 @@ func rewriteValuegeneric_OpRsh8x16(v *Value) bool {
 		if v_1.Op != OpConst16 {
 			break
 		}
-		c := v_1.AuxInt
+		c := auxIntToInt16(v_1.AuxInt)
 		v.reset(OpRsh8x64)
 		v0 := b.NewValue0(v.Pos, OpConst64, t)
-		v0.AuxInt = int64(uint16(c))
+		v0.AuxInt = int64ToAuxInt(int64(uint16(c)))
 		v.AddArg2(x, v0)
 		return true
 	}
 	// match: (Rsh8x16 (Const8 [0]) _)
 	// result: (Const8 [0])
 	for {
-		if v_0.Op != OpConst8 || v_0.AuxInt != 0 {
+		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 			break
 		}
 		v.reset(OpConst8)
-		v.AuxInt = 0
+		v.AuxInt = int8ToAuxInt(0)
 		return true
 	}
 	return false
@@ -20472,21 +20472,21 @@ func rewriteValuegeneric_OpRsh8x32(v *Value) bool {
 		if v_1.Op != OpConst32 {
 			break
 		}
-		c := v_1.AuxInt
+		c := auxIntToInt32(v_1.AuxInt)
 		v.reset(OpRsh8x64)
 		v0 := b.NewValue0(v.Pos, OpConst64, t)
-		v0.AuxInt = int64(uint32(c))
+		v0.AuxInt = int64ToAuxInt(int64(uint32(c)))
 		v.AddArg2(x, v0)
 		return true
 	}
 	// match: (Rsh8x32 (Const8 [0]) _)
 	// result: (Const8 [0])
 	for {
-		if v_0.Op != OpConst8 || v_0.AuxInt != 0 {
+		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 			break
 		}
 		v.reset(OpConst8)
-		v.AuxInt = 0
+		v.AuxInt = int8ToAuxInt(0)
 		return true
 	}
 	return false
@@ -20514,7 +20514,7 @@ func rewriteValuegeneric_OpRsh8x64(v *Value) bool {
 	// result: x
 	for {
 		x := v_0
-		if v_1.Op != OpConst64 || v_1.AuxInt != 0 {
+		if v_1.Op != OpConst64 || auxIntToInt64(v_1.AuxInt) != 0 {
 			break
 		}
 		v.copyOf(x)
@@ -20523,11 +20523,11 @@ func rewriteValuegeneric_OpRsh8x64(v *Value) bool {
 	// match: (Rsh8x64 (Const8 [0]) _)
 	// result: (Const8 [0])
 	for {
-		if v_0.Op != OpConst8 || v_0.AuxInt != 0 {
+		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 			break
 		}
 		v.reset(OpConst8)
-		v.AuxInt = 0
+		v.AuxInt = int8ToAuxInt(0)
 		return true
 	}
 	// match: (Rsh8x64 (Rsh8x64 x (Const64 [c])) (Const64 [d]))
@@ -20544,17 +20544,17 @@ func rewriteValuegeneric_OpRsh8x64(v *Value) bool {
 		if v_0_1.Op != OpConst64 {
 			break
 		}
-		c := v_0_1.AuxInt
+		c := auxIntToInt64(v_0_1.AuxInt)
 		if v_1.Op != OpConst64 {
 			break
 		}
-		d := v_1.AuxInt
+		d := auxIntToInt64(v_1.AuxInt)
 		if !(!uaddOvf(c, d)) {
 			break
 		}
 		v.reset(OpRsh8x64)
 		v0 := b.NewValue0(v.Pos, OpConst64, t)
-		v0.AuxInt = c + d
+		v0.AuxInt = int64ToAuxInt(c + d)
 		v.AddArg2(x, v0)
 		return true
 	}
@@ -20572,21 +20572,21 @@ func rewriteValuegeneric_OpRsh8x8(v *Value) bool {
 		if v_1.Op != OpConst8 {
 			break
 		}
-		c := v_1.AuxInt
+		c := auxIntToInt8(v_1.AuxInt)
 		v.reset(OpRsh8x64)
 		v0 := b.NewValue0(v.Pos, OpConst64, t)
-		v0.AuxInt = int64(uint8(c))
+		v0.AuxInt = int64ToAuxInt(int64(uint8(c)))
 		v.AddArg2(x, v0)
 		return true
 	}
 	// match: (Rsh8x8 (Const8 [0]) _)
 	// result: (Const8 [0])
 	for {
-		if v_0.Op != OpConst8 || v_0.AuxInt != 0 {
+		if v_0.Op != OpConst8 || auxIntToInt8(v_0.AuxInt) != 0 {
 			break
 		}
 		v.reset(OpConst8)
-		v.AuxInt = 0
+		v.AuxInt = int8ToAuxInt(0)
 		return true
 	}
 	return false
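
Note for reviewers: every SSA *Value stores its integer payload in a single int64 AuxInt field, and the mechanical change in the generated matchers above is to route all reads and writes of that field through typed accessors (auxIntToInt8/16/32/64 on the read side, int8/16/32/64ToAuxInt on the write side) instead of comparing and assigning the raw int64. The sketch below shows the shape these helpers plausibly take; the canonical definitions live in cmd/compile/internal/ssa/rewrite.go, so treat this as illustrative rather than authoritative:

	// Illustrative sketch only: typed wrappers around Value.AuxInt (an int64).
	// Reads narrow the stored int64 to the width the op's aux type declares;
	// writes widen back, so the conversion point is explicit and type-checked.
	func auxIntToInt8(i int64) int8   { return int8(i) }
	func auxIntToInt16(i int64) int16 { return int16(i) }
	func auxIntToInt32(i int64) int32 { return int32(i) }
	func auxIntToInt64(i int64) int64 { return i }

	func int8ToAuxInt(i int8) int64   { return int64(i) }
	func int16ToAuxInt(i int16) int64 { return int64(i) }
	func int32ToAuxInt(i int32) int64 { return int64(i) }
	func int64ToAuxInt(i int64) int64 { return i }

The apparent payoff, judging from the generated code: a rule that produces a Const8 must now go through int8ToAuxInt, so handing it a constant wider than int8 becomes a compile-time type error in the generated rewriter instead of a silent truncation at run time, while patterns such as the shift-width normalizations keep their explicit int64(uint8(c)) widening, now wrapped as int64ToAuxInt(int64(uint8(c))).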