
cmd/asm: add check for register and shift/extension combination on arm64

The current code lacks a check on whether the register and shift/extension
combination is valid; for example, the following instructions also compile.
	ADD     F1<<1, R1, R3
	ADD     V1<<1, R1, R3
	MOVW    (R9)(F8.SXTW<<2), R19
	VST1    R4.D[1], (R0)

Actually only general registers can perform shift operations, and element
and arrangement extensions are only applicable to vector registers. This
CL adds a check for the register and shift/extension combination on arm64.
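
For contrast, a rough sketch of well-formed counterparts, keeping shifts on
general registers and element/arrangement suffixes on vector registers
(register choices here are illustrative, not taken from this CL):
	ADD	R1<<1, R1, R3           // shifted operand is a general register
	MOVW	(R9)(R8.SXTW<<2), R19   // extended index register is a general register
	VDUP	V8.B[0], V3.B16         // element and arrangement suffixes go on vector registers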

Change-Id: I93dd9343e92a66899cba8eaf4e0ac5430e94692b
Reviewed-on: https://go-review.googlesource.com/c/go/+/312571
Trust: eric fang <eric.fang@arm.com>
Reviewed-by: eric fang <eric.fang@arm.com>
Reviewed-by: Keith Randall <khr@golang.org>
Run-TryBot: eric fang <eric.fang@arm.com>
TryBot-Result: Go Bot <gobot@golang.org>
eric fang 2021-04-21 03:12:20 +00:00
parent f439a76253
commit 9726c78539
3 changed files with 175 additions and 153 deletions


@@ -147,7 +147,16 @@ func arm64RegisterNumber(name string, n int16) (int16, bool) {
 	return 0, false
 }
 
-// ARM64RegisterExtension parses an ARM64 register with extension or arrangement.
+// ARM64RegisterShift constructs an ARM64 register with shift operation.
+func ARM64RegisterShift(reg, op, count int16) (int64, error) {
+	// the base register of shift operations must be general register.
+	if reg > arm64.REG_R31 || reg < arm64.REG_R0 {
+		return 0, errors.New("invalid register for shift operation")
+	}
+	return int64(reg&31)<<16 | int64(op)<<22 | int64(uint16(count)), nil
+}
+
+// ARM64RegisterExtension constructs an ARM64 register with extension or arrangement.
 func ARM64RegisterExtension(a *obj.Addr, ext string, reg, num int16, isAmount, isIndex bool) error {
 	Rnum := (reg & 31) + int16(num<<5)
 	if isAmount {
@@ -155,154 +164,163 @@ func ARM64RegisterExtension(a *obj.Addr, ext string, reg, num int16, isAmount, isIndex bool) error {
 			return errors.New("index shift amount is out of range")
 		}
 	}
-	switch ext {
-	case "UXTB":
-		if !isAmount {
-			return errors.New("invalid register extension")
-		}
-		if a.Type == obj.TYPE_MEM {
-			return errors.New("invalid shift for the register offset addressing mode")
-		}
-		a.Reg = arm64.REG_UXTB + Rnum
-	case "UXTH":
-		if !isAmount {
-			return errors.New("invalid register extension")
-		}
-		if a.Type == obj.TYPE_MEM {
-			return errors.New("invalid shift for the register offset addressing mode")
-		}
-		a.Reg = arm64.REG_UXTH + Rnum
-	case "UXTW":
-		if !isAmount {
-			return errors.New("invalid register extension")
-		}
-		// effective address of memory is a base register value and an offset register value.
-		if a.Type == obj.TYPE_MEM {
-			a.Index = arm64.REG_UXTW + Rnum
-		} else {
-			a.Reg = arm64.REG_UXTW + Rnum
-		}
-	case "UXTX":
-		if !isAmount {
-			return errors.New("invalid register extension")
-		}
-		if a.Type == obj.TYPE_MEM {
-			return errors.New("invalid shift for the register offset addressing mode")
-		}
-		a.Reg = arm64.REG_UXTX + Rnum
-	case "SXTB":
-		if !isAmount {
-			return errors.New("invalid register extension")
-		}
-		a.Reg = arm64.REG_SXTB + Rnum
-	case "SXTH":
-		if !isAmount {
-			return errors.New("invalid register extension")
-		}
-		if a.Type == obj.TYPE_MEM {
-			return errors.New("invalid shift for the register offset addressing mode")
-		}
-		a.Reg = arm64.REG_SXTH + Rnum
-	case "SXTW":
-		if !isAmount {
-			return errors.New("invalid register extension")
-		}
-		if a.Type == obj.TYPE_MEM {
-			a.Index = arm64.REG_SXTW + Rnum
-		} else {
-			a.Reg = arm64.REG_SXTW + Rnum
-		}
-	case "SXTX":
-		if !isAmount {
-			return errors.New("invalid register extension")
-		}
-		if a.Type == obj.TYPE_MEM {
-			a.Index = arm64.REG_SXTX + Rnum
-		} else {
-			a.Reg = arm64.REG_SXTX + Rnum
-		}
-	case "LSL":
-		if !isAmount {
-			return errors.New("invalid register extension")
-		}
-		a.Index = arm64.REG_LSL + Rnum
-	case "B8":
-		if isIndex {
-			return errors.New("invalid register extension")
-		}
-		a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_8B & 15) << 5)
-	case "B16":
-		if isIndex {
-			return errors.New("invalid register extension")
-		}
-		a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_16B & 15) << 5)
-	case "H4":
-		if isIndex {
-			return errors.New("invalid register extension")
-		}
-		a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_4H & 15) << 5)
-	case "H8":
-		if isIndex {
-			return errors.New("invalid register extension")
-		}
-		a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_8H & 15) << 5)
-	case "S2":
-		if isIndex {
-			return errors.New("invalid register extension")
-		}
-		a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_2S & 15) << 5)
-	case "S4":
-		if isIndex {
-			return errors.New("invalid register extension")
-		}
-		a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_4S & 15) << 5)
-	case "D1":
-		if isIndex {
-			return errors.New("invalid register extension")
-		}
-		a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_1D & 15) << 5)
-	case "D2":
-		if isIndex {
-			return errors.New("invalid register extension")
-		}
-		a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_2D & 15) << 5)
-	case "Q1":
-		if isIndex {
-			return errors.New("invalid register extension")
-		}
-		a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_1Q & 15) << 5)
-	case "B":
-		if !isIndex {
-			return nil
-		}
-		a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_B & 15) << 5)
-		a.Index = num
-	case "H":
-		if !isIndex {
-			return nil
-		}
-		a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_H & 15) << 5)
-		a.Index = num
-	case "S":
-		if !isIndex {
-			return nil
-		}
-		a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_S & 15) << 5)
-		a.Index = num
-	case "D":
-		if !isIndex {
-			return nil
-		}
-		a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_D & 15) << 5)
-		a.Index = num
-	default:
-		return errors.New("unsupported register extension type: " + ext)
-	}
+	if reg <= arm64.REG_R31 && reg >= arm64.REG_R0 {
+		switch ext {
+		case "UXTB":
+			if !isAmount {
+				return errors.New("invalid register extension")
+			}
+			if a.Type == obj.TYPE_MEM {
+				return errors.New("invalid shift for the register offset addressing mode")
+			}
+			a.Reg = arm64.REG_UXTB + Rnum
+		case "UXTH":
+			if !isAmount {
+				return errors.New("invalid register extension")
+			}
+			if a.Type == obj.TYPE_MEM {
+				return errors.New("invalid shift for the register offset addressing mode")
+			}
+			a.Reg = arm64.REG_UXTH + Rnum
+		case "UXTW":
+			if !isAmount {
+				return errors.New("invalid register extension")
+			}
+			// effective address of memory is a base register value and an offset register value.
+			if a.Type == obj.TYPE_MEM {
+				a.Index = arm64.REG_UXTW + Rnum
+			} else {
+				a.Reg = arm64.REG_UXTW + Rnum
+			}
+		case "UXTX":
+			if !isAmount {
+				return errors.New("invalid register extension")
+			}
+			if a.Type == obj.TYPE_MEM {
+				return errors.New("invalid shift for the register offset addressing mode")
+			}
+			a.Reg = arm64.REG_UXTX + Rnum
+		case "SXTB":
+			if !isAmount {
+				return errors.New("invalid register extension")
+			}
+			a.Reg = arm64.REG_SXTB + Rnum
+		case "SXTH":
+			if !isAmount {
+				return errors.New("invalid register extension")
+			}
+			if a.Type == obj.TYPE_MEM {
+				return errors.New("invalid shift for the register offset addressing mode")
+			}
+			a.Reg = arm64.REG_SXTH + Rnum
+		case "SXTW":
+			if !isAmount {
+				return errors.New("invalid register extension")
+			}
+			if a.Type == obj.TYPE_MEM {
+				a.Index = arm64.REG_SXTW + Rnum
+			} else {
+				a.Reg = arm64.REG_SXTW + Rnum
+			}
+		case "SXTX":
+			if !isAmount {
+				return errors.New("invalid register extension")
+			}
+			if a.Type == obj.TYPE_MEM {
+				a.Index = arm64.REG_SXTX + Rnum
+			} else {
+				a.Reg = arm64.REG_SXTX + Rnum
+			}
+		case "LSL":
+			if !isAmount {
+				return errors.New("invalid register extension")
+			}
+			a.Index = arm64.REG_LSL + Rnum
+		default:
+			return errors.New("unsupported general register extension type: " + ext)
+		}
+	} else if reg <= arm64.REG_V31 && reg >= arm64.REG_V0 {
+		switch ext {
+		case "B8":
+			if isIndex {
+				return errors.New("invalid register extension")
+			}
+			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_8B & 15) << 5)
+		case "B16":
+			if isIndex {
+				return errors.New("invalid register extension")
+			}
+			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_16B & 15) << 5)
+		case "H4":
+			if isIndex {
+				return errors.New("invalid register extension")
+			}
+			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_4H & 15) << 5)
+		case "H8":
+			if isIndex {
+				return errors.New("invalid register extension")
+			}
+			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_8H & 15) << 5)
+		case "S2":
+			if isIndex {
+				return errors.New("invalid register extension")
+			}
+			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_2S & 15) << 5)
+		case "S4":
+			if isIndex {
+				return errors.New("invalid register extension")
+			}
+			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_4S & 15) << 5)
+		case "D1":
+			if isIndex {
+				return errors.New("invalid register extension")
+			}
+			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_1D & 15) << 5)
+		case "D2":
+			if isIndex {
+				return errors.New("invalid register extension")
+			}
+			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_2D & 15) << 5)
+		case "Q1":
+			if isIndex {
+				return errors.New("invalid register extension")
+			}
+			a.Reg = arm64.REG_ARNG + (reg & 31) + ((arm64.ARNG_1Q & 15) << 5)
+		case "B":
+			if !isIndex {
+				return nil
+			}
+			a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_B & 15) << 5)
+			a.Index = num
+		case "H":
+			if !isIndex {
+				return nil
+			}
+			a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_H & 15) << 5)
+			a.Index = num
+		case "S":
+			if !isIndex {
+				return nil
+			}
+			a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_S & 15) << 5)
+			a.Index = num
+		case "D":
+			if !isIndex {
+				return nil
+			}
+			a.Reg = arm64.REG_ELEM + (reg & 31) + ((arm64.ARNG_D & 15) << 5)
+			a.Index = num
+		default:
+			return errors.New("unsupported simd register extension type: " + ext)
+		}
+	} else {
+		return errors.New("invalid register and extension combination")
+	}
 	return nil
 }
 
-// ARM64RegisterArrangement parses an ARM64 vector register arrangement.
+// ARM64RegisterArrangement constructs an ARM64 vector register arrangement.
 func ARM64RegisterArrangement(reg int16, name, arng string) (int64, error) {
 	var curQ, curSize uint16
 	if name[0] != 'V' {


@@ -689,7 +689,11 @@ func (p *Parser) registerShift(name string, prefix rune) int64 {
 		p.errorf("unexpected %s in register shift", tok.String())
 	}
 	if p.arch.Family == sys.ARM64 {
-		return int64(r1&31)<<16 | int64(op)<<22 | int64(uint16(count))
+		off, err := arch.ARM64RegisterShift(r1, op, count)
+		if err != nil {
+			p.errorf(err.Error())
+		}
+		return off
 	} else {
 		return int64((r1 & 15) | op<<5 | count)
 	}


@@ -96,13 +96,13 @@ TEXT errors(SB),$0
 	VMOV	V8.H[9], R3      // ERROR "register element index out of range 0 to 7"
 	VMOV	V8.S[4], R3      // ERROR "register element index out of range 0 to 3"
 	VMOV	V8.D[2], R3      // ERROR "register element index out of range 0 to 1"
-	VDUP	V8.B[16], R3.B16 // ERROR "register element index out of range 0 to 15"
-	VDUP	V8.B[17], R3.B8  // ERROR "register element index out of range 0 to 15"
-	VDUP	V8.H[9], R3.H4   // ERROR "register element index out of range 0 to 7"
-	VDUP	V8.H[9], R3.H8   // ERROR "register element index out of range 0 to 7"
-	VDUP	V8.S[4], R3.S2   // ERROR "register element index out of range 0 to 3"
-	VDUP	V8.S[4], R3.S4   // ERROR "register element index out of range 0 to 3"
-	VDUP	V8.D[2], R3.D2   // ERROR "register element index out of range 0 to 1"
+	VDUP	V8.B[16], V3.B16 // ERROR "register element index out of range 0 to 15"
+	VDUP	V8.B[17], V3.B8  // ERROR "register element index out of range 0 to 15"
+	VDUP	V8.H[9], V3.H4   // ERROR "register element index out of range 0 to 7"
+	VDUP	V8.H[9], V3.H8   // ERROR "register element index out of range 0 to 7"
+	VDUP	V8.S[4], V3.S2   // ERROR "register element index out of range 0 to 3"
+	VDUP	V8.S[4], V3.S4   // ERROR "register element index out of range 0 to 3"
+	VDUP	V8.D[2], V3.D2   // ERROR "register element index out of range 0 to 1"
 	VFMLA	V1.D2, V12.D2, V3.S2 // ERROR "operand mismatch"
 	VFMLA	V1.S2, V12.S2, V3.D2 // ERROR "operand mismatch"
 	VFMLA	V1.S4, V12.S2, V3.D2 // ERROR "operand mismatch"