diff --git a/src/cmd/internal/obj/arm64/asm7.go b/src/cmd/internal/obj/arm64/asm7.go
index 1a359f1921..70072cfba4 100644
--- a/src/cmd/internal/obj/arm64/asm7.go
+++ b/src/cmd/internal/obj/arm64/asm7.go
@@ -280,7 +280,7 @@ func MOVCONST(d int64, s int, rt int) uint32 {
 const (
 	// Optab.flag
 	LFROM     = 1 << 0 // p.From uses constant pool
-	LFROM3    = 1 << 1 // p.From3 uses constant pool
+	LFROM128  = 1 << 1 // p.From3<<64+p.From forms a 128-bit constant in literal pool
 	LTO       = 1 << 2 // p.To uses constant pool
 	NOTUSETMP = 1 << 3 // p expands to multiple instructions, but does NOT use REGTMP
 )
@@ -419,7 +419,7 @@ var optab = []Optab{
 	{AMOVD, C_LACON, C_NONE, C_NONE, C_RSP, 34, 8, REGSP, LFROM, 0},
 
 	// Move a large constant to a vector register.
-	{AVMOVQ, C_VCON, C_NONE, C_VCON, C_VREG, 101, 4, 0, LFROM | LFROM3, 0},
+	{AVMOVQ, C_VCON, C_NONE, C_VCON, C_VREG, 101, 4, 0, LFROM128, 0},
 	{AVMOVD, C_VCON, C_NONE, C_NONE, C_VREG, 101, 4, 0, LFROM, 0},
 	{AVMOVS, C_LCON, C_NONE, C_NONE, C_VREG, 101, 4, 0, LFROM, 0},
 
@@ -995,8 +995,8 @@ func span7(ctxt *obj.Link, cursym *obj.LSym, newprog obj.ProgAlloc) {
 		if o.flag&LFROM != 0 {
 			c.addpool(p, &p.From)
 		}
-		if o.flag&LFROM3 != 0 {
-			c.addpool(p, p.GetFrom3())
+		if o.flag&LFROM128 != 0 {
+			c.addpool128(p, &p.From, p.GetFrom3())
 		}
 		if o.flag&LTO != 0 {
 			c.addpool(p, &p.To)
@@ -1201,6 +1201,36 @@ func (c *ctxt7) flushpool(p *obj.Prog, skip int) {
 	}
 }
 
+// addpool128 adds a 128-bit constant to literal pool by two consecutive DWORD
+// instructions, the 128-bit constant is formed by ah.Offset<<64+al.Offset.
+func (c *ctxt7) addpool128(p *obj.Prog, al, ah *obj.Addr) {
+	lit := al.Offset
+	q := c.newprog()
+	q.As = ADWORD
+	q.To.Type = obj.TYPE_CONST
+	q.To.Offset = lit
+	q.Pc = int64(c.pool.size)
+
+	lit = ah.Offset
+	t := c.newprog()
+	t.As = ADWORD
+	t.To.Type = obj.TYPE_CONST
+	t.To.Offset = lit
+	t.Pc = int64(c.pool.size + 8)
+	q.Link = t
+
+	if c.blitrl == nil {
+		c.blitrl = q
+		c.pool.start = uint32(p.Pc)
+	} else {
+		c.elitrl.Link = q
+	}
+
+	c.elitrl = t
+	c.pool.size += 16
+	p.Pool = q
+}
+
 /*
  * MOVD foo(SB), R is actually
  *	MOVD addr, REGTMP
diff --git a/src/cmd/internal/obj/arm64/asm_test.go b/src/cmd/internal/obj/arm64/asm_arm64_test.go
similarity index 89%
rename from src/cmd/internal/obj/arm64/asm_test.go
rename to src/cmd/internal/obj/arm64/asm_arm64_test.go
index 9efdb0217f..c6a00f5b94 100644
--- a/src/cmd/internal/obj/arm64/asm_test.go
+++ b/src/cmd/internal/obj/arm64/asm_arm64_test.go
@@ -47,7 +47,7 @@ func TestLarge(t *testing.T) {
 
 	// assemble generated file
 	cmd := exec.Command(testenv.GoToolPath(t), "tool", "asm", "-S", "-o", filepath.Join(dir, "test.o"), tmpfile)
-	cmd.Env = append(os.Environ(), "GOARCH=arm64", "GOOS=linux")
+	cmd.Env = append(os.Environ(), "GOOS=linux")
 	out, err := cmd.CombinedOutput()
 	if err != nil {
 		t.Errorf("Assemble failed: %v, output: %s", err, out)
@@ -62,7 +62,7 @@ func TestLarge(t *testing.T) {
 
 	// build generated file
 	cmd = exec.Command(testenv.GoToolPath(t), "tool", "asm", "-o", filepath.Join(dir, "x.o"), tmpfile)
-	cmd.Env = append(os.Environ(), "GOARCH=arm64", "GOOS=linux")
+	cmd.Env = append(os.Environ(), "GOOS=linux")
 	out, err = cmd.CombinedOutput()
 	if err != nil {
 		t.Errorf("Build failed: %v, output: %s", err, out)
@@ -96,7 +96,7 @@ func TestNoRet(t *testing.T) {
 		t.Fatal(err)
 	}
 	cmd := exec.Command(testenv.GoToolPath(t), "tool", "asm", "-o", filepath.Join(dir, "x.o"), tmpfile)
-	cmd.Env = append(os.Environ(), "GOARCH=arm64", "GOOS=linux")
+	cmd.Env = append(os.Environ(), "GOOS=linux")
 	if out, err := cmd.CombinedOutput(); err != nil {
 		t.Errorf("%v\n%s", err, out)
 	}
@@ -134,7 +134,7 @@ func TestPCALIGN(t *testing.T) {
 			t.Fatal(err)
 		}
 		cmd := exec.Command(testenv.GoToolPath(t), "tool", "asm", "-S", "-o", tmpout, tmpfile)
-		cmd.Env = append(os.Environ(), "GOARCH=arm64", "GOOS=linux")
+		cmd.Env = append(os.Environ(), "GOOS=linux")
 		out, err := cmd.CombinedOutput()
 		if err != nil {
 			t.Errorf("The %s build failed: %v, output: %s", test.name, err, out)
@@ -150,3 +150,13 @@ func TestPCALIGN(t *testing.T) {
 		}
 	}
 }
+
+func testvmovq() (r1, r2 uint64)
+
+// TestVMOVQ checks if the arm64 VMOVQ instruction is working properly.
+func TestVMOVQ(t *testing.T) {
+	a, b := testvmovq()
+	if a != 0x7040201008040201 || b != 0x3040201008040201 {
+		t.Errorf("TestVMOVQ got: a=0x%x, b=0x%x, want: a=0x7040201008040201, b=0x3040201008040201", a, b)
+	}
+}
diff --git a/src/cmd/internal/obj/arm64/asm_arm64_test.s b/src/cmd/internal/obj/arm64/asm_arm64_test.s
new file mode 100644
index 0000000000..9d337a4fd1
--- /dev/null
+++ b/src/cmd/internal/obj/arm64/asm_arm64_test.s
@@ -0,0 +1,14 @@
+// Copyright 2021 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+#include "textflag.h"
+
+// testvmovq() (r1, r2 uint64)
+TEXT ·testvmovq(SB), NOSPLIT, $0-16
+	VMOVQ	$0x7040201008040201, $0x3040201008040201, V1
+	VMOV	V1.D[0], R0
+	VMOV	V1.D[1], R1
+	MOVD	R0, r1+0(FP)
+	MOVD	R1, r2+8(FP)
+	RET
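
For reference, a minimal standalone Go sketch (illustrative only, not part of this change) of the operand mapping described by the new LFROM128 flag and the addpool128 comment: the pooled literal is p.From3<<64 + p.From, and addpool128 emits it as two consecutive DWORDs, the first VMOVQ immediate first. That is why TestVMOVQ above expects the first immediate in V1.D[0] and the second in V1.D[1]. The constants below are taken from asm_arm64_test.s.

package main

import (
	"fmt"
	"math/big"
)

func main() {
	// Illustrative sketch only; the two VMOVQ immediates used in asm_arm64_test.s.
	al := new(big.Int).SetUint64(0x7040201008040201) // p.From  -> first pooled DWORD -> V1.D[0]
	ah := new(big.Int).SetUint64(0x3040201008040201) // p.From3 -> second pooled DWORD -> V1.D[1]

	// LFROM128: the 128-bit constant placed in the literal pool is p.From3<<64 + p.From.
	lit := new(big.Int).Lsh(ah, 64)
	lit.Add(lit, al)
	fmt.Printf("128-bit pool literal: %#x\n", lit)
	// Prints: 0x30402010080402017040201008040201
}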