mirror of
https://github.com/golang/go
synced 2024-11-20 03:34:40 -07:00
f5d494bbdf
Currently it's possible for the garbage collector to observe uninitialized memory or stale heap bitmap bits on weakly ordered architectures such as ARM and PPC. On such architectures, the stores that zero newly allocated memory and initialize its heap bitmap may move after a store in user code that makes the allocated object observable by the garbage collector. To fix this, add a "publication barrier" (also known as an "export barrier") before returning from mallocgc. This is a store/store barrier that ensures any write done by user code that makes the returned object observable to the garbage collector will be ordered after the initialization performed by mallocgc. No barrier is necessary on the reading side because of the data dependency between loading the pointer and loading the contents of the object. Fixes one of the issues raised in #9984. Change-Id: Ia3d96ad9c5fc7f4d342f5e05ec0ceae700cd17c8 Reviewed-on: https://go-review.googlesource.com/11083 Reviewed-by: Rick Hudson <rlh@golang.org> Reviewed-by: Dmitry Vyukov <dvyukov@google.com> Reviewed-by: Minux Ma <minux@golang.org> Reviewed-by: Martin Capitanio <capnm9@gmail.com> Reviewed-by: Russ Cox <rsc@golang.org>
48 lines
1.1 KiB
ArmAsm
48 lines
1.1 KiB
ArmAsm
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

// +build ppc64 ppc64le

#include "textflag.h"
// uint32 runtime·atomicload(uint32 volatile* addr)
//
// Acquire load of a 32-bit value. SYNC orders the load after all prior
// memory accesses; the CMPW-against-itself plus never-taken branch plus
// ISYNC is the standard Power acquire sequence: the compare/branch makes
// subsequent execution control-dependent on the loaded value, and ISYNC
// prevents later instructions from being performed speculatively ahead
// of the load.
TEXT ·atomicload(SB),NOSPLIT,$-8-12
	MOVD	addr+0(FP), R3		// R3 = addr
	SYNC				// full barrier before the load
	MOVWZ	0(R3), R3		// R3 = *addr (zero-extended 32-bit load)
	CMPW	R3, R3, CR7		// compare value with itself: always equal
	BC	4, 30, 1(PC)		// bne- cr7,0x4 (never taken; completes the data dependency)
	ISYNC				// acquire fence: discard any speculative execution
	MOVW	R3, ret+8(FP)
	RET
// uint64 runtime·atomicload64(uint64 volatile* addr)
//
// Acquire load of a 64-bit value; same SYNC / load / CMP / BC / ISYNC
// acquire sequence as atomicload above, using a doubleword load.
TEXT ·atomicload64(SB),NOSPLIT,$-8-16
	MOVD	addr+0(FP), R3		// R3 = addr
	SYNC				// full barrier before the load
	MOVD	0(R3), R3		// R3 = *addr (64-bit load)
	CMP	R3, R3, CR7		// compare value with itself: always equal
	BC	4, 30, 1(PC)		// bne- cr7,0x4 (never taken; completes the data dependency)
	ISYNC				// acquire fence: discard any speculative execution
	MOVD	R3, ret+8(FP)
	RET
// void *runtime·atomicloadp(void *volatile *addr)
//
// Acquire load of a pointer; identical instruction sequence to
// atomicload64 since pointers are 64-bit on ppc64/ppc64le.
TEXT ·atomicloadp(SB),NOSPLIT,$-8-16
	MOVD	addr+0(FP), R3		// R3 = addr
	SYNC				// full barrier before the load
	MOVD	0(R3), R3		// R3 = *addr (pointer-sized load)
	CMP	R3, R3, CR7		// compare value with itself: always equal
	BC	4, 30, 1(PC)		// bne- cr7,0x4 (never taken; completes the data dependency)
	ISYNC				// acquire fence: discard any speculative execution
	MOVD	R3, ret+8(FP)
	RET
// void runtime·publicationBarrier()
//
// Store/store barrier used by mallocgc so that the stores initializing a
// newly allocated object are ordered before any store that publishes the
// object's pointer (see the change description: the GC must never observe
// uninitialized memory or stale heap bitmap bits).
TEXT ·publicationBarrier(SB),NOSPLIT,$-8-0
	// LWSYNC is the "export" barrier recommended by Power ISA
	// v2.07 book II, appendix B.2.2.2.
	// LWSYNC is a load/load, load/store, and store/store barrier.
	WORD	$0x7c2004ac		// LWSYNC (encoded directly; not an assembler mnemonic here)
	RET