Mirror of https://github.com/golang/go, synced 2024-11-20 06:54:42 -07:00
7b4df8f018
Add write barrier to atomic operations manipulating pointers.

In general an atomic write of a pointer word may indicate racy accesses, so there is no strictly safe way to attempt to keep the shadow copy in sync with the real one. Instead, mark the shadow copy as not used.

Redirect sync/atomic pointer routines back to the runtime ones, so that there is only one copy of the write barrier and shadow logic. In time we might consider doing this for most of the sync/atomic functions, but for now only the pointer routines need that treatment.

Found with GODEBUG=wbshadow=1 mode. Eventually that will run automatically, but right now it still detects other missing write barriers.

Change-Id: I852936b9a111a6cb9079cfaf6bd78b43016c0242
Reviewed-on: https://go-review.googlesource.com/2066
Reviewed-by: Rick Hudson <rlh@golang.org>
Reviewed-by: Austin Clements <austin@google.com>
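For context, these are the user-facing sync/atomic calls that the CL redirects back into the runtime; a minimal usage sketch (the node type and values are hypothetical illustration, not part of the change):

package main

import (
	"fmt"
	"sync/atomic"
	"unsafe"
)

// node is a hypothetical type; the sync/atomic pointer calls are the point.
type node struct{ value int }

func main() {
	var head unsafe.Pointer // treated as an atomically updated *node

	// StorePointer and CompareAndSwapPointer are the sync/atomic entry points
	// that now route through the runtime's pointer atomics, so the GC write
	// barrier (and shadow-heap bookkeeping) lives in exactly one place.
	first := &node{value: 1}
	atomic.StorePointer(&head, unsafe.Pointer(first))

	old := atomic.LoadPointer(&head)
	second := &node{value: 2}
	if atomic.CompareAndSwapPointer(&head, old, unsafe.Pointer(second)) {
		fmt.Println("head now points at", (*node)(atomic.LoadPointer(&head)).value)
	}
}

Because the redirection happens via go:linkname inside the runtime, the race detector can still intercept the sync/atomic symbols while the write barrier and shadow logic stay in a single copy.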
97 lines
3.5 KiB
Go
// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package runtime

import "unsafe"

// These functions cannot have go:noescape annotations,
// because while ptr does not escape, new does.
// If new is marked as not escaping, the compiler will make incorrect
// escape analysis decisions about the pointer value being stored.
// Instead, these are wrappers around the actual atomics (xchgp1 and so on)
// that use noescape to convey which arguments do not escape.
//
// Additionally, these functions must update the shadow heap for
// write barrier checking.

//go:nosplit
func atomicstorep(ptr unsafe.Pointer, new unsafe.Pointer) {
	atomicstorep1(noescape(ptr), new)
	writebarrierptr_nostore((*uintptr)(ptr), uintptr(new))
	if mheap_.shadow_enabled {
		writebarrierptr_noshadow((*uintptr)(noescape(ptr)))
	}
}

//go:nosplit
func xchgp(ptr unsafe.Pointer, new unsafe.Pointer) unsafe.Pointer {
	old := xchgp1(noescape(ptr), new)
	writebarrierptr_nostore((*uintptr)(ptr), uintptr(new))
	if mheap_.shadow_enabled {
		writebarrierptr_noshadow((*uintptr)(noescape(ptr)))
	}
	return old
}

//go:nosplit
func casp(ptr *unsafe.Pointer, old, new unsafe.Pointer) bool {
	if !casp1((*unsafe.Pointer)(noescape(unsafe.Pointer(ptr))), noescape(old), new) {
		return false
	}
	writebarrierptr_nostore((*uintptr)(unsafe.Pointer(ptr)), uintptr(new))
	if mheap_.shadow_enabled {
		writebarrierptr_noshadow((*uintptr)(noescape(unsafe.Pointer(ptr))))
	}
	return true
}

// Like above, but implement in terms of sync/atomic's uintptr operations.
// We cannot just call the runtime routines, because the race detector expects
// to be able to intercept the sync/atomic forms but not the runtime forms.

//go:linkname sync_atomic_StoreUintptr sync/atomic.StoreUintptr
func sync_atomic_StoreUintptr(ptr *uintptr, new uintptr)

//go:linkname sync_atomic_StorePointer sync/atomic.StorePointer
//go:nosplit
func sync_atomic_StorePointer(ptr *unsafe.Pointer, new unsafe.Pointer) {
	sync_atomic_StoreUintptr((*uintptr)(unsafe.Pointer(ptr)), uintptr(new))
	atomicstorep1(noescape(unsafe.Pointer(ptr)), new)
	writebarrierptr_nostore((*uintptr)(unsafe.Pointer(ptr)), uintptr(new))
	if mheap_.shadow_enabled {
		writebarrierptr_noshadow((*uintptr)(noescape(unsafe.Pointer(ptr))))
	}
}

//go:linkname sync_atomic_SwapUintptr sync/atomic.SwapUintptr
func sync_atomic_SwapUintptr(ptr *uintptr, new uintptr) uintptr

//go:linkname sync_atomic_SwapPointer sync/atomic.SwapPointer
//go:nosplit
func sync_atomic_SwapPointer(ptr unsafe.Pointer, new unsafe.Pointer) unsafe.Pointer {
	old := unsafe.Pointer(sync_atomic_SwapUintptr((*uintptr)(noescape(ptr)), uintptr(new)))
	writebarrierptr_nostore((*uintptr)(ptr), uintptr(new))
	if mheap_.shadow_enabled {
		writebarrierptr_noshadow((*uintptr)(noescape(ptr)))
	}
	return old
}

//go:linkname sync_atomic_CompareAndSwapUintptr sync/atomic.CompareAndSwapUintptr
func sync_atomic_CompareAndSwapUintptr(ptr *uintptr, old, new uintptr) bool

//go:linkname sync_atomic_CompareAndSwapPointer sync/atomic.CompareAndSwapPointer
//go:nosplit
func sync_atomic_CompareAndSwapPointer(ptr *unsafe.Pointer, old, new unsafe.Pointer) bool {
	if !sync_atomic_CompareAndSwapUintptr((*uintptr)(noescape(unsafe.Pointer(ptr))), uintptr(old), uintptr(new)) {
		return false
	}
	writebarrierptr_nostore((*uintptr)(unsafe.Pointer(ptr)), uintptr(new))
	if mheap_.shadow_enabled {
		writebarrierptr_noshadow((*uintptr)(noescape(unsafe.Pointer(ptr))))
	}
	return true
}
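For reference, the noescape helper these wrappers rely on is defined elsewhere in the runtime (stubs.go), not in this file. Roughly, it hides a pointer from escape analysis by laundering it through a uintptr; a sketch for orientation rather than part of this CL:

// noescape hides a pointer from escape analysis: the xor keeps the compiler
// from tracing the result back to the argument, so the argument is not
// reported as escaping. (Sketch of the runtime helper; not in this file.)
//go:nosplit
func noescape(p unsafe.Pointer) unsafe.Pointer {
	x := uintptr(p)
	return unsafe.Pointer(x ^ 0)
}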