
runtime: declare arg size/map for race version of sync/atomic functions

The argument size and pointer map are used in stack scanning when these
functions are deferred. Declare the right argument size and map so the
deferred call's arguments can be scanned correctly.

Fixes #42599.

Change-Id: I74f9409d574cf7c383f4d8f83e38521026b48861
Reviewed-on: https://go-review.googlesource.com/c/go/+/270079
Trust: Cherry Zhang <cherryyz@google.com>
Run-TryBot: Cherry Zhang <cherryyz@google.com>
Reviewed-by: Keith Randall <khr@golang.org>
Cherry Zhang 2020-11-13 21:08:26 -05:00
parent d70a33a40b
commit 0932dc2118
4 changed files with 154 additions and 78 deletions
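
As a rough sketch (not part of this change), the $0-N frame sizes declared in the assembly below follow from the Go signatures of the sync/atomic functions under the stack-based calling convention (ABI0) on 64-bit targets: pointers are 8 bytes, and results start at a pointer-aligned offset after the arguments. The small program below only illustrates that arithmetic; it is not code from the CL.

package main

import "fmt"

func main() {
	// Each line mirrors one of the frame sizes declared in the stubs below.
	fmt.Println("LoadInt64(addr *int64) int64:             ", 8+8)     // -> $0-16
	fmt.Println("StoreInt32(addr *int32, val int32):       ", 8+4)     // -> $0-12
	fmt.Println("AddInt32(addr *int32, delta int32) int32: ", 8+4+4+4) // result padded to offset 16 -> $0-20
	fmt.Println("SwapInt64(addr *int64, new int64) int64:  ", 8+8+8)   // -> $0-24
	fmt.Println("CompareAndSwapInt64(addr, old, new) bool: ", 8+8+8+1) // -> $0-25
}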


@@ -299,3 +299,27 @@ func TestNoRaceAtomicCrash(t *testing.T) {
	}()
	atomic.AddInt32(nilptr, 1)
}

func TestNoRaceDeferAtomicStore(t *testing.T) {
	// Test that when an atomic function is deferred directly, the
	// GC scans it correctly. See issue 42599.
	type foo struct {
		bar int64
	}

	var doFork func(f *foo, depth int)
	doFork = func(f *foo, depth int) {
		atomic.StoreInt64(&f.bar, 1)
		defer atomic.StoreInt64(&f.bar, 0)
		if depth > 0 {
			for i := 0; i < 2; i++ {
				f2 := &foo{}
				go doFork(f2, depth-1)
			}
		}
		runtime.GC()
	}

	f := &foo{}
	doFork(f, 11)
}
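
The new test defers the atomic store directly rather than through a closure. As a hedged sketch of why that distinction matters (the helper names below are invented for illustration): when a call is deferred directly, the runtime records its argument frame and the GC later scans it using the callee's declared argument size and pointer map, which is what the corrected frame sizes in the assembly below make possible.

package main

import "sync/atomic"

type foo struct{ bar int64 }

// direct relies on atomic.StoreInt64's own argument size and pointer map
// when the GC scans the recorded argument frame of the deferred call.
func direct(f *foo) {
	defer atomic.StoreInt64(&f.bar, 0)
}

// wrapped defers a parameterless closure instead, so StoreInt64's argument
// map is not consulted while the defer is pending.
func wrapped(f *foo) {
	defer func() { atomic.StoreInt64(&f.bar, 0) }()
}

func main() {
	direct(&foo{bar: 1})
	wrapped(&foo{bar: 1})
}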


@@ -207,110 +207,136 @@ TEXT runtime·racefuncexit(SB), NOSPLIT, $0-0
// Atomic operations for sync/atomic package.
// Load
TEXT syncatomic·LoadInt32(SB), NOSPLIT, $0-0
TEXT syncatomic·LoadInt32(SB), NOSPLIT, $0-12
GO_ARGS
MOVQ $__tsan_go_atomic32_load(SB), AX
CALL racecallatomic<>(SB)
RET
TEXT syncatomic·LoadInt64(SB), NOSPLIT, $0-0
TEXT syncatomic·LoadInt64(SB), NOSPLIT, $0-16
GO_ARGS
MOVQ $__tsan_go_atomic64_load(SB), AX
CALL racecallatomic<>(SB)
RET
TEXT syncatomic·LoadUint32(SB), NOSPLIT, $0-0
TEXT syncatomic·LoadUint32(SB), NOSPLIT, $0-12
GO_ARGS
JMP syncatomic·LoadInt32(SB)
TEXT syncatomic·LoadUint64(SB), NOSPLIT, $0-0
TEXT syncatomic·LoadUint64(SB), NOSPLIT, $0-16
GO_ARGS
JMP syncatomic·LoadInt64(SB)
TEXT syncatomic·LoadUintptr(SB), NOSPLIT, $0-0
TEXT syncatomic·LoadUintptr(SB), NOSPLIT, $0-16
GO_ARGS
JMP syncatomic·LoadInt64(SB)
TEXT syncatomic·LoadPointer(SB), NOSPLIT, $0-0
TEXT syncatomic·LoadPointer(SB), NOSPLIT, $0-16
GO_ARGS
JMP syncatomic·LoadInt64(SB)
// Store
TEXT syncatomic·StoreInt32(SB), NOSPLIT, $0-0
TEXT syncatomic·StoreInt32(SB), NOSPLIT, $0-12
GO_ARGS
MOVQ $__tsan_go_atomic32_store(SB), AX
CALL racecallatomic<>(SB)
RET
TEXT syncatomic·StoreInt64(SB), NOSPLIT, $0-0
TEXT syncatomic·StoreInt64(SB), NOSPLIT, $0-16
GO_ARGS
MOVQ $__tsan_go_atomic64_store(SB), AX
CALL racecallatomic<>(SB)
RET
TEXT syncatomic·StoreUint32(SB), NOSPLIT, $0-0
TEXT syncatomic·StoreUint32(SB), NOSPLIT, $0-12
GO_ARGS
JMP syncatomic·StoreInt32(SB)
TEXT syncatomic·StoreUint64(SB), NOSPLIT, $0-0
TEXT syncatomic·StoreUint64(SB), NOSPLIT, $0-16
GO_ARGS
JMP syncatomic·StoreInt64(SB)
TEXT syncatomic·StoreUintptr(SB), NOSPLIT, $0-0
TEXT syncatomic·StoreUintptr(SB), NOSPLIT, $0-16
GO_ARGS
JMP syncatomic·StoreInt64(SB)
// Swap
TEXT syncatomic·SwapInt32(SB), NOSPLIT, $0-0
TEXT syncatomic·SwapInt32(SB), NOSPLIT, $0-20
GO_ARGS
MOVQ $__tsan_go_atomic32_exchange(SB), AX
CALL racecallatomic<>(SB)
RET
TEXT syncatomic·SwapInt64(SB), NOSPLIT, $0-0
TEXT syncatomic·SwapInt64(SB), NOSPLIT, $0-24
GO_ARGS
MOVQ $__tsan_go_atomic64_exchange(SB), AX
CALL racecallatomic<>(SB)
RET
TEXT syncatomic·SwapUint32(SB), NOSPLIT, $0-0
TEXT syncatomic·SwapUint32(SB), NOSPLIT, $0-20
GO_ARGS
JMP syncatomic·SwapInt32(SB)
TEXT syncatomic·SwapUint64(SB), NOSPLIT, $0-0
TEXT syncatomic·SwapUint64(SB), NOSPLIT, $0-24
GO_ARGS
JMP syncatomic·SwapInt64(SB)
TEXT syncatomic·SwapUintptr(SB), NOSPLIT, $0-0
TEXT syncatomic·SwapUintptr(SB), NOSPLIT, $0-24
GO_ARGS
JMP syncatomic·SwapInt64(SB)
// Add
TEXT syncatomic·AddInt32(SB), NOSPLIT, $0-0
TEXT syncatomic·AddInt32(SB), NOSPLIT, $0-20
GO_ARGS
MOVQ $__tsan_go_atomic32_fetch_add(SB), AX
CALL racecallatomic<>(SB)
MOVL add+8(FP), AX // convert fetch_add to add_fetch
ADDL AX, ret+16(FP)
RET
TEXT syncatomic·AddInt64(SB), NOSPLIT, $0-0
TEXT syncatomic·AddInt64(SB), NOSPLIT, $0-24
GO_ARGS
MOVQ $__tsan_go_atomic64_fetch_add(SB), AX
CALL racecallatomic<>(SB)
MOVQ add+8(FP), AX // convert fetch_add to add_fetch
ADDQ AX, ret+16(FP)
RET
TEXT syncatomic·AddUint32(SB), NOSPLIT, $0-0
TEXT syncatomic·AddUint32(SB), NOSPLIT, $0-20
GO_ARGS
JMP syncatomic·AddInt32(SB)
TEXT syncatomic·AddUint64(SB), NOSPLIT, $0-0
TEXT syncatomic·AddUint64(SB), NOSPLIT, $0-24
GO_ARGS
JMP syncatomic·AddInt64(SB)
TEXT syncatomic·AddUintptr(SB), NOSPLIT, $0-0
TEXT syncatomic·AddUintptr(SB), NOSPLIT, $0-24
GO_ARGS
JMP syncatomic·AddInt64(SB)
// CompareAndSwap
TEXT syncatomic·CompareAndSwapInt32(SB), NOSPLIT, $0-0
TEXT syncatomic·CompareAndSwapInt32(SB), NOSPLIT, $0-17
GO_ARGS
MOVQ $__tsan_go_atomic32_compare_exchange(SB), AX
CALL racecallatomic<>(SB)
RET
TEXT syncatomic·CompareAndSwapInt64(SB), NOSPLIT, $0-0
TEXT syncatomic·CompareAndSwapInt64(SB), NOSPLIT, $0-25
GO_ARGS
MOVQ $__tsan_go_atomic64_compare_exchange(SB), AX
CALL racecallatomic<>(SB)
RET
TEXT syncatomic·CompareAndSwapUint32(SB), NOSPLIT, $0-0
TEXT syncatomic·CompareAndSwapUint32(SB), NOSPLIT, $0-17
GO_ARGS
JMP syncatomic·CompareAndSwapInt32(SB)
TEXT syncatomic·CompareAndSwapUint64(SB), NOSPLIT, $0-0
TEXT syncatomic·CompareAndSwapUint64(SB), NOSPLIT, $0-25
GO_ARGS
JMP syncatomic·CompareAndSwapInt64(SB)
TEXT syncatomic·CompareAndSwapUintptr(SB), NOSPLIT, $0-0
TEXT syncatomic·CompareAndSwapUintptr(SB), NOSPLIT, $0-25
GO_ARGS
JMP syncatomic·CompareAndSwapInt64(SB)
// Generic atomic operation implementation.
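
One detail worth calling out from the Add stubs above: tsan's fetch_add hook yields the value seen before the addition, while sync/atomic's Add functions must return the new value, so the stubs add the delta into the result slot after the call ("convert fetch_add to add_fetch"). A rough Go rendering of that fix-up follows; the package and function names are invented for illustration.

package racefixup // illustrative name, not a real runtime package

// addFetchFromFetchAdd mirrors the post-call fix-up in the Add stubs above:
// tsan's fetch_add hands back the old value, while atomic.AddInt32 must
// return the new value, so the stub adds the delta into the result slot
// (asm: MOVL add+8(FP), AX; ADDL AX, ret+16(FP)).
func addFetchFromFetchAdd(oldFromTsan, delta int32) int32 {
	return oldFromTsan + delta
}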


@@ -200,86 +200,86 @@ TEXT runtime·racefuncexit(SB), NOSPLIT, $0-0
// R0, R1, R2 set in racecallatomic
// Load
TEXT syncatomic·LoadInt32(SB), NOSPLIT, $0
TEXT syncatomic·LoadInt32(SB), NOSPLIT, $0-12
GO_ARGS
MOVD $__tsan_go_atomic32_load(SB), R9
BL racecallatomic<>(SB)
RET
TEXT syncatomic·LoadInt64(SB), NOSPLIT, $0
TEXT syncatomic·LoadInt64(SB), NOSPLIT, $0-16
GO_ARGS
MOVD $__tsan_go_atomic64_load(SB), R9
BL racecallatomic<>(SB)
RET
TEXT syncatomic·LoadUint32(SB), NOSPLIT, $0
TEXT syncatomic·LoadUint32(SB), NOSPLIT, $0-12
GO_ARGS
JMP syncatomic·LoadInt32(SB)
TEXT syncatomic·LoadUint64(SB), NOSPLIT, $0
TEXT syncatomic·LoadUint64(SB), NOSPLIT, $0-16
GO_ARGS
JMP syncatomic·LoadInt64(SB)
TEXT syncatomic·LoadUintptr(SB), NOSPLIT, $0
TEXT syncatomic·LoadUintptr(SB), NOSPLIT, $0-16
GO_ARGS
JMP syncatomic·LoadInt64(SB)
TEXT syncatomic·LoadPointer(SB), NOSPLIT, $0
TEXT syncatomic·LoadPointer(SB), NOSPLIT, $0-16
GO_ARGS
JMP syncatomic·LoadInt64(SB)
// Store
TEXT syncatomic·StoreInt32(SB), NOSPLIT, $0
TEXT syncatomic·StoreInt32(SB), NOSPLIT, $0-12
GO_ARGS
MOVD $__tsan_go_atomic32_store(SB), R9
BL racecallatomic<>(SB)
RET
TEXT syncatomic·StoreInt64(SB), NOSPLIT, $0
TEXT syncatomic·StoreInt64(SB), NOSPLIT, $0-16
GO_ARGS
MOVD $__tsan_go_atomic64_store(SB), R9
BL racecallatomic<>(SB)
RET
TEXT syncatomic·StoreUint32(SB), NOSPLIT, $0
TEXT syncatomic·StoreUint32(SB), NOSPLIT, $0-12
GO_ARGS
JMP syncatomic·StoreInt32(SB)
TEXT syncatomic·StoreUint64(SB), NOSPLIT, $0
TEXT syncatomic·StoreUint64(SB), NOSPLIT, $0-16
GO_ARGS
JMP syncatomic·StoreInt64(SB)
TEXT syncatomic·StoreUintptr(SB), NOSPLIT, $0
TEXT syncatomic·StoreUintptr(SB), NOSPLIT, $0-16
GO_ARGS
JMP syncatomic·StoreInt64(SB)
// Swap
TEXT syncatomic·SwapInt32(SB), NOSPLIT, $0
TEXT syncatomic·SwapInt32(SB), NOSPLIT, $0-20
GO_ARGS
MOVD $__tsan_go_atomic32_exchange(SB), R9
BL racecallatomic<>(SB)
RET
TEXT syncatomic·SwapInt64(SB), NOSPLIT, $0
TEXT syncatomic·SwapInt64(SB), NOSPLIT, $0-24
GO_ARGS
MOVD $__tsan_go_atomic64_exchange(SB), R9
BL racecallatomic<>(SB)
RET
TEXT syncatomic·SwapUint32(SB), NOSPLIT, $0
TEXT syncatomic·SwapUint32(SB), NOSPLIT, $0-20
GO_ARGS
JMP syncatomic·SwapInt32(SB)
TEXT syncatomic·SwapUint64(SB), NOSPLIT, $0
TEXT syncatomic·SwapUint64(SB), NOSPLIT, $0-24
GO_ARGS
JMP syncatomic·SwapInt64(SB)
TEXT syncatomic·SwapUintptr(SB), NOSPLIT, $0
TEXT syncatomic·SwapUintptr(SB), NOSPLIT, $0-24
GO_ARGS
JMP syncatomic·SwapInt64(SB)
// Add
TEXT syncatomic·AddInt32(SB), NOSPLIT, $0
TEXT syncatomic·AddInt32(SB), NOSPLIT, $0-20
GO_ARGS
MOVD $__tsan_go_atomic32_fetch_add(SB), R9
BL racecallatomic<>(SB)
@@ -289,7 +289,7 @@ TEXT syncatomic·AddInt32(SB), NOSPLIT, $0
MOVW R0, ret+16(FP)
RET
TEXT syncatomic·AddInt64(SB), NOSPLIT, $0
TEXT syncatomic·AddInt64(SB), NOSPLIT, $0-24
GO_ARGS
MOVD $__tsan_go_atomic64_fetch_add(SB), R9
BL racecallatomic<>(SB)
@@ -299,40 +299,40 @@ TEXT syncatomic·AddInt64(SB), NOSPLIT, $0
MOVD R0, ret+16(FP)
RET
TEXT syncatomic·AddUint32(SB), NOSPLIT, $0
TEXT syncatomic·AddUint32(SB), NOSPLIT, $0-20
GO_ARGS
JMP syncatomic·AddInt32(SB)
TEXT syncatomic·AddUint64(SB), NOSPLIT, $0
TEXT syncatomic·AddUint64(SB), NOSPLIT, $0-24
GO_ARGS
JMP syncatomic·AddInt64(SB)
TEXT syncatomic·AddUintptr(SB), NOSPLIT, $0
TEXT syncatomic·AddUintptr(SB), NOSPLIT, $0-24
GO_ARGS
JMP syncatomic·AddInt64(SB)
// CompareAndSwap
TEXT syncatomic·CompareAndSwapInt32(SB), NOSPLIT, $0
TEXT syncatomic·CompareAndSwapInt32(SB), NOSPLIT, $0-17
GO_ARGS
MOVD $__tsan_go_atomic32_compare_exchange(SB), R9
BL racecallatomic<>(SB)
RET
TEXT syncatomic·CompareAndSwapInt64(SB), NOSPLIT, $0
TEXT syncatomic·CompareAndSwapInt64(SB), NOSPLIT, $0-25
GO_ARGS
MOVD $__tsan_go_atomic64_compare_exchange(SB), R9
BL racecallatomic<>(SB)
RET
TEXT syncatomic·CompareAndSwapUint32(SB), NOSPLIT, $0
TEXT syncatomic·CompareAndSwapUint32(SB), NOSPLIT, $0-17
GO_ARGS
JMP syncatomic·CompareAndSwapInt32(SB)
TEXT syncatomic·CompareAndSwapUint64(SB), NOSPLIT, $0
TEXT syncatomic·CompareAndSwapUint64(SB), NOSPLIT, $0-25
GO_ARGS
JMP syncatomic·CompareAndSwapInt64(SB)
TEXT syncatomic·CompareAndSwapUintptr(SB), NOSPLIT, $0
TEXT syncatomic·CompareAndSwapUintptr(SB), NOSPLIT, $0-25
GO_ARGS
JMP syncatomic·CompareAndSwapInt64(SB)


@@ -207,78 +207,95 @@ TEXT runtime·racefuncexit(SB), NOSPLIT, $0-0
// R3, R4, R5 set in racecallatomic
// Load atomic in tsan
TEXT syncatomic·LoadInt32(SB), NOSPLIT, $0-0
TEXT syncatomic·LoadInt32(SB), NOSPLIT, $0-12
GO_ARGS
// void __tsan_go_atomic32_load(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
MOVD $__tsan_go_atomic32_load(SB), R8
ADD $32, R1, R6 // addr of caller's 1st arg
BR racecallatomic<>(SB)
RET
TEXT syncatomic·LoadInt64(SB), NOSPLIT, $0-0
TEXT syncatomic·LoadInt64(SB), NOSPLIT, $0-16
GO_ARGS
// void __tsan_go_atomic64_load(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
MOVD $__tsan_go_atomic64_load(SB), R8
ADD $32, R1, R6 // addr of caller's 1st arg
BR racecallatomic<>(SB)
RET
TEXT syncatomic·LoadUint32(SB), NOSPLIT, $0-0
TEXT syncatomic·LoadUint32(SB), NOSPLIT, $0-12
GO_ARGS
BR syncatomic·LoadInt32(SB)
TEXT syncatomic·LoadUint64(SB), NOSPLIT, $0-0
TEXT syncatomic·LoadUint64(SB), NOSPLIT, $0-16
GO_ARGS
BR syncatomic·LoadInt64(SB)
TEXT syncatomic·LoadUintptr(SB), NOSPLIT, $0-0
TEXT syncatomic·LoadUintptr(SB), NOSPLIT, $0-16
GO_ARGS
BR syncatomic·LoadInt64(SB)
TEXT syncatomic·LoadPointer(SB), NOSPLIT, $0-0
TEXT syncatomic·LoadPointer(SB), NOSPLIT, $0-16
GO_ARGS
BR syncatomic·LoadInt64(SB)
// Store atomic in tsan
TEXT syncatomic·StoreInt32(SB), NOSPLIT, $0-0
TEXT syncatomic·StoreInt32(SB), NOSPLIT, $0-12
GO_ARGS
// void __tsan_go_atomic32_store(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
MOVD $__tsan_go_atomic32_store(SB), R8
ADD $32, R1, R6 // addr of caller's 1st arg
BR racecallatomic<>(SB)
TEXT syncatomic·StoreInt64(SB), NOSPLIT, $0-0
TEXT syncatomic·StoreInt64(SB), NOSPLIT, $0-16
GO_ARGS
// void __tsan_go_atomic64_store(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
MOVD $__tsan_go_atomic64_store(SB), R8
ADD $32, R1, R6 // addr of caller's 1st arg
BR racecallatomic<>(SB)
TEXT syncatomic·StoreUint32(SB), NOSPLIT, $0-0
TEXT syncatomic·StoreUint32(SB), NOSPLIT, $0-12
GO_ARGS
BR syncatomic·StoreInt32(SB)
TEXT syncatomic·StoreUint64(SB), NOSPLIT, $0-0
TEXT syncatomic·StoreUint64(SB), NOSPLIT, $0-16
GO_ARGS
BR syncatomic·StoreInt64(SB)
TEXT syncatomic·StoreUintptr(SB), NOSPLIT, $0-0
TEXT syncatomic·StoreUintptr(SB), NOSPLIT, $0-16
GO_ARGS
BR syncatomic·StoreInt64(SB)
// Swap in tsan
TEXT syncatomic·SwapInt32(SB), NOSPLIT, $0-0
TEXT syncatomic·SwapInt32(SB), NOSPLIT, $0-20
GO_ARGS
// void __tsan_go_atomic32_exchange(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
MOVD $__tsan_go_atomic32_exchange(SB), R8
ADD $32, R1, R6 // addr of caller's 1st arg
BR racecallatomic<>(SB)
TEXT syncatomic·SwapInt64(SB), NOSPLIT, $0-0
TEXT syncatomic·SwapInt64(SB), NOSPLIT, $0-24
GO_ARGS
// void __tsan_go_atomic64_exchange(ThreadState *thr, uptr cpc, uptr pc, u8 *a)
MOVD $__tsan_go_atomic64_exchange(SB), R8
ADD $32, R1, R6 // addr of caller's 1st arg
BR racecallatomic<>(SB)
TEXT syncatomic·SwapUint32(SB), NOSPLIT, $0-0
TEXT syncatomic·SwapUint32(SB), NOSPLIT, $0-20
GO_ARGS
BR syncatomic·SwapInt32(SB)
TEXT syncatomic·SwapUint64(SB), NOSPLIT, $0-0
TEXT syncatomic·SwapUint64(SB), NOSPLIT, $0-24
GO_ARGS
BR syncatomic·SwapInt64(SB)
TEXT syncatomic·SwapUintptr(SB), NOSPLIT, $0-0
TEXT syncatomic·SwapUintptr(SB), NOSPLIT, $0-24
GO_ARGS
BR syncatomic·SwapInt64(SB)
// Add atomic in tsan
TEXT syncatomic·AddInt32(SB), NOSPLIT, $0-0
TEXT syncatomic·AddInt32(SB), NOSPLIT, $0-20
GO_ARGS
// void __tsan_go_atomic32_fetch_add(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
MOVD $__tsan_go_atomic32_fetch_add(SB), R8
ADD $64, R1, R6 // addr of caller's 1st arg
@@ -291,7 +308,8 @@ TEXT syncatomic·AddInt32(SB), NOSPLIT, $0-0
MOVW R3, ret+16(FP)
RET
TEXT syncatomic·AddInt64(SB), NOSPLIT, $0-0
TEXT syncatomic·AddInt64(SB), NOSPLIT, $0-24
GO_ARGS
// void __tsan_go_atomic64_fetch_add(ThreadState *thr, uptr cpc, uptr pc, u8 *a);
MOVD $__tsan_go_atomic64_fetch_add(SB), R8
ADD $64, R1, R6 // addr of caller's 1st arg
@@ -304,37 +322,45 @@ TEXT syncatomic·AddInt64(SB), NOSPLIT, $0-0
MOVD R3, ret+16(FP)
RET
TEXT syncatomic·AddUint32(SB), NOSPLIT, $0-0
TEXT syncatomic·AddUint32(SB), NOSPLIT, $0-20
GO_ARGS
BR syncatomic·AddInt32(SB)
TEXT syncatomic·AddUint64(SB), NOSPLIT, $0-0
TEXT syncatomic·AddUint64(SB), NOSPLIT, $0-24
GO_ARGS
BR syncatomic·AddInt64(SB)
TEXT syncatomic·AddUintptr(SB), NOSPLIT, $0-0
TEXT syncatomic·AddUintptr(SB), NOSPLIT, $0-24
GO_ARGS
BR syncatomic·AddInt64(SB)
// CompareAndSwap in tsan
TEXT syncatomic·CompareAndSwapInt32(SB), NOSPLIT, $0-0
TEXT syncatomic·CompareAndSwapInt32(SB), NOSPLIT, $0-17
GO_ARGS
// void __tsan_go_atomic32_compare_exchange(
// ThreadState *thr, uptr cpc, uptr pc, u8 *a)
MOVD $__tsan_go_atomic32_compare_exchange(SB), R8
ADD $32, R1, R6 // addr of caller's 1st arg
BR racecallatomic<>(SB)
TEXT syncatomic·CompareAndSwapInt64(SB), NOSPLIT, $0-0
TEXT syncatomic·CompareAndSwapInt64(SB), NOSPLIT, $0-25
GO_ARGS
// void __tsan_go_atomic64_compare_exchange(
// ThreadState *thr, uptr cpc, uptr pc, u8 *a)
MOVD $__tsan_go_atomic64_compare_exchange(SB), R8
ADD $32, R1, R6 // addr of caller's 1st arg
BR racecallatomic<>(SB)
TEXT syncatomic·CompareAndSwapUint32(SB), NOSPLIT, $0-0
TEXT syncatomic·CompareAndSwapUint32(SB), NOSPLIT, $0-17
GO_ARGS
BR syncatomic·CompareAndSwapInt32(SB)
TEXT syncatomic·CompareAndSwapUint64(SB), NOSPLIT, $0-0
TEXT syncatomic·CompareAndSwapUint64(SB), NOSPLIT, $0-25
GO_ARGS
BR syncatomic·CompareAndSwapInt64(SB)
TEXT syncatomic·CompareAndSwapUintptr(SB), NOSPLIT, $0-0
TEXT syncatomic·CompareAndSwapUintptr(SB), NOSPLIT, $0-25
GO_ARGS
BR syncatomic·CompareAndSwapInt64(SB)
// Common function used to call tsan's atomic functions