// Copyright 2009 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package runtime

import (
	"runtime/internal/atomic"
	"unsafe"
)

// These functions cannot have go:noescape annotations,
// because while ptr does not escape, new does.
// If new is marked as not escaping, the compiler will make incorrect
// escape analysis decisions about the pointer value being stored.
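//
// For illustration only (a hypothetical declaration, not part of this file):
// annotating such a function as below would assert that new does not escape,
// which could let the compiler stack-allocate the object new points to even
// though the store publishes it:
//
//	//go:noescape
//	func storep(ptr unsafe.Pointer, new unsafe.Pointer)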

// atomicwb performs a write barrier before an atomic pointer write.
// The caller should guard the call with "if writeBarrier.enabled".
//
//go:nosplit
func atomicwb(ptr *unsafe.Pointer, new unsafe.Pointer) {
	slot := (*uintptr)(unsafe.Pointer(ptr))
	if !getg().m.p.ptr().wbBuf.putFast(*slot, uintptr(new)) {
		wbBufFlush(slot, uintptr(new))
	}
}
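
// A minimal sketch of the guarded call pattern described above; atomicstorep
// below is the in-tree example:
//
//	if writeBarrier.enabled {
//		atomicwb((*unsafe.Pointer)(ptr), new)
//	}
//	atomic.StorepNoWB(noescape(ptr), new)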

// atomicstorep performs *ptr = new atomically and invokes a write barrier.
//
//go:nosplit
func atomicstorep(ptr unsafe.Pointer, new unsafe.Pointer) {
	if writeBarrier.enabled {
		atomicwb((*unsafe.Pointer)(ptr), new)
	}
	atomic.StorepNoWB(noescape(ptr), new)
}

// Like above, but implement in terms of sync/atomic's uintptr operations.
// We cannot just call the runtime routines, because the race detector expects
// to be able to intercept the sync/atomic forms but not the runtime forms.
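//
// For illustration only: user code such as
//
//	var p unsafe.Pointer
//	atomic.StorePointer(&p, unsafe.Pointer(new(int))) // package sync/atomic
//
// reaches sync_atomic_StorePointer below through the go:linkname directives,
// and the race detector can still intercept it as a sync/atomic call.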

//go:linkname sync_atomic_StoreUintptr sync..z2fatomic.StoreUintptr
func sync_atomic_StoreUintptr(ptr *uintptr, new uintptr)

//go:linkname sync_atomic_StorePointer sync..z2fatomic.StorePointer
//go:nosplit
func sync_atomic_StorePointer(ptr *unsafe.Pointer, new unsafe.Pointer) {
	if writeBarrier.enabled {
		atomicwb(ptr, new)
	}
	sync_atomic_StoreUintptr((*uintptr)(unsafe.Pointer(ptr)), uintptr(new))
}

//go:linkname sync_atomic_SwapUintptr sync..z2fatomic.SwapUintptr
func sync_atomic_SwapUintptr(ptr *uintptr, new uintptr) uintptr

//go:linkname sync_atomic_SwapPointer sync..z2fatomic.SwapPointer
//go:nosplit
func sync_atomic_SwapPointer(ptr *unsafe.Pointer, new unsafe.Pointer) unsafe.Pointer {
	if writeBarrier.enabled {
		atomicwb(ptr, new)
	}
	old := unsafe.Pointer(sync_atomic_SwapUintptr((*uintptr)(noescape(unsafe.Pointer(ptr))), uintptr(new)))
	return old
}

//go:linkname sync_atomic_CompareAndSwapUintptr sync..z2fatomic.CompareAndSwapUintptr
func sync_atomic_CompareAndSwapUintptr(ptr *uintptr, old, new uintptr) bool

//go:linkname sync_atomic_CompareAndSwapPointer sync..z2fatomic.CompareAndSwapPointer
//go:nosplit
func sync_atomic_CompareAndSwapPointer(ptr *unsafe.Pointer, old, new unsafe.Pointer) bool {
	if writeBarrier.enabled {
		atomicwb(ptr, new)
	}
	return sync_atomic_CompareAndSwapUintptr((*uintptr)(noescape(unsafe.Pointer(ptr))), uintptr(old), uintptr(new))
}