Compare commits

4 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | f5ef5a686c |  |
|  | f3a584b498 |  |
|  | 57388d3e2f |  |
|  | 0d3d2f971b |  |
README.md (18 changed lines)
@@ -42,15 +42,15 @@ The Windows binary provided here also supports Windows 7 and Windows Server 2008

| OS | Architecture | Filename | SHA‑256 Hash |
|----|--------------|----------|--------------|
| **macOS** | Intel (amd64) | [go-legacy-win7-1.25.0-2.darwin_amd64.tar.gz](https://github.com/thongtech/go-legacy-win7/releases/download/v1.25.0-2/go-legacy-win7-1.25.0-2.darwin_amd64.tar.gz) | `2f3d1089b1770f2ea5f803cb95841da7a08a7f4b8f5c8cbe8889372fd29ca7fa` |
| macOS | Apple (ARM64) | [go-legacy-win7-1.25.0-2.darwin_arm64.tar.gz](https://github.com/thongtech/go-legacy-win7/releases/download/v1.25.0-2/go-legacy-win7-1.25.0-2.darwin_arm64.tar.gz) | `f56f5bf1dc7e41aa7af16cdfa6829292ce46a1ed32b3d481528670a9fafd78d7` |
| **Linux** | x86 (386) | [go-legacy-win7-1.25.0-2.linux_386.tar.gz](https://github.com/thongtech/go-legacy-win7/releases/download/v1.25.0-2/go-legacy-win7-1.25.0-2.linux_386.tar.gz) | `6fceeb092e1f30661bf50f20a7c671e893b3cd6de5aa566a4a4ef672fd12c5b0` |
| Linux | x64 (amd64) | [go-legacy-win7-1.25.0-2.linux_amd64.tar.gz](https://github.com/thongtech/go-legacy-win7/releases/download/v1.25.0-2/go-legacy-win7-1.25.0-2.linux_amd64.tar.gz) | `21546d98267d4650096d9ec447480b516e0242bcec8de95a18b0f28ed53e393e` |
| Linux | ARM (32‑bit) | [go-legacy-win7-1.25.0-2.linux_arm.tar.gz](https://github.com/thongtech/go-legacy-win7/releases/download/v1.25.0-2/go-legacy-win7-1.25.0-2.linux_arm.tar.gz) | `3b5465afa3cd011926b7168dd4abe5d75c55ef03ce165c7b88e07a79b3dca6cc` |
| Linux | ARM64 | [go-legacy-win7-1.25.0-2.linux_arm64.tar.gz](https://github.com/thongtech/go-legacy-win7/releases/download/v1.25.0-2/go-legacy-win7-1.25.0-2.linux_arm64.tar.gz) | `b103720b046a88e826df741bc979daa3343c95d734f36b8d556897a5acda1c84` |
| **Windows** | x86 (386) | [go-legacy-win7-1.25.0-2.windows_386.zip](https://github.com/thongtech/go-legacy-win7/releases/download/v1.25.0-2/go-legacy-win7-1.25.0-2.windows_386.zip) | `0f0a8f4e68ffbadb99ee60779efed148e629410208633ff59bfc2bf7de988aa8` |
| Windows | x64 (amd64) | [go-legacy-win7-1.25.0-2.windows_amd64.zip](https://github.com/thongtech/go-legacy-win7/releases/download/v1.25.0-2/go-legacy-win7-1.25.0-2.windows_amd64.zip) | `ce54380d04fc395ed0eeaec49ebcad049d92d4d274d23d1d192cb7e897ad4be2` |
| Windows | ARM64 | [go-legacy-win7-1.25.0-2.windows_arm64.zip](https://github.com/thongtech/go-legacy-win7/releases/download/v1.25.0-2/go-legacy-win7-1.25.0-2.windows_arm64.zip) | `35fdd7d381377d34e019d66795574379b4da8634a5ce3d226e0d8d2a5d9d101c` |
| **macOS** | Intel (amd64) | [go-legacy-win7-1.25.1-1.darwin_amd64.tar.gz](https://github.com/thongtech/go-legacy-win7/releases/download/v1.25.1-1/go-legacy-win7-1.25.1-1.darwin_amd64.tar.gz) | `4bd9d9a3079c1b4d81b22d999697903fa9ac95eb18dee9376a88e6fa82474943` |
| macOS | Apple (ARM64) | [go-legacy-win7-1.25.1-1.darwin_arm64.tar.gz](https://github.com/thongtech/go-legacy-win7/releases/download/v1.25.1-1/go-legacy-win7-1.25.1-1.darwin_arm64.tar.gz) | `d269eb12273f6a94df965a4bcd46b67aafe45467701b6e1ddf0df31d687b55bb` |
| **Linux** | x86 (386) | [go-legacy-win7-1.25.1-1.linux_386.tar.gz](https://github.com/thongtech/go-legacy-win7/releases/download/v1.25.1-1/go-legacy-win7-1.25.1-1.linux_386.tar.gz) | `de0d649fff56dd0fcdd335c5a126ad5847cf0a227724cff44f86cb78d14ddc43` |
| Linux | x64 (amd64) | [go-legacy-win7-1.25.1-1.linux_amd64.tar.gz](https://github.com/thongtech/go-legacy-win7/releases/download/v1.25.1-1/go-legacy-win7-1.25.1-1.linux_amd64.tar.gz) | `452aafba7800600da66dfeb36d3cf291f9c01381fdc6e24441aba7697c49d33a` |
| Linux | ARM (32‑bit) | [go-legacy-win7-1.25.1-1.linux_arm.tar.gz](https://github.com/thongtech/go-legacy-win7/releases/download/v1.25.1-1/go-legacy-win7-1.25.1-1.linux_arm.tar.gz) | `eb80a1f41bd3e21c0f857500b2d30afcd17311f902ea3c04c13e345664a54ff6` |
| Linux | ARM64 | [go-legacy-win7-1.25.1-1.linux_arm64.tar.gz](https://github.com/thongtech/go-legacy-win7/releases/download/v1.25.1-1/go-legacy-win7-1.25.1-1.linux_arm64.tar.gz) | `a05468e7b90fbeee70610dc4c1a439d7929a0ec06765d5cb0454cd900e4f90cb` |
| **Windows** | x86 (386) | [go-legacy-win7-1.25.1-1.windows_386.zip](https://github.com/thongtech/go-legacy-win7/releases/download/v1.25.1-1/go-legacy-win7-1.25.1-1.windows_386.zip) | `c7c90918f8506e2fd28270a38a03e5200635e2183e4365090e7b00aca5a320be` |
| Windows | x64 (amd64) | [go-legacy-win7-1.25.1-1.windows_amd64.zip](https://github.com/thongtech/go-legacy-win7/releases/download/v1.25.1-1/go-legacy-win7-1.25.1-1.windows_amd64.zip) | `aa6c2a22bb7c8e4ac632e3656ce6eb3461de7ea0645064496f161bc4edc47555` |
| Windows | ARM64 | [go-legacy-win7-1.25.1-1.windows_arm64.zip](https://github.com/thongtech/go-legacy-win7/releases/download/v1.25.1-1/go-legacy-win7-1.25.1-1.windows_arm64.zip) | `fe498473f8f6cc7ad7f5d41a46acebc8f6ac1be7edaeb75f356eb604061b4276` |

### Before you begin

To avoid PATH/GOROOT conflicts and mixed toolchains, uninstall any existing Go installation first.
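Before installing, a downloaded archive can be checked against the SHA‑256 listed in the table. A minimal Go sketch of that check follows; the filename and expected hash are copied from the Linux x64 (amd64) row above, so substitute the file you actually downloaded.

```go
// sha256check.go: recompute a file's SHA-256 and compare it with the published hash.
package main

import (
    "crypto/sha256"
    "encoding/hex"
    "fmt"
    "io"
    "log"
    "os"
)

func main() {
    // Values taken from the linux_amd64 row of the table above.
    const name = "go-legacy-win7-1.25.1-1.linux_amd64.tar.gz"
    const want = "452aafba7800600da66dfeb36d3cf291f9c01381fdc6e24441aba7697c49d33a"

    f, err := os.Open(name)
    if err != nil {
        log.Fatal(err)
    }
    defer f.Close()

    h := sha256.New()
    if _, err := io.Copy(h, f); err != nil {
        log.Fatal(err)
    }
    got := hex.EncodeToString(h.Sum(nil))
    fmt.Printf("computed %s\nmatch: %v\n", got, got == want)
}
```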
patches/0007-Drop-public-And-Or-ops-and-race-instrumentation.patch (new file, 8368 added lines)

File diff suppressed because it is too large.
```diff
@@ -0,0 +1,40 @@
From c63f37858041ffd2bcf751f57632c90d630eddc2 Mon Sep 17 00:00:00 2001
From: Vorapol Rinsatitnon <vorapol.r@pm.me>
Date: Thu, 18 Sep 2025 12:01:09 +0800
Subject: [PATCH] Replace atomic Or with compare-and-swap loop

---
 src/sync/waitgroup.go | 17 +++++++++++++----
 1 file changed, 13 insertions(+), 4 deletions(-)

diff --git a/src/sync/waitgroup.go b/src/sync/waitgroup.go
index 5b035aa3..8aeae7bf 100644
--- a/src/sync/waitgroup.go
+++ b/src/sync/waitgroup.go
@@ -94,10 +94,19 @@ func (wg *WaitGroup) Add(delta int) {
 		fatal("sync: WaitGroup.Add called from multiple synctest bubbles")
 	case synctest.CurrentBubble:
 		bubbled = true
-		state := wg.state.Or(waitGroupBubbleFlag)
-		if state != 0 && state&waitGroupBubbleFlag == 0 {
-			// Add has been called from outside this bubble.
-			fatal("sync: WaitGroup.Add called from inside and outside synctest bubble")
+		// Use compare-and-swap loop to implement atomic Or operation
+		// since race detector doesn't have __tsan_go_atomic64_fetch_or
+		for {
+			old := wg.state.Load()
+			new := old | waitGroupBubbleFlag
+			if wg.state.CompareAndSwap(old, new) {
+				state := old
+				if state != 0 && state&waitGroupBubbleFlag == 0 {
+					// Add has been called from outside this bubble.
+					fatal("sync: WaitGroup.Add called from inside and outside synctest bubble")
+				}
+				break
+			}
 		}
 	}
 }
--
2.47.2
```
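The comment inside the patch states the motivation: the prebuilt race runtime used by this toolchain has no `__tsan_go_atomic64_fetch_or` hook, so the bubble flag is set with a Load/CompareAndSwap retry loop that behaves like an atomic fetch-OR and still yields the value observed before the OR. A self-contained sketch of the same pattern (the helper name `orUint64` is illustrative and not part of the patch):

```go
package main

import (
    "fmt"
    "sync/atomic"
)

// orUint64 emulates an atomic fetch-OR with a compare-and-swap loop and,
// like Uint64.Or, returns the value held immediately before the OR.
func orUint64(v *atomic.Uint64, mask uint64) (old uint64) {
    for {
        old = v.Load()
        if v.CompareAndSwap(old, old|mask) {
            return old
        }
        // Another goroutine changed v between Load and CompareAndSwap; retry.
    }
}

func main() {
    var flags atomic.Uint64
    prev := orUint64(&flags, 1<<63)
    fmt.Printf("prev=%#x now=%#x\n", prev, flags.Load())
}
```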
```diff
@@ -406,10 +406,6 @@ var __tsan_report_count byte
//go:cgo_import_static __tsan_go_atomic64_exchange
//go:cgo_import_static __tsan_go_atomic32_fetch_add
//go:cgo_import_static __tsan_go_atomic64_fetch_add
//go:cgo_import_static __tsan_go_atomic32_fetch_and
//go:cgo_import_static __tsan_go_atomic64_fetch_and
//go:cgo_import_static __tsan_go_atomic32_fetch_or
//go:cgo_import_static __tsan_go_atomic64_fetch_or
//go:cgo_import_static __tsan_go_atomic32_compare_exchange
//go:cgo_import_static __tsan_go_atomic64_compare_exchange

@@ -738,36 +734,6 @@ func abigen_sync_atomic_AddUint64(addr *uint64, delta uint64) (new uint64)
//go:linkname abigen_sync_atomic_AddUintptr sync/atomic.AddUintptr
func abigen_sync_atomic_AddUintptr(addr *uintptr, delta uintptr) (new uintptr)

//go:linkname abigen_sync_atomic_AndInt32 sync/atomic.AndInt32
func abigen_sync_atomic_AndInt32(addr *int32, mask int32) (old int32)

//go:linkname abigen_sync_atomic_AndUint32 sync/atomic.AndUint32
func abigen_sync_atomic_AndUint32(addr *uint32, mask uint32) (old uint32)

//go:linkname abigen_sync_atomic_AndInt64 sync/atomic.AndInt64
func abigen_sync_atomic_AndInt64(addr *int64, mask int64) (old int64)

//go:linkname abigen_sync_atomic_AndUint64 sync/atomic.AndUint64
func abigen_sync_atomic_AndUint64(addr *uint64, mask uint64) (old uint64)

//go:linkname abigen_sync_atomic_AndUintptr sync/atomic.AndUintptr
func abigen_sync_atomic_AndUintptr(addr *uintptr, mask uintptr) (old uintptr)

//go:linkname abigen_sync_atomic_OrInt32 sync/atomic.OrInt32
func abigen_sync_atomic_OrInt32(addr *int32, mask int32) (old int32)

//go:linkname abigen_sync_atomic_OrUint32 sync/atomic.OrUint32
func abigen_sync_atomic_OrUint32(addr *uint32, mask uint32) (old uint32)

//go:linkname abigen_sync_atomic_OrInt64 sync/atomic.OrInt64
func abigen_sync_atomic_OrInt64(addr *int64, mask int64) (old int64)

//go:linkname abigen_sync_atomic_OrUint64 sync/atomic.OrUint64
func abigen_sync_atomic_OrUint64(addr *uint64, mask uint64) (old uint64)

//go:linkname abigen_sync_atomic_OrUintptr sync/atomic.OrUintptr
func abigen_sync_atomic_OrUintptr(addr *uintptr, mask uintptr) (old uintptr)

//go:linkname abigen_sync_atomic_CompareAndSwapInt32 sync/atomic.CompareAndSwapInt32
func abigen_sync_atomic_CompareAndSwapInt32(addr *int32, old, new int32) (swapped bool)
```
Binary file not shown.
```diff
@@ -303,57 +303,6 @@ TEXT sync∕atomic·AddUintptr(SB), NOSPLIT, $0-24
    GO_ARGS
    JMP sync∕atomic·AddInt64(SB)

// And
TEXT sync∕atomic·AndInt32(SB), NOSPLIT|NOFRAME, $0-20
    GO_ARGS
    MOVQ $__tsan_go_atomic32_fetch_and(SB), AX
    CALL racecallatomic<>(SB)
    RET

TEXT sync∕atomic·AndInt64(SB), NOSPLIT|NOFRAME, $0-24
    GO_ARGS
    MOVQ $__tsan_go_atomic64_fetch_and(SB), AX
    CALL racecallatomic<>(SB)
    RET

TEXT sync∕atomic·AndUint32(SB), NOSPLIT, $0-20
    GO_ARGS
    JMP sync∕atomic·AndInt32(SB)

TEXT sync∕atomic·AndUint64(SB), NOSPLIT, $0-24
    GO_ARGS
    JMP sync∕atomic·AndInt64(SB)

TEXT sync∕atomic·AndUintptr(SB), NOSPLIT, $0-24
    GO_ARGS
    JMP sync∕atomic·AndInt64(SB)

// Or
TEXT sync∕atomic·OrInt32(SB), NOSPLIT|NOFRAME, $0-20
    GO_ARGS
    MOVQ $__tsan_go_atomic32_fetch_or(SB), AX
    CALL racecallatomic<>(SB)
    RET

TEXT sync∕atomic·OrInt64(SB), NOSPLIT|NOFRAME, $0-24
    GO_ARGS
    MOVQ $__tsan_go_atomic64_fetch_or(SB), AX
    CALL racecallatomic<>(SB)
    RET

TEXT sync∕atomic·OrUint32(SB), NOSPLIT, $0-20
    GO_ARGS
    JMP sync∕atomic·OrInt32(SB)

TEXT sync∕atomic·OrUint64(SB), NOSPLIT, $0-24
    GO_ARGS
    JMP sync∕atomic·OrInt64(SB)

TEXT sync∕atomic·OrUintptr(SB), NOSPLIT, $0-24
    GO_ARGS
    JMP sync∕atomic·OrInt64(SB)

// CompareAndSwap
TEXT sync∕atomic·CompareAndSwapInt32(SB), NOSPLIT|NOFRAME, $0-17
    GO_ARGS
```
```diff
@@ -312,56 +312,6 @@ TEXT sync∕atomic·AddUintptr(SB), NOSPLIT, $0-24
    GO_ARGS
    JMP sync∕atomic·AddInt64(SB)

// And
TEXT sync∕atomic·AndInt32(SB), NOSPLIT, $0-20
    GO_ARGS
    MOVD $__tsan_go_atomic32_fetch_and(SB), R9
    BL racecallatomic<>(SB)
    RET

TEXT sync∕atomic·AndInt64(SB), NOSPLIT, $0-24
    GO_ARGS
    MOVD $__tsan_go_atomic64_fetch_and(SB), R9
    BL racecallatomic<>(SB)
    RET

TEXT sync∕atomic·AndUint32(SB), NOSPLIT, $0-20
    GO_ARGS
    JMP sync∕atomic·AndInt32(SB)

TEXT sync∕atomic·AndUint64(SB), NOSPLIT, $0-24
    GO_ARGS
    JMP sync∕atomic·AndInt64(SB)

TEXT sync∕atomic·AndUintptr(SB), NOSPLIT, $0-24
    GO_ARGS
    JMP sync∕atomic·AndInt64(SB)

// Or
TEXT sync∕atomic·OrInt32(SB), NOSPLIT, $0-20
    GO_ARGS
    MOVD $__tsan_go_atomic32_fetch_or(SB), R9
    BL racecallatomic<>(SB)
    RET

TEXT sync∕atomic·OrInt64(SB), NOSPLIT, $0-24
    GO_ARGS
    MOVD $__tsan_go_atomic64_fetch_or(SB), R9
    BL racecallatomic<>(SB)
    RET

TEXT sync∕atomic·OrUint32(SB), NOSPLIT, $0-20
    GO_ARGS
    JMP sync∕atomic·OrInt32(SB)

TEXT sync∕atomic·OrUint64(SB), NOSPLIT, $0-24
    GO_ARGS
    JMP sync∕atomic·OrInt64(SB)

TEXT sync∕atomic·OrUintptr(SB), NOSPLIT, $0-24
    GO_ARGS
    JMP sync∕atomic·OrInt64(SB)

// CompareAndSwap
TEXT sync∕atomic·CompareAndSwapInt32(SB), NOSPLIT, $0-17
    GO_ARGS
```
```diff
@@ -325,52 +325,6 @@ TEXT sync∕atomic·AddUintptr(SB), NOSPLIT, $0-24
    GO_ARGS
    BR sync∕atomic·AddInt64(SB)

// And
TEXT sync∕atomic·AndInt32(SB), NOSPLIT, $0-20
    GO_ARGS
    MOVD $__tsan_go_atomic32_fetch_and(SB), R8
    BR racecallatomic<>(SB)

TEXT sync∕atomic·AndInt64(SB), NOSPLIT, $0-24
    GO_ARGS
    MOVD $__tsan_go_atomic64_fetch_and(SB), R8
    BR racecallatomic<>(SB)

TEXT sync∕atomic·AndUint32(SB), NOSPLIT, $0-20
    GO_ARGS
    BR sync∕atomic·AndInt32(SB)

TEXT sync∕atomic·AndUint64(SB), NOSPLIT, $0-24
    GO_ARGS
    BR sync∕atomic·AndInt64(SB)

TEXT sync∕atomic·AndUintptr(SB), NOSPLIT, $0-24
    GO_ARGS
    BR sync∕atomic·AndInt64(SB)

// Or
TEXT sync∕atomic·OrInt32(SB), NOSPLIT, $0-20
    GO_ARGS
    MOVD $__tsan_go_atomic32_fetch_or(SB), R8
    BR racecallatomic<>(SB)

TEXT sync∕atomic·OrInt64(SB), NOSPLIT, $0-24
    GO_ARGS
    MOVD $__tsan_go_atomic64_fetch_or(SB), R8
    BR racecallatomic<>(SB)

TEXT sync∕atomic·OrUint32(SB), NOSPLIT, $0-20
    GO_ARGS
    BR sync∕atomic·OrInt32(SB)

TEXT sync∕atomic·OrUint64(SB), NOSPLIT, $0-24
    GO_ARGS
    BR sync∕atomic·OrInt64(SB)

TEXT sync∕atomic·OrUintptr(SB), NOSPLIT, $0-24
    GO_ARGS
    BR sync∕atomic·OrInt64(SB)

// CompareAndSwap in tsan
TEXT sync∕atomic·CompareAndSwapInt32(SB), NOSPLIT, $0-17
    GO_ARGS
```
```diff
@@ -274,56 +274,6 @@ TEXT sync∕atomic·AddUintptr(SB), NOSPLIT, $0-24
    GO_ARGS
    JMP sync∕atomic·AddInt64(SB)

// And
TEXT sync∕atomic·AndInt32(SB), NOSPLIT, $0-20
    GO_ARGS
    MOVD $__tsan_go_atomic32_fetch_and(SB), R1
    BL racecallatomic<>(SB)
    RET

TEXT sync∕atomic·AndInt64(SB), NOSPLIT, $0-24
    GO_ARGS
    MOVD $__tsan_go_atomic64_fetch_and(SB), R1
    BL racecallatomic<>(SB)
    RET

TEXT sync∕atomic·AndUint32(SB), NOSPLIT, $0-20
    GO_ARGS
    JMP sync∕atomic·AndInt32(SB)

TEXT sync∕atomic·AndUint64(SB), NOSPLIT, $0-24
    GO_ARGS
    JMP sync∕atomic·AndInt64(SB)

TEXT sync∕atomic·AndUintptr(SB), NOSPLIT, $0-24
    GO_ARGS
    JMP sync∕atomic·AndInt64(SB)

// Or
TEXT sync∕atomic·OrInt32(SB), NOSPLIT, $0-20
    GO_ARGS
    MOVD $__tsan_go_atomic32_fetch_or(SB), R1
    BL racecallatomic<>(SB)
    RET

TEXT sync∕atomic·OrInt64(SB), NOSPLIT, $0-24
    GO_ARGS
    MOVD $__tsan_go_atomic64_fetch_or(SB), R1
    BL racecallatomic<>(SB)
    RET

TEXT sync∕atomic·OrUint32(SB), NOSPLIT, $0-20
    GO_ARGS
    JMP sync∕atomic·OrInt32(SB)

TEXT sync∕atomic·OrUint64(SB), NOSPLIT, $0-24
    GO_ARGS
    JMP sync∕atomic·OrInt64(SB)

TEXT sync∕atomic·OrUintptr(SB), NOSPLIT, $0-24
    GO_ARGS
    JMP sync∕atomic·OrInt64(SB)

// CompareAndSwap

TEXT sync∕atomic·CompareAndSwapInt32(SB), NOSPLIT, $0-17
```
```diff
@@ -83,33 +83,3 @@ TEXT ·StoreUint64(SB),NOSPLIT,$0

TEXT ·StoreUintptr(SB),NOSPLIT,$0
    JMP internal∕runtime∕atomic·Storeuintptr(SB)

TEXT ·AndInt32(SB),NOSPLIT,$0
    JMP internal∕runtime∕atomic·And32(SB)

TEXT ·AndUint32(SB),NOSPLIT,$0
    JMP internal∕runtime∕atomic·And32(SB)

TEXT ·AndUintptr(SB),NOSPLIT,$0
    JMP internal∕runtime∕atomic·Anduintptr(SB)

TEXT ·AndInt64(SB),NOSPLIT,$0
    JMP internal∕runtime∕atomic·And64(SB)

TEXT ·AndUint64(SB),NOSPLIT,$0
    JMP internal∕runtime∕atomic·And64(SB)

TEXT ·OrInt32(SB),NOSPLIT,$0
    JMP internal∕runtime∕atomic·Or32(SB)

TEXT ·OrUint32(SB),NOSPLIT,$0
    JMP internal∕runtime∕atomic·Or32(SB)

TEXT ·OrUintptr(SB),NOSPLIT,$0
    JMP internal∕runtime∕atomic·Oruintptr(SB)

TEXT ·OrInt64(SB),NOSPLIT,$0
    JMP internal∕runtime∕atomic·Or64(SB)

TEXT ·OrUint64(SB),NOSPLIT,$0
    JMP internal∕runtime∕atomic·Or64(SB)
```
```diff
@@ -531,472 +531,6 @@ func TestAddUintptrMethod(t *testing.T) {
    }
}

func TestAndInt32(t *testing.T) {
    var x struct {
        before int32
        i      int32
        after  int32
    }
    x.before = magic32
    x.after = magic32
    x.i = -1
    j := x.i
    for mask := int32(1); mask != 0; mask <<= 1 {
        old := x.i
        k := AndInt32(&x.i, ^mask)
        j &= ^mask
        if x.i != j || k != old {
            t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i, j, k, old)
        }
    }
    if x.before != magic32 || x.after != magic32 {
        t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32)
    }
}

func TestAndInt32Method(t *testing.T) {
    var x struct {
        before int32
        i      Int32
        after  int32
    }
    x.before = magic32
    x.after = magic32
    x.i.Store(-1)
    j := x.i.Load()
    for mask := int32(1); mask != 0; mask <<= 1 {
        old := x.i.Load()
        k := x.i.And(^mask)
        j &= ^mask
        if x.i.Load() != j || k != old {
            t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i.Load(), j, k, old)
        }
    }
    if x.before != magic32 || x.after != magic32 {
        t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32)
    }
}

func TestAndUint32(t *testing.T) {
    var x struct {
        before uint32
        i      uint32
        after  uint32
    }
    x.before = magic32
    x.after = magic32
    x.i = 0xffffffff
    j := x.i
    for mask := uint32(1); mask != 0; mask <<= 1 {
        old := x.i
        k := AndUint32(&x.i, ^mask)
        j &= ^mask
        if x.i != j || k != old {
            t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i, j, k, old)
        }
    }
    if x.before != magic32 || x.after != magic32 {
        t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32)
    }
}

func TestAndUint32Method(t *testing.T) {
    var x struct {
        before uint32
        i      Uint32
        after  uint32
    }
    x.before = magic32
    x.after = magic32
    x.i.Store(0xffffffff)
    j := x.i.Load()
    for mask := uint32(1); mask != 0; mask <<= 1 {
        old := x.i.Load()
        k := x.i.And(^mask)
        j &= ^mask
        if x.i.Load() != j || k != old {
            t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i.Load(), j, k, old)
        }
    }
    if x.before != magic32 || x.after != magic32 {
        t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32)
    }
}

func TestAndInt64(t *testing.T) {
    var x struct {
        before int64
        i      int64
        after  int64
    }
    magic64 := int64(magic64)
    x.before = magic64
    x.after = magic64
    x.i = -1
    j := x.i
    for mask := int64(1); mask != 0; mask <<= 1 {
        old := x.i
        k := AndInt64(&x.i, ^mask)
        j &= ^mask
        if x.i != j || k != old {
            t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i, j, k, old)
        }
    }
    if x.before != magic64 || x.after != magic64 {
        t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic64, magic64)
    }
}

func TestAndInt64Method(t *testing.T) {
    var x struct {
        before int64
        i      Int64
        after  int64
    }
    magic64 := int64(magic64)
    x.before = magic64
    x.after = magic64
    x.i.Store(-1)
    j := x.i.Load()
    for mask := int64(1); mask != 0; mask <<= 1 {
        old := x.i.Load()
        k := x.i.And(^mask)
        j &= ^mask
        if x.i.Load() != j || k != old {
            t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i.Load(), j, k, old)
        }
    }
    if x.before != magic64 || x.after != magic64 {
        t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic64, magic64)
    }
}

func TestAndUint64(t *testing.T) {
    var x struct {
        before uint64
        i      uint64
        after  uint64
    }
    magic64 := uint64(magic64)
    x.before = magic64
    x.after = magic64
    x.i = 0xfffffffffffffff
    j := x.i
    for mask := uint64(1); mask != 0; mask <<= 1 {
        old := x.i
        k := AndUint64(&x.i, ^mask)
        j &= ^mask
        if x.i != j || k != old {
            t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i, j, k, old)
        }
    }
    if x.before != magic64 || x.after != magic64 {
        t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic64, magic64)
    }
}

func TestAndUint64Method(t *testing.T) {
    var x struct {
        before uint64
        i      Uint64
        after  uint64
    }
    magic64 := uint64(magic64)
    x.before = magic64
    x.after = magic64
    x.i.Store(0xfffffffffffffff)
    j := x.i.Load()
    for mask := uint64(1); mask != 0; mask <<= 1 {
        old := x.i.Load()
        k := x.i.And(^mask)
        j &= ^mask
        if x.i.Load() != j || k != old {
            t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i.Load(), j, k, old)
        }
    }
    if x.before != magic64 || x.after != magic64 {
        t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic64, magic64)
    }
}

func TestAndUintptr(t *testing.T) {
    var x struct {
        before uintptr
        i      uintptr
        after  uintptr
    }
    var m uint64 = magic64
    magicptr := uintptr(m)
    x.before = magicptr
    x.after = magicptr
    x.i = ^uintptr(0)
    j := x.i
    for mask := uintptr(1); mask != 0; mask <<= 1 {
        old := x.i
        k := AndUintptr(&x.i, ^mask)
        j &= ^mask
        if x.i != j || k != old {
            t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i, j, k, old)
        }
    }
    if x.before != magicptr || x.after != magicptr {
        t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magicptr, magicptr)
    }
}

func TestAndUintptrMethod(t *testing.T) {
    var x struct {
        before uintptr
        i      Uintptr
        after  uintptr
    }
    var m uint64 = magic64
    magicptr := uintptr(m)
    x.before = magicptr
    x.after = magicptr
    x.i.Store(^uintptr(0))
    j := x.i.Load()
    for mask := uintptr(1); mask != 0; mask <<= 1 {
        old := x.i.Load()
        k := x.i.And(^mask)
        j &= ^mask
        if x.i.Load() != j || k != old {
            t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i.Load(), j, k, old)
        }
    }
    if x.before != magicptr || x.after != magicptr {
        t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magicptr, magicptr)
    }
}

func TestOrInt32(t *testing.T) {
    var x struct {
        before int32
        i      int32
        after  int32
    }
    x.before = magic32
    x.after = magic32
    var j int32
    for mask := int32(1); mask != 0; mask <<= 1 {
        old := x.i
        k := OrInt32(&x.i, mask)
        j |= mask
        if x.i != j || k != old {
            t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i, j, k, old)
        }
    }
    if x.before != magic32 || x.after != magic32 {
        t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32)
    }
}

func TestOrInt32Method(t *testing.T) {
    var x struct {
        before int32
        i      Int32
        after  int32
    }
    x.before = magic32
    x.after = magic32
    var j int32
    for mask := int32(1); mask != 0; mask <<= 1 {
        old := x.i.Load()
        k := x.i.Or(mask)
        j |= mask
        if x.i.Load() != j || k != old {
            t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i.Load(), j, k, old)
        }
    }
    if x.before != magic32 || x.after != magic32 {
        t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32)
    }
}

func TestOrUint32(t *testing.T) {
    var x struct {
        before uint32
        i      uint32
        after  uint32
    }
    x.before = magic32
    x.after = magic32
    var j uint32
    for mask := uint32(1); mask != 0; mask <<= 1 {
        old := x.i
        k := OrUint32(&x.i, mask)
        j |= mask
        if x.i != j || k != old {
            t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i, j, k, old)
        }
    }
    if x.before != magic32 || x.after != magic32 {
        t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32)
    }
}

func TestOrUint32Method(t *testing.T) {
    var x struct {
        before uint32
        i      Uint32
        after  uint32
    }
    x.before = magic32
    x.after = magic32
    var j uint32
    for mask := uint32(1); mask != 0; mask <<= 1 {
        old := x.i.Load()
        k := x.i.Or(mask)
        j |= mask
        if x.i.Load() != j || k != old {
            t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i.Load(), j, k, old)
        }
    }
    if x.before != magic32 || x.after != magic32 {
        t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic32, magic32)
    }
}

func TestOrInt64(t *testing.T) {
    var x struct {
        before int64
        i      int64
        after  int64
    }
    magic64 := int64(magic64)
    x.before = magic64
    x.after = magic64
    var j int64
    for mask := int64(1); mask != 0; mask <<= 1 {
        old := x.i
        k := OrInt64(&x.i, mask)
        j |= mask
        if x.i != j || k != old {
            t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i, j, k, old)
        }
    }
    if x.before != magic64 || x.after != magic64 {
        t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic64, magic64)
    }
}

func TestOrInt64Method(t *testing.T) {
    var x struct {
        before int64
        i      Int64
        after  int64
    }
    magic64 := int64(magic64)
    x.before = magic64
    x.after = magic64
    var j int64
    for mask := int64(1); mask != 0; mask <<= 1 {
        old := x.i.Load()
        k := x.i.Or(mask)
        j |= mask
        if x.i.Load() != j || k != old {
            t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i.Load(), j, k, old)
        }
    }
    if x.before != magic64 || x.after != magic64 {
        t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic64, magic64)
    }
}

func TestOrUint64(t *testing.T) {
    var x struct {
        before uint64
        i      uint64
        after  uint64
    }
    magic64 := uint64(magic64)
    x.before = magic64
    x.after = magic64
    var j uint64
    for mask := uint64(1); mask != 0; mask <<= 1 {
        old := x.i
        k := OrUint64(&x.i, mask)
        j |= mask
        if x.i != j || k != old {
            t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i, j, k, old)
        }
    }
    if x.before != magic64 || x.after != magic64 {
        t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic64, magic64)
    }
}

func TestOrUint64Method(t *testing.T) {
    var x struct {
        before uint64
        i      Uint64
        after  uint64
    }
    magic64 := uint64(magic64)
    x.before = magic64
    x.after = magic64
    var j uint64
    for mask := uint64(1); mask != 0; mask <<= 1 {
        old := x.i.Load()
        k := x.i.Or(mask)
        j |= mask
        if x.i.Load() != j || k != old {
            t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i.Load(), j, k, old)
        }
    }
    if x.before != magic64 || x.after != magic64 {
        t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magic64, magic64)
    }
}

func TestOrUintptr(t *testing.T) {
    var x struct {
        before uintptr
        i      uintptr
        after  uintptr
    }
    var m uint64 = magic64
    magicptr := uintptr(m)
    x.before = magicptr
    x.after = magicptr
    var j uintptr
    for mask := uintptr(1); mask != 0; mask <<= 1 {
        old := x.i
        k := OrUintptr(&x.i, mask)
        j |= mask
        if x.i != j || k != old {
            t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i, j, k, old)
        }
    }
    if x.before != magicptr || x.after != magicptr {
        t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magicptr, magicptr)
    }
}

func TestOrUintptrMethod(t *testing.T) {
    var x struct {
        before uintptr
        i      Uintptr
        after  uintptr
    }
    var m uint64 = magic64
    magicptr := uintptr(m)
    x.before = magicptr
    x.after = magicptr
    var j uintptr
    for mask := uintptr(1); mask != 0; mask <<= 1 {
        old := x.i.Load()
        k := x.i.Or(mask)
        j |= mask
        if x.i.Load() != j || k != old {
            t.Fatalf("mask=%d i=%d j=%d k=%d old=%d", mask, x.i.Load(), j, k, old)
        }
    }
    if x.before != magicptr || x.after != magicptr {
        t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, magicptr, magicptr)
    }
}

func TestCompareAndSwapInt32(t *testing.T) {
    var x struct {
        before int32
```
```diff
@@ -128,48 +128,6 @@ func AddUint32(addr *uint32, delta uint32) (new uint32)
//go:noescape
func AddUintptr(addr *uintptr, delta uintptr) (new uintptr)

// AndInt32 atomically performs a bitwise AND operation on *addr using the bitmask provided as mask
// and returns the old value.
// Consider using the more ergonomic and less error-prone [Int32.And] instead.
//
//go:noescape
func AndInt32(addr *int32, mask int32) (old int32)

// AndUint32 atomically performs a bitwise AND operation on *addr using the bitmask provided as mask
// and returns the old value.
// Consider using the more ergonomic and less error-prone [Uint32.And] instead.
//
//go:noescape
func AndUint32(addr *uint32, mask uint32) (old uint32)

// AndUintptr atomically performs a bitwise AND operation on *addr using the bitmask provided as mask
// and returns the old value.
// Consider using the more ergonomic and less error-prone [Uintptr.And] instead.
//
//go:noescape
func AndUintptr(addr *uintptr, mask uintptr) (old uintptr)

// OrInt32 atomically performs a bitwise OR operation on *addr using the bitmask provided as mask
// and returns the old value.
// Consider using the more ergonomic and less error-prone [Int32.Or] instead.
//
//go:noescape
func OrInt32(addr *int32, mask int32) (old int32)

// OrUint32 atomically performs a bitwise OR operation on *addr using the bitmask provided as mask
// and returns the old value.
// Consider using the more ergonomic and less error-prone [Uint32.Or] instead.
//
//go:noescape
func OrUint32(addr *uint32, mask uint32) (old uint32)

// OrUintptr atomically performs a bitwise OR operation on *addr using the bitmask provided as mask
// and returns the old value.
// Consider using the more ergonomic and less error-prone [Uintptr.Or] instead.
//
//go:noescape
func OrUintptr(addr *uintptr, mask uintptr) (old uintptr)

// LoadInt32 atomically loads *addr.
// Consider using the more ergonomic and less error-prone [Int32.Load] instead.
//
```
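The removed doc comments above describe the contract of the package-level helpers: each applies its mask atomically and returns the value that was stored before the operation. A small illustration of that contract, assuming a stock Go 1.23+ `sync/atomic` that still exports these functions (this patch series strips them from the bundled toolchain):

```go
package main

import (
    "fmt"
    "sync/atomic"
)

func main() {
    var flags uint32 = 0b0101

    // OrUint32 sets bits and returns the previous value.
    old := atomic.OrUint32(&flags, 0b0010)
    fmt.Printf("old=%04b now=%04b\n", old, atomic.LoadUint32(&flags)) // old=0101 now=0111

    // AndUint32 clears bits, again returning the previous value.
    old = atomic.AndUint32(&flags, ^uint32(0b0001))
    fmt.Printf("old=%04b now=%04b\n", old, atomic.LoadUint32(&flags)) // old=0111 now=0110
}
```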
```diff
@@ -93,14 +93,6 @@ func (x *Int32) CompareAndSwap(old, new int32) (swapped bool) {
// Add atomically adds delta to x and returns the new value.
func (x *Int32) Add(delta int32) (new int32) { return AddInt32(&x.v, delta) }

// And atomically performs a bitwise AND operation on x using the bitmask
// provided as mask and returns the old value.
func (x *Int32) And(mask int32) (old int32) { return AndInt32(&x.v, mask) }

// Or atomically performs a bitwise OR operation on x using the bitmask
// provided as mask and returns the old value.
func (x *Int32) Or(mask int32) (old int32) { return OrInt32(&x.v, mask) }

// An Int64 is an atomic int64. The zero value is zero.
//
// Int64 must not be copied after first use.

@@ -127,14 +119,6 @@ func (x *Int64) CompareAndSwap(old, new int64) (swapped bool) {
// Add atomically adds delta to x and returns the new value.
func (x *Int64) Add(delta int64) (new int64) { return AddInt64(&x.v, delta) }

// And atomically performs a bitwise AND operation on x using the bitmask
// provided as mask and returns the old value.
func (x *Int64) And(mask int64) (old int64) { return AndInt64(&x.v, mask) }

// Or atomically performs a bitwise OR operation on x using the bitmask
// provided as mask and returns the old value.
func (x *Int64) Or(mask int64) (old int64) { return OrInt64(&x.v, mask) }

// A Uint32 is an atomic uint32. The zero value is zero.
//
// Uint32 must not be copied after first use.

@@ -160,14 +144,6 @@ func (x *Uint32) CompareAndSwap(old, new uint32) (swapped bool) {
// Add atomically adds delta to x and returns the new value.
func (x *Uint32) Add(delta uint32) (new uint32) { return AddUint32(&x.v, delta) }

// And atomically performs a bitwise AND operation on x using the bitmask
// provided as mask and returns the old value.
func (x *Uint32) And(mask uint32) (old uint32) { return AndUint32(&x.v, mask) }

// Or atomically performs a bitwise OR operation on x using the bitmask
// provided as mask and returns the old value.
func (x *Uint32) Or(mask uint32) (old uint32) { return OrUint32(&x.v, mask) }

// A Uint64 is an atomic uint64. The zero value is zero.
//
// Uint64 must not be copied after first use.

@@ -194,14 +170,6 @@ func (x *Uint64) CompareAndSwap(old, new uint64) (swapped bool) {
// Add atomically adds delta to x and returns the new value.
func (x *Uint64) Add(delta uint64) (new uint64) { return AddUint64(&x.v, delta) }

// And atomically performs a bitwise AND operation on x using the bitmask
// provided as mask and returns the old value.
func (x *Uint64) And(mask uint64) (old uint64) { return AndUint64(&x.v, mask) }

// Or atomically performs a bitwise OR operation on x using the bitmask
// provided as mask and returns the old value.
func (x *Uint64) Or(mask uint64) (old uint64) { return OrUint64(&x.v, mask) }

// A Uintptr is an atomic uintptr. The zero value is zero.
//
// Uintptr must not be copied after first use.

@@ -227,14 +195,6 @@ func (x *Uintptr) CompareAndSwap(old, new uintptr) (swapped bool) {
// Add atomically adds delta to x and returns the new value.
func (x *Uintptr) Add(delta uintptr) (new uintptr) { return AddUintptr(&x.v, delta) }

// And atomically performs a bitwise AND operation on x using the bitmask
// provided as mask and returns the old value.
func (x *Uintptr) And(mask uintptr) (old uintptr) { return AndUintptr(&x.v, mask) }

// Or atomically performs a bitwise OR operation on x using the bitmask
// provided as mask and returns the updated value after the OR operation.
func (x *Uintptr) Or(mask uintptr) (old uintptr) { return OrUintptr(&x.v, mask) }

// noCopy may be added to structs which must not be copied
// after the first use.
//
```
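The same operations exist as methods on the atomic.Int32/Int64/Uint32/Uint64/Uintptr types, and they too return the value seen before the operation, which is exactly the behaviour the waitgroup change below has to reproduce with CompareAndSwap. A brief method-form sketch under the same assumption of a stock Go 1.23+ toolchain:

```go
package main

import (
    "fmt"
    "sync/atomic"
)

func main() {
    var state atomic.Uint64
    const flag = uint64(1) << 62

    old := state.Or(flag) // sets the flag bit, returns the prior value
    fmt.Println(old == 0, state.Load() == flag)

    old = state.And(^flag) // clears it again, returns the prior value
    fmt.Println(old == flag, state.Load() == 0)
}
```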
```diff
@@ -94,11 +94,20 @@ func (wg *WaitGroup) Add(delta int) {
        fatal("sync: WaitGroup.Add called from multiple synctest bubbles")
    case synctest.CurrentBubble:
        bubbled = true
        state := wg.state.Or(waitGroupBubbleFlag)
        // Use compare-and-swap loop to implement atomic Or operation
        // since race detector doesn't have __tsan_go_atomic64_fetch_or
        for {
            old := wg.state.Load()
            new := old | waitGroupBubbleFlag
            if wg.state.CompareAndSwap(old, new) {
                state := old
                if state != 0 && state&waitGroupBubbleFlag == 0 {
                    // Add has been called from outside this bubble.
                    fatal("sync: WaitGroup.Add called from inside and outside synctest bubble")
                }
                break
            }
        }
    }
    }
    state := wg.state.Add(uint64(delta) << 32)
```