5 package atomic_test
6
7 import (
8 "internal/goarch"
9 "internal/runtime/atomic"
10 "runtime"
11 "testing"
12 "unsafe"
13 )
14
// runParallel runs f iter times in each of N concurrently executing
// goroutines and blocks until all of them have finished. GOMAXPROCS is
// raised to N for the duration of the call and restored on return.
func runParallel(N, iter int, f func()) {
	// GOMAXPROCS returns the previous value, so this defer restores it.
	defer runtime.GOMAXPROCS(runtime.GOMAXPROCS(N))
	finished := make(chan bool)
	for g := 0; g < N; g++ {
		go func() {
			for k := 0; k < iter; k++ {
				f()
			}
			finished <- true
		}()
	}
	// Wait for every worker to signal completion.
	for g := 0; g < N; g++ {
		<-finished
	}
}
30
31 func TestXadduintptr(t *testing.T) {
32 N := 20
33 iter := 100000
34 if testing.Short() {
35 N = 10
36 iter = 10000
37 }
38 inc := uintptr(100)
39 total := uintptr(0)
40 runParallel(N, iter, func() {
41 atomic.Xadduintptr(&total, inc)
42 })
43 if want := uintptr(N*iter) * inc; want != total {
44 t.Fatalf("xadduintpr error, want %d, got %d", want, total)
45 }
46 total = 0
47 runParallel(N, iter, func() {
48 atomic.Xadduintptr(&total, inc)
49 atomic.Xadduintptr(&total, uintptr(-int64(inc)))
50 })
51 if total != 0 {
52 t.Fatalf("xadduintpr total error, want %d, got %d", 0, total)
53 }
54 }
55
56
57
58 func TestXadduintptrOnUint64(t *testing.T) {
59 if goarch.BigEndian {
60
61
62
63
64 t.Skip("skip xadduintptr on big endian architecture")
65 }
66 const inc = 100
67 val := uint64(0)
68 atomic.Xadduintptr((*uintptr)(unsafe.Pointer(&val)), inc)
69 if inc != val {
70 t.Fatalf("xadduintptr should increase lower-order bits, want %d, got %d", inc, val)
71 }
72 }
73
// shouldPanic runs f and reports a test error (attributed to name)
// unless f panics with the runtime's unaligned 64-bit atomic message.
func shouldPanic(t *testing.T, name string, f func()) {
	defer func() {
		// NOTE(review): the GC run here appears intended to shake out
		// any corruption left behind by the panic/recover path —
		// confirm against runtime test conventions.
		runtime.GC()

		err := recover()
		want := "unaligned 64-bit atomic operation"
		switch {
		case err == nil:
			t.Errorf("%s did not panic", name)
		default:
			if s, _ := err.(string); s != want {
				t.Errorf("%s: wanted panic %q, got %q", name, want, err)
			}
		}
	}()
	f()
}
89
90
// TestUnaligned64 verifies that 64-bit atomic operations panic with the
// expected "unaligned 64-bit atomic operation" message when given an
// address that is 4-byte but not 8-byte aligned. Only meaningful on
// 32-bit systems; it is skipped elsewhere.
func TestUnaligned64(t *testing.T) {

	// Detect 32-bit platforms by the size of int.
	if unsafe.Sizeof(int(0)) != 4 {
		t.Skip("test only runs on 32-bit systems")
	}

	x := make([]uint32, 4)
	// &x[0] is at least 4-byte aligned, so setting bit 2 of the address
	// yields an address ≡ 4 (mod 8): valid for 32-bit accesses but
	// guaranteed misaligned for 64-bit ones.
	u := unsafe.Pointer(uintptr(unsafe.Pointer(&x[0])) | 4)

	// The same misaligned address viewed as both unsigned and signed 64-bit.
	up64 := (*uint64)(u)
	p64 := (*int64)(u)

	// Every 64-bit primitive must detect the misalignment and panic.
	shouldPanic(t, "Load64", func() { atomic.Load64(up64) })
	shouldPanic(t, "Loadint64", func() { atomic.Loadint64(p64) })
	shouldPanic(t, "Store64", func() { atomic.Store64(up64, 0) })
	shouldPanic(t, "Xadd64", func() { atomic.Xadd64(up64, 1) })
	shouldPanic(t, "Xchg64", func() { atomic.Xchg64(up64, 1) })
	shouldPanic(t, "Cas64", func() { atomic.Cas64(up64, 1, 2) })
}
113
114 func TestAnd8(t *testing.T) {
115
116 x := uint8(0xff)
117 for i := uint8(0); i < 8; i++ {
118 atomic.And8(&x, ^(1 << i))
119 if r := uint8(0xff) << (i + 1); x != r {
120 t.Fatalf("clearing bit %#x: want %#x, got %#x", uint8(1<<i), r, x)
121 }
122 }
123
124
125 a := make([]uint8, 1<<12)
126 for i := range a {
127 a[i] = 0xff
128 }
129
130
131 done := make(chan bool)
132 for i := 0; i < 8; i++ {
133 m := ^uint8(1 << i)
134 go func() {
135 for i := range a {
136 atomic.And8(&a[i], m)
137 }
138 done <- true
139 }()
140 }
141 for i := 0; i < 8; i++ {
142 <-done
143 }
144
145
146 for i, v := range a {
147 if v != 0 {
148 t.Fatalf("a[%v] not cleared: want %#x, got %#x", i, uint8(0), v)
149 }
150 }
151 }
152
153 func TestAnd(t *testing.T) {
154
155 x := uint32(0xffffffff)
156 for i := uint32(0); i < 32; i++ {
157 atomic.And(&x, ^(1 << i))
158 if r := uint32(0xffffffff) << (i + 1); x != r {
159 t.Fatalf("clearing bit %#x: want %#x, got %#x", uint32(1<<i), r, x)
160 }
161 }
162
163
164 a := make([]uint32, 1<<12)
165 for i := range a {
166 a[i] = 0xffffffff
167 }
168
169
170 done := make(chan bool)
171 for i := 0; i < 32; i++ {
172 m := ^uint32(1 << i)
173 go func() {
174 for i := range a {
175 atomic.And(&a[i], m)
176 }
177 done <- true
178 }()
179 }
180 for i := 0; i < 32; i++ {
181 <-done
182 }
183
184
185 for i, v := range a {
186 if v != 0 {
187 t.Fatalf("a[%v] not cleared: want %#x, got %#x", i, uint32(0), v)
188 }
189 }
190 }
191
192 func TestOr8(t *testing.T) {
193
194 x := uint8(0)
195 for i := uint8(0); i < 8; i++ {
196 atomic.Or8(&x, 1<<i)
197 if r := (uint8(1) << (i + 1)) - 1; x != r {
198 t.Fatalf("setting bit %#x: want %#x, got %#x", uint8(1)<<i, r, x)
199 }
200 }
201
202
203 a := make([]uint8, 1<<12)
204
205
206 done := make(chan bool)
207 for i := 0; i < 8; i++ {
208 m := uint8(1 << i)
209 go func() {
210 for i := range a {
211 atomic.Or8(&a[i], m)
212 }
213 done <- true
214 }()
215 }
216 for i := 0; i < 8; i++ {
217 <-done
218 }
219
220
221 for i, v := range a {
222 if v != 0xff {
223 t.Fatalf("a[%v] not fully set: want %#x, got %#x", i, uint8(0xff), v)
224 }
225 }
226 }
227
228 func TestOr(t *testing.T) {
229
230 x := uint32(0)
231 for i := uint32(0); i < 32; i++ {
232 atomic.Or(&x, 1<<i)
233 if r := (uint32(1) << (i + 1)) - 1; x != r {
234 t.Fatalf("setting bit %#x: want %#x, got %#x", uint32(1)<<i, r, x)
235 }
236 }
237
238
239 a := make([]uint32, 1<<12)
240
241
242 done := make(chan bool)
243 for i := 0; i < 32; i++ {
244 m := uint32(1 << i)
245 go func() {
246 for i := range a {
247 atomic.Or(&a[i], m)
248 }
249 done <- true
250 }()
251 }
252 for i := 0; i < 32; i++ {
253 <-done
254 }
255
256
257 for i, v := range a {
258 if v != 0xffffffff {
259 t.Fatalf("a[%v] not fully set: want %#x, got %#x", i, uint32(0xffffffff), v)
260 }
261 }
262 }
263
// TestBitwiseContended8 hammers Or8/And8 from eight goroutines, each
// repeatedly setting and then clearing its own bit in every byte of a
// small shared array, verifying its bit after each operation and that
// all bytes end up zero.
func TestBitwiseContended8(t *testing.T) {
	// Small array so goroutines contend heavily on the same bytes.
	a := make([]uint8, 16)

	// Iterations per goroutine; reduced in short mode.
	N := 1 << 16
	if testing.Short() {
		N = 1 << 10
	}

	// One goroutine per bit position. Each goroutine owns its bit
	// exclusively, so its own set/clear round-trips must be observable
	// regardless of what the other goroutines do to the other bits.
	done := make(chan bool)
	for i := 0; i < 8; i++ {
		m := uint8(1 << i) // per-iteration copy so each goroutine gets its own mask
		go func() {
			for n := 0; n < N; n++ {
				for i := range a {
					atomic.Or8(&a[i], m)
					if atomic.Load8(&a[i])&m != m {
						t.Errorf("a[%v] bit %#x not set", i, m)
					}
					atomic.And8(&a[i], ^m)
					if atomic.Load8(&a[i])&m != 0 {
						t.Errorf("a[%v] bit %#x not clear", i, m)
					}
				}
			}
			done <- true
		}()
	}
	for i := 0; i < 8; i++ {
		<-done
	}

	// Each goroutine's final And8 cleared its bit, so all bytes are zero.
	for i, v := range a {
		if v != 0 {
			t.Fatalf("a[%v] not cleared: want %#x, got %#x", i, uint8(0), v)
		}
	}
}
305
// TestBitwiseContended hammers Or/And from 32 goroutines, each
// repeatedly setting and then clearing its own bit in every word of a
// small shared array, verifying its bit after each operation and that
// all words end up zero.
func TestBitwiseContended(t *testing.T) {
	// Small array so goroutines contend heavily on the same words.
	a := make([]uint32, 16)

	// Iterations per goroutine; reduced in short mode.
	N := 1 << 16
	if testing.Short() {
		N = 1 << 10
	}

	// One goroutine per bit position. Each goroutine owns its bit
	// exclusively, so its own set/clear round-trips must be observable
	// regardless of what the other goroutines do to the other bits.
	done := make(chan bool)
	for i := 0; i < 32; i++ {
		m := uint32(1 << i) // per-iteration copy so each goroutine gets its own mask
		go func() {
			for n := 0; n < N; n++ {
				for i := range a {
					atomic.Or(&a[i], m)
					if atomic.Load(&a[i])&m != m {
						t.Errorf("a[%v] bit %#x not set", i, m)
					}
					atomic.And(&a[i], ^m)
					if atomic.Load(&a[i])&m != 0 {
						t.Errorf("a[%v] bit %#x not clear", i, m)
					}
				}
			}
			done <- true
		}()
	}
	for i := 0; i < 32; i++ {
		<-done
	}

	// Each goroutine's final And cleared its bit, so all words are zero.
	for i, v := range a {
		if v != 0 {
			t.Fatalf("a[%v] not cleared: want %#x, got %#x", i, uint32(0), v)
		}
	}
}
347
348 func TestCasRel(t *testing.T) {
349 const _magic = 0x5a5aa5a5
350 var x struct {
351 before uint32
352 i uint32
353 after uint32
354 o uint32
355 n uint32
356 }
357
358 x.before = _magic
359 x.after = _magic
360 for j := 0; j < 32; j += 1 {
361 x.i = (1 << j) + 0
362 x.o = (1 << j) + 0
363 x.n = (1 << j) + 1
364 if !atomic.CasRel(&x.i, x.o, x.n) {
365 t.Fatalf("should have swapped %#x %#x", x.o, x.n)
366 }
367
368 if x.i != x.n {
369 t.Fatalf("wrong x.i after swap: x.i=%#x x.n=%#x", x.i, x.n)
370 }
371
372 if x.before != _magic || x.after != _magic {
373 t.Fatalf("wrong magic: %#x _ %#x != %#x _ %#x", x.before, x.after, _magic, _magic)
374 }
375 }
376 }
377
// TestStorepNoWB checks that values passed to StorepNoWB are treated as
// escaping: the two new(int) allocations must end up at distinct heap
// addresses. If escape analysis wrongly stack-allocated them, the two
// stored pointers could alias.
func TestStorepNoWB(t *testing.T) {
	var p [2]*int
	for i := range p {
		atomic.StorepNoWB(unsafe.Pointer(&p[i]), unsafe.Pointer(new(int)))
	}
	// Two live heap allocations cannot share an address.
	if p[0] == p[1] {
		t.Error("Bad escape analysis of StorepNoWB")
	}
}
387