// Copyright 2011 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.

package race_test

import (
	"runtime"
	"sync"
	"sync/atomic"
	"testing"
	"unsafe"
)

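// Tests named TestRace* below are expected to be flagged by the race
// detector, while TestNoRace* tests are expected to run cleanly: the
// sync/atomic calls they use establish the happens-before edges that make
// the surrounding plain accesses safe.
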
func TestNoRaceAtomicAddInt64(t *testing.T) {
	var x1, x2 int8
	_ = x1 + x2
	var s int64
	ch := make(chan bool, 2)
	go func() {
		x1 = 1
		if atomic.AddInt64(&s, 1) == 2 {
			x2 = 1
		}
		ch <- true
	}()
	go func() {
		x2 = 1
		if atomic.AddInt64(&s, 1) == 2 {
			x1 = 1
		}
		ch <- true
	}()
	<-ch
	<-ch
}

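// The mirror image of TestNoRaceAtomicAddInt64: the branch is taken by the
// goroutine whose AddInt64 returns 1, i.e. the one that got there first, so
// its write inside the branch is unordered with respect to the other
// goroutine's plain write to the same variable and a race is reported.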
func TestRaceAtomicAddInt64(t *testing.T) {
	var x1, x2 int8
	_ = x1 + x2
	var s int64
	ch := make(chan bool, 2)
	go func() {
		x1 = 1
		if atomic.AddInt64(&s, 1) == 1 {
			x2 = 1
		}
		ch <- true
	}()
	go func() {
		x2 = 1
		if atomic.AddInt64(&s, 1) == 1 {
			x1 = 1
		}
		ch <- true
	}()
	<-ch
	<-ch
}

func TestNoRaceAtomicAddInt32(t *testing.T) {
	var x1, x2 int8
	_ = x1 + x2
	var s int32
	ch := make(chan bool, 2)
	go func() {
		x1 = 1
		if atomic.AddInt32(&s, 1) == 2 {
			x2 = 1
		}
		ch <- true
	}()
	go func() {
		x2 = 1
		if atomic.AddInt32(&s, 1) == 2 {
			x1 = 1
		}
		ch <- true
	}()
	<-ch
	<-ch
}

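// Several of the tests below share one shape: a goroutine writes x and then
// publishes that fact with an atomic store, add or CAS, while the main
// goroutine spins on an atomic load or CAS until it observes the update
// before writing x itself. The atomic pair supplies the happens-before edge,
// so no race is reported.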
func TestNoRaceAtomicLoadAddInt32(t *testing.T) {
	var x int64
	_ = x
	var s int32
	go func() {
		x = 2
		atomic.AddInt32(&s, 1)
	}()
	for atomic.LoadInt32(&s) != 1 {
		runtime.Gosched()
	}
	x = 1
}

func TestNoRaceAtomicLoadStoreInt32(t *testing.T) {
	var x int64
	_ = x
	var s int32
	go func() {
		x = 2
		atomic.StoreInt32(&s, 1)
	}()
	for atomic.LoadInt32(&s) != 1 {
		runtime.Gosched()
	}
	x = 1
}

func TestNoRaceAtomicStoreCASInt32(t *testing.T) {
	var x int64
	_ = x
	var s int32
	go func() {
		x = 2
		atomic.StoreInt32(&s, 1)
	}()
	for !atomic.CompareAndSwapInt32(&s, 1, 0) {
		runtime.Gosched()
	}
	x = 1
}

func TestNoRaceAtomicCASLoadInt32(t *testing.T) {
	var x int64
	_ = x
	var s int32
	go func() {
		x = 2
		if !atomic.CompareAndSwapInt32(&s, 0, 1) {
			panic("")
		}
	}()
	for atomic.LoadInt32(&s) != 1 {
		runtime.Gosched()
	}
	x = 1
}

func TestNoRaceAtomicCASCASInt32(t *testing.T) {
	var x int64
	_ = x
	var s int32
	go func() {
		x = 2
		if !atomic.CompareAndSwapInt32(&s, 0, 1) {
			panic("")
		}
	}()
	for !atomic.CompareAndSwapInt32(&s, 1, 0) {
		runtime.Gosched()
	}
	x = 1
}

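// Both goroutines attempt to CompareAndSwap the same word, and exactly one
// succeeds. The losing CAS observes the winner's store and is ordered after
// it, so the loser's write inside the branch cannot race with the winner's
// earlier plain write to that variable.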
func TestNoRaceAtomicCASCASInt32_2(t *testing.T) {
	var x1, x2 int8
	_ = x1 + x2
	var s int32
	ch := make(chan bool, 2)
	go func() {
		x1 = 1
		if !atomic.CompareAndSwapInt32(&s, 0, 1) {
			x2 = 1
		}
		ch <- true
	}()
	go func() {
		x2 = 1
		if !atomic.CompareAndSwapInt32(&s, 0, 1) {
			x1 = 1
		}
		ch <- true
	}()
	<-ch
	<-ch
}

func TestNoRaceAtomicLoadInt64(t *testing.T) {
	var x int32
	_ = x
	var s int64
	go func() {
		x = 2
		atomic.AddInt64(&s, 1)
	}()
	for atomic.LoadInt64(&s) != 1 {
		runtime.Gosched()
	}
	x = 1
}

func TestNoRaceAtomicCASCASUInt64(t *testing.T) {
	var x int64
	_ = x
	var s uint64
	go func() {
		x = 2
		if !atomic.CompareAndSwapUint64(&s, 0, 1) {
			panic("")
		}
	}()
	for !atomic.CompareAndSwapUint64(&s, 1, 0) {
		runtime.Gosched()
	}
	x = 1
}

func TestNoRaceAtomicLoadStorePointer(t *testing.T) {
	var x int64
	_ = x
	var s unsafe.Pointer
	var y int = 2
	var p unsafe.Pointer = unsafe.Pointer(&y)
	go func() {
		x = 2
		atomic.StorePointer(&s, p)
	}()
	for atomic.LoadPointer(&s) != p {
		runtime.Gosched()
	}
	x = 1
}

func TestNoRaceAtomicStoreCASUint64(t *testing.T) {
	var x int64
	_ = x
	var s uint64
	go func() {
		x = 2
		atomic.StoreUint64(&s, 1)
	}()
	for !atomic.CompareAndSwapUint64(&s, 1, 0) {
		runtime.Gosched()
	}
	x = 1
}

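// The next four tests mix an atomic operation with a plain read or write of
// the same variable, with no other synchronization; each is expected to be
// reported as a race.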
func TestRaceAtomicStoreLoad(t *testing.T) {
	c := make(chan bool)
	var a uint64
	go func() {
		atomic.StoreUint64(&a, 1)
		c <- true
	}()
	_ = a
	<-c
}

func TestRaceAtomicLoadStore(t *testing.T) {
	c := make(chan bool)
	var a uint64
	go func() {
		_ = atomic.LoadUint64(&a)
		c <- true
	}()
	a = 1
	<-c
}

func TestRaceAtomicAddLoad(t *testing.T) {
	c := make(chan bool)
	var a uint64
	go func() {
		atomic.AddUint64(&a, 1)
		c <- true
	}()
	_ = a
	<-c
}

func TestRaceAtomicAddStore(t *testing.T) {
	c := make(chan bool)
	var a uint64
	go func() {
		atomic.AddUint64(&a, 1)
		c <- true
	}()
	a = 42
	<-c
}

// A nil pointer in an atomic operation should not deadlock
// the rest of the program. Used to hang indefinitely.
func TestNoRaceAtomicCrash(t *testing.T) {
	var mutex sync.Mutex
	var nilptr *int32
	panics := 0
	defer func() {
		if x := recover(); x != nil {
			mutex.Lock()
			panics++
			mutex.Unlock()
		} else {
			panic("no panic")
		}
	}()
	atomic.AddInt32(nilptr, 1)
}

func TestNoRaceDeferAtomicStore(t *testing.T) {
	// Test that when an atomic function is deferred directly, the
	// GC scans it correctly. See issue 42599.
	type foo struct {
		bar int64
	}

	var doFork func(f *foo, depth int)
	doFork = func(f *foo, depth int) {
		atomic.StoreInt64(&f.bar, 1)
		defer atomic.StoreInt64(&f.bar, 0)
		if depth > 0 {
			for i := 0; i < 2; i++ {
				f2 := &foo{}
				go doFork(f2, depth-1)
			}
		}
		runtime.GC()
	}

	f := &foo{}
	doFork(f, 11)
}