1// Copyright 2016 The Go Authors. All rights reserved.
2// Use of this source code is governed by a BSD-style
3// license that can be found in the LICENSE file.
4
5//go:build mips || mipsle
6
7// Export some functions via linkname to assembly in sync/atomic.
8//
9//go:linkname Xadd64
10//go:linkname Xchg64
11//go:linkname Cas64
12//go:linkname Load64
13//go:linkname Store64
14//go:linkname Or64
15//go:linkname And64
16
17package atomic
18
19import (
20	"internal/cpu"
21	"unsafe"
22)
23
// lock is the single global spinlock used to serialize the emulated
// 64-bit atomic operations below (mips/mipsle lack native 64-bit
// atomic instructions). The pad sizes the struct to a full cache line
// so the state word does not false-share with neighboring data.
//
// TODO implement lock striping
var lock struct {
	state uint32
	pad   [cpu.CacheLinePadSize - 4]byte
}
29
// spinLock acquires the spinlock whose state word is *state,
// busy-waiting until it is free. Implemented in assembly.
//
//go:noescape
func spinLock(state *uint32)

// spinUnlock releases the spinlock whose state word is *state.
// Implemented in assembly.
//
//go:noescape
func spinUnlock(state *uint32)
35
36//go:nosplit
37func lockAndCheck(addr *uint64) {
38	// ensure 8-byte alignment
39	if uintptr(unsafe.Pointer(addr))&7 != 0 {
40		panicUnaligned()
41	}
42	// force dereference before taking lock
43	_ = *addr
44
45	spinLock(&lock.state)
46}
47
// unlock releases the global spinlock acquired by lockAndCheck.
//
//go:nosplit
func unlock() {
	spinUnlock(&lock.state)
}
52
53//go:nosplit
54func Xadd64(addr *uint64, delta int64) (new uint64) {
55	lockAndCheck(addr)
56
57	new = *addr + uint64(delta)
58	*addr = new
59
60	unlock()
61	return
62}
63
64//go:nosplit
65func Xchg64(addr *uint64, new uint64) (old uint64) {
66	lockAndCheck(addr)
67
68	old = *addr
69	*addr = new
70
71	unlock()
72	return
73}
74
75//go:nosplit
76func Cas64(addr *uint64, old, new uint64) (swapped bool) {
77	lockAndCheck(addr)
78
79	if (*addr) == old {
80		*addr = new
81		unlock()
82		return true
83	}
84
85	unlock()
86	return false
87}
88
89//go:nosplit
90func Load64(addr *uint64) (val uint64) {
91	lockAndCheck(addr)
92
93	val = *addr
94
95	unlock()
96	return
97}
98
99//go:nosplit
100func Store64(addr *uint64, val uint64) {
101	lockAndCheck(addr)
102
103	*addr = val
104
105	unlock()
106	return
107}
108
109//go:nosplit
110func Or64(addr *uint64, val uint64) (old uint64) {
111	for {
112		old = *addr
113		if Cas64(addr, old, old|val) {
114			return old
115		}
116	}
117}
118
119//go:nosplit
120func And64(addr *uint64, val uint64) (old uint64) {
121	for {
122		old = *addr
123		if Cas64(addr, old, old&val) {
124			return old
125		}
126	}
127}
128
// The declarations below are implemented in assembly for this
// architecture; Go sees only their signatures. //go:noescape asserts
// that the pointer arguments do not escape through the assembly body.

//go:noescape
func Xadd(ptr *uint32, delta int32) uint32

//go:noescape
func Xadduintptr(ptr *uintptr, delta uintptr) uintptr

//go:noescape
func Xchg(ptr *uint32, new uint32) uint32

//go:noescape
func Xchguintptr(ptr *uintptr, new uintptr) uintptr

//go:noescape
func Load(ptr *uint32) uint32

//go:noescape
func Load8(ptr *uint8) uint8

// NO go:noescape annotation; *ptr escapes if result escapes (#31525)
func Loadp(ptr unsafe.Pointer) unsafe.Pointer

// Load with acquire ordering.
//
//go:noescape
func LoadAcq(ptr *uint32) uint32

//go:noescape
func LoadAcquintptr(ptr *uintptr) uintptr

//go:noescape
func And8(ptr *uint8, val uint8)

//go:noescape
func Or8(ptr *uint8, val uint8)

//go:noescape
func And(ptr *uint32, val uint32)

//go:noescape
func Or(ptr *uint32, val uint32)

// And32/Or32 return the old value, unlike And/Or above.
//
//go:noescape
func And32(ptr *uint32, val uint32) uint32

//go:noescape
func Or32(ptr *uint32, val uint32) uint32

//go:noescape
func Anduintptr(ptr *uintptr, val uintptr) uintptr

//go:noescape
func Oruintptr(ptr *uintptr, val uintptr) uintptr

//go:noescape
func Store(ptr *uint32, val uint32)

//go:noescape
func Store8(ptr *uint8, val uint8)

// NO go:noescape annotation; see atomic_pointer.go.
func StorepNoWB(ptr unsafe.Pointer, val unsafe.Pointer)

// Store with release ordering.
//
//go:noescape
func StoreRel(ptr *uint32, val uint32)

//go:noescape
func StoreReluintptr(ptr *uintptr, val uintptr)

// Compare-and-swap with release ordering.
//
//go:noescape
func CasRel(addr *uint32, old, new uint32) bool
197