/*	$NetBSD: atomic_init_testset.c,v 1.19 2024/01/21 03:42:08 thorpej Exp $	*/

/*-
 * Copyright (c) 2008 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * libc glue for atomic operations where the hardware does not provide
 * compare-and-swap.  It's assumed that this will only be used on 32-bit
 * platforms.
 *
 * This should be compiled with '-fno-reorder-blocks -fomit-frame-pointer'
 * if using gcc.
 */

#include <sys/cdefs.h>
__RCSID("$NetBSD: atomic_init_testset.c,v 1.19 2024/01/21 03:42:08 thorpej Exp $");

#include "extern.h"
#include "atomic_op_namespace.h"

#include <sys/types.h>
#include <sys/atomic.h>
#include <sys/lock.h>
#include <sys/ras.h>
#include <sys/sysctl.h>

#include <string.h>

#define	I2	__SIMPLELOCK_UNLOCKED, __SIMPLELOCK_UNLOCKED,
#define	I16	I2 I2 I2 I2 I2 I2 I2 I2
#define	I128	I16 I16 I16 I16 I16 I16 I16 I16

static __cpu_simple_lock_t atomic_locks[128] = { I128 };
/*
 * Pick a lock out of the above array depending on the object address
 * passed.  Most variables used atomically will not be in the same
 * cacheline - and if they are, using the same lock is fine.
 */
#define	HASH(PTR)	(((uintptr_t)(PTR) >> 3) & 127)
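/*
 * For example, a variable at the (hypothetical) address 0x1008 maps to
 * lock index ((0x1008 >> 3) & 127) == (0x201 & 127) == 1; shifting out
 * the low three bits means objects within the same 8-byte chunk share
 * a single lock.
 */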

#ifdef __HAVE_ASM_ATOMIC_CAS_UP
extern uint32_t _atomic_cas_up(volatile uint32_t *, uint32_t, uint32_t);
#else
static uint32_t _atomic_cas_up(volatile uint32_t *, uint32_t, uint32_t);
#endif
static uint32_t (*_atomic_cas_fn)(volatile uint32_t *, uint32_t, uint32_t) =
    _atomic_cas_up;
RAS_DECL(_atomic_cas);

#ifdef __HAVE_ATOMIC_CAS_64_UP
#ifdef __HAVE_ASM_ATOMIC_CAS_64_UP
extern uint64_t _atomic_cas_64_up(volatile uint64_t *, uint64_t, uint64_t);
#else
static uint64_t _atomic_cas_64_up(volatile uint64_t *, uint64_t, uint64_t);
#endif
static uint64_t (*_atomic_cas_64_fn)(volatile uint64_t *, uint64_t, uint64_t) =
    _atomic_cas_64_up;
RAS_DECL(_atomic_cas_64);
#endif

#ifdef __HAVE_ASM_ATOMIC_CAS_16_UP
extern uint16_t _atomic_cas_16_up(volatile uint16_t *, uint16_t, uint16_t);
#else
static uint16_t _atomic_cas_16_up(volatile uint16_t *, uint16_t, uint16_t);
#endif
static uint16_t (*_atomic_cas_16_fn)(volatile uint16_t *, uint16_t, uint16_t) =
    _atomic_cas_16_up;
RAS_DECL(_atomic_cas_16);

#ifdef __HAVE_ASM_ATOMIC_CAS_8_UP
extern uint8_t _atomic_cas_8_up(volatile uint8_t *, uint8_t, uint8_t);
#else
static uint8_t _atomic_cas_8_up(volatile uint8_t *, uint8_t, uint8_t);
#endif
static uint8_t (*_atomic_cas_8_fn)(volatile uint8_t *, uint8_t, uint8_t) =
    _atomic_cas_8_up;
RAS_DECL(_atomic_cas_8);

#ifndef __HAVE_ASM_ATOMIC_CAS_UP
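/*
 * Restartable atomic sequence: if the thread is preempted between
 * RAS_START and RAS_END, the kernel restarts it at RAS_START, so on a
 * uniprocessor the load/compare/store below behaves atomically.  This
 * is also why the file wants -fno-reorder-blocks: the sequence must
 * stay contiguous in the text segment.
 */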
static uint32_t
_atomic_cas_up(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{
	uint32_t ret;

	RAS_START(_atomic_cas);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas);

	return ret;
}
#endif

#if defined(__HAVE_ATOMIC_CAS_64_UP) && !defined(__HAVE_ASM_ATOMIC_CAS_64_UP)
static uint64_t
_atomic_cas_64_up(volatile uint64_t *ptr, uint64_t old, uint64_t new)
{
	uint64_t ret;

	RAS_START(_atomic_cas_64);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_64);

	return ret;
}
#endif

#ifndef __HAVE_ASM_ATOMIC_CAS_16_UP
static uint16_t
_atomic_cas_16_up(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{
	uint16_t ret;

	RAS_START(_atomic_cas_16);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_16);

	return ret;
}
#endif

#ifndef __HAVE_ASM_ATOMIC_CAS_8_UP
static uint8_t
_atomic_cas_8_up(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{
	uint8_t ret;

	RAS_START(_atomic_cas_8);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_8);

	return ret;
}
#endif

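/*
 * Multiprocessor variants: a restartable sequence cannot protect
 * against other CPUs, so serialize each CAS through one of the hashed
 * spin locks above instead.
 */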
static uint32_t
_atomic_cas_mp(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{
	__cpu_simple_lock_t *lock;
	uint32_t ret;

	lock = &atomic_locks[HASH(ptr)];
	__cpu_simple_lock(lock);
	ret = *ptr;
	if (__predict_true(ret == old)) {
		*ptr = new;
	}
	__cpu_simple_unlock(lock);

	return ret;
}

#ifdef __HAVE_ATOMIC_CAS_64_UP
static uint64_t
_atomic_cas_64_mp(volatile uint64_t *ptr, uint64_t old, uint64_t new)
{
	__cpu_simple_lock_t *lock;
	uint64_t ret;

	lock = &atomic_locks[HASH(ptr)];
	__cpu_simple_lock(lock);
	ret = *ptr;
	if (__predict_true(ret == old)) {
		*ptr = new;
	}
	__cpu_simple_unlock(lock);

	return ret;
}
#endif

static uint16_t
_atomic_cas_16_mp(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{
	__cpu_simple_lock_t *lock;
	uint16_t ret;

	lock = &atomic_locks[HASH(ptr)];
	__cpu_simple_lock(lock);
	ret = *ptr;
	if (__predict_true(ret == old)) {
		*ptr = new;
	}
	__cpu_simple_unlock(lock);

	return ret;
}

static uint8_t
_atomic_cas_8_mp(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{
	__cpu_simple_lock_t *lock;
	uint8_t ret;

	lock = &atomic_locks[HASH(ptr)];
	__cpu_simple_lock(lock);
	ret = *ptr;
	if (__predict_true(ret == old)) {
		*ptr = new;
	}
	__cpu_simple_unlock(lock);

	return ret;
}

uint32_t
_atomic_cas_32(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{

	return (*_atomic_cas_fn)(ptr, old, new);
}
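/*
 * Other atomic primitives can be layered on this CAS with a retry
 * loop; e.g. a sketch (illustrative, not part of this file) of an
 * atomic add built on the primitive:
 *
 *	uint32_t old;
 *	do {
 *		old = *ptr;
 *	} while (_atomic_cas_32(ptr, old, old + delta) != old);
 */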

#ifdef __HAVE_ATOMIC_CAS_64_UP
uint64_t _atomic_cas_64(volatile uint64_t *, uint64_t, uint64_t);

uint64_t
_atomic_cas_64(volatile uint64_t *ptr, uint64_t old, uint64_t new)
{

	return (*_atomic_cas_64_fn)(ptr, old, new);
}
#endif

uint16_t
_atomic_cas_16(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{

	return (*_atomic_cas_16_fn)(ptr, old, new);
}

uint8_t _atomic_cas_8(volatile uint8_t *, uint8_t, uint8_t);

uint8_t
_atomic_cas_8(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{

	return (*_atomic_cas_8_fn)(ptr, old, new);
}

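/*
 * Run once at startup: default to the multiprocessor (spin lock)
 * variants, then downgrade to the cheaper restartable sequences only
 * if the system has a single CPU and each RAS installs successfully.
 */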
void __section(".text.startup") __attribute__ ((__visibility__("hidden")))
__libc_atomic_init(void)
{
	int ncpu, mib[2];
	size_t len;

	_atomic_cas_fn = _atomic_cas_mp;
#ifdef __HAVE_ATOMIC_CAS_64_UP
	_atomic_cas_64_fn = _atomic_cas_64_mp;
#endif
	_atomic_cas_16_fn = _atomic_cas_16_mp;
	_atomic_cas_8_fn = _atomic_cas_8_mp;

	mib[0] = CTL_HW;
	mib[1] = HW_NCPU;
	len = sizeof(ncpu);
	if (sysctl(mib, 2, &ncpu, &len, NULL, 0) == -1)
		return;
	if (ncpu > 1)
		return;

	if (rasctl(RAS_ADDR(_atomic_cas), RAS_SIZE(_atomic_cas),
	    RAS_INSTALL) == 0) {
		_atomic_cas_fn = _atomic_cas_up;
	}

#ifdef __HAVE_ATOMIC_CAS_64_UP
	if (rasctl(RAS_ADDR(_atomic_cas_64), RAS_SIZE(_atomic_cas_64),
	    RAS_INSTALL) == 0) {
		_atomic_cas_64_fn = _atomic_cas_64_up;
	}
#endif

	if (rasctl(RAS_ADDR(_atomic_cas_16), RAS_SIZE(_atomic_cas_16),
	    RAS_INSTALL) == 0) {
		_atomic_cas_16_fn = _atomic_cas_16_up;
	}

	if (rasctl(RAS_ADDR(_atomic_cas_8), RAS_SIZE(_atomic_cas_8),
	    RAS_INSTALL) == 0) {
		_atomic_cas_8_fn = _atomic_cas_8_up;
	}
}

#undef atomic_cas_32
#undef atomic_cas_uint
#undef atomic_cas_ulong
#undef atomic_cas_ptr
#undef atomic_cas_32_ni
#undef atomic_cas_uint_ni
#undef atomic_cas_ulong_ni
#undef atomic_cas_ptr_ni

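/*
 * Since this file is only used on 32-bit platforms, every 32-bit CAS
 * flavour (int, long, pointer, and their _ni variants) can simply
 * alias _atomic_cas_32.
 */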
atomic_op_alias(atomic_cas_32,_atomic_cas_32)
atomic_op_alias(atomic_cas_uint,_atomic_cas_32)
__strong_alias(_atomic_cas_uint,_atomic_cas_32)
atomic_op_alias(atomic_cas_ulong,_atomic_cas_32)
__strong_alias(_atomic_cas_ulong,_atomic_cas_32)
atomic_op_alias(atomic_cas_ptr,_atomic_cas_32)
__strong_alias(_atomic_cas_ptr,_atomic_cas_32)

atomic_op_alias(atomic_cas_32_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_32_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_uint_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_uint_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_ulong_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_ulong_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_ptr_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_ptr_ni,_atomic_cas_32)

//atomic_op_alias(atomic_cas_16,_atomic_cas_16)
//atomic_op_alias(atomic_cas_16_ni,_atomic_cas_16)
//atomic_op_alias(atomic_cas_8,_atomic_cas_8)
//atomic_op_alias(atomic_cas_8_ni,_atomic_cas_8)
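/*
 * Also wire the compiler's __sync_val_compare_and_swap_<N> builtins
 * to these CAS routines.
 */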
#ifdef __HAVE_ATOMIC_CAS_64_UP
atomic_op_alias(atomic_cas_64_ni,_atomic_cas_64)
__strong_alias(_atomic_cas_64_ni,_atomic_cas_64)
crt_alias(__sync_val_compare_and_swap_8,_atomic_cas_64)
#endif
crt_alias(__sync_val_compare_and_swap_4,_atomic_cas_32)
crt_alias(__sync_val_compare_and_swap_2,_atomic_cas_16)
crt_alias(__sync_val_compare_and_swap_1,_atomic_cas_8)