/*	$NetBSD: atomic_init_testset.c,v 1.18 2024/01/19 19:33:49 christos Exp $	*/

/*-
 * Copyright (c) 2008 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * libc glue for atomic operations where the hardware does not provide
 * compare-and-swap.  It's assumed that this will only be used on 32-bit
 * platforms.
 *
 * This should be compiled with '-fno-reorder-blocks -fomit-frame-pointer'
 * if using gcc.
 */
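
/*
 * Overview: the exported compare-and-swap entry points below dispatch
 * through function pointers.  They start out pointing at lock-based
 * versions that are safe on multiprocessors; __libc_atomic_init()
 * switches them to cheaper restartable-atomic-sequence (RAS) versions
 * when the system turns out to be a uniprocessor.
 *
 * Illustrative sketch only (not part of this file): given a
 * volatile uint32_t *p, callers typically build higher-level atomic
 * operations out of a CAS retry loop, e.g.
 *
 *	uint32_t old;
 *	do {
 *		old = *p;
 *	} while (_atomic_cas_32(p, old, old + 1) != old);
 */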

#include <sys/cdefs.h>
__RCSID("$NetBSD: atomic_init_testset.c,v 1.18 2024/01/19 19:33:49 christos Exp $");

#include "atomic_op_namespace.h"

#include <sys/types.h>
#include <sys/atomic.h>
#include <sys/lock.h>
#include <sys/ras.h>
#include <sys/sysctl.h>

#include <string.h>

#define	I2	__SIMPLELOCK_UNLOCKED, __SIMPLELOCK_UNLOCKED,
#define	I16	I2 I2 I2 I2 I2 I2 I2 I2
#define	I128	I16 I16 I16 I16 I16 I16 I16 I16

static __cpu_simple_lock_t atomic_locks[128] = { I128 };
/*
 * Pick a lock out of the above array depending on the object address
 * passed.  Most variables used atomically will not be in the same
 * cacheline - and if they are, using the same lock is fine.
 */
#define	HASH(PTR)	(((uintptr_t)(PTR) >> 3) & 127)
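/*
 * For example, HASH(0x1000) == 0 and HASH(0x1008) == 1, so two objects
 * eight bytes apart take adjacent locks, while addresses 1024 bytes
 * apart wrap around to the same lock.
 */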

#ifdef __HAVE_ASM_ATOMIC_CAS_UP
extern uint32_t _atomic_cas_up(volatile uint32_t *, uint32_t, uint32_t);
#else
static uint32_t _atomic_cas_up(volatile uint32_t *, uint32_t, uint32_t);
#endif
static uint32_t (*_atomic_cas_fn)(volatile uint32_t *, uint32_t, uint32_t) =
    _atomic_cas_up;
RAS_DECL(_atomic_cas);

#ifdef __HAVE_ATOMIC_CAS_64_UP
#ifdef __HAVE_ASM_ATOMIC_CAS_64_UP
extern uint64_t _atomic_cas_64_up(volatile uint64_t *, uint64_t, uint64_t);
#else
static uint64_t _atomic_cas_64_up(volatile uint64_t *, uint64_t, uint64_t);
#endif
static uint64_t (*_atomic_cas_64_fn)(volatile uint64_t *, uint64_t, uint64_t) =
    _atomic_cas_64_up;
RAS_DECL(_atomic_cas_64);
#endif

#ifdef __HAVE_ASM_ATOMIC_CAS_16_UP
extern uint16_t _atomic_cas_16_up(volatile uint16_t *, uint16_t, uint16_t);
#else
static uint16_t _atomic_cas_16_up(volatile uint16_t *, uint16_t, uint16_t);
#endif
static uint16_t (*_atomic_cas_16_fn)(volatile uint16_t *, uint16_t, uint16_t) =
    _atomic_cas_16_up;
RAS_DECL(_atomic_cas_16);

#ifdef __HAVE_ASM_ATOMIC_CAS_8_UP
extern uint8_t _atomic_cas_8_up(volatile uint8_t *, uint8_t, uint8_t);
#else
static uint8_t _atomic_cas_8_up(volatile uint8_t *, uint8_t, uint8_t);
#endif
static uint8_t (*_atomic_cas_8_fn)(volatile uint8_t *, uint8_t, uint8_t) =
    _atomic_cas_8_up;
RAS_DECL(_atomic_cas_8);

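/*
 * The C fallbacks below are atomic on uniprocessors only: each runs its
 * load/compare/store inside a restartable atomic sequence, so if the
 * thread is preempted before reaching RAS_END, the kernel resumes it at
 * RAS_START instead of in the middle of the sequence (see rasctl(2)).
 */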
#ifndef __HAVE_ASM_ATOMIC_CAS_UP
static uint32_t
_atomic_cas_up(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{
	uint32_t ret;

	RAS_START(_atomic_cas);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas);

	return ret;
}
#endif

#if defined(__HAVE_ATOMIC_CAS_64_UP) && !defined(__HAVE_ASM_ATOMIC_CAS_64_UP)
static uint64_t
_atomic_cas_64_up(volatile uint64_t *ptr, uint64_t old, uint64_t new)
{
	uint64_t ret;

	RAS_START(_atomic_cas_64);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_64);

	return ret;
}
#endif

#ifndef __HAVE_ASM_ATOMIC_CAS_16_UP
static uint16_t
_atomic_cas_16_up(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{
	uint16_t ret;

	RAS_START(_atomic_cas_16);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_16);

	return ret;
}
#endif

#ifndef __HAVE_ASM_ATOMIC_CAS_8_UP
static uint8_t
_atomic_cas_8_up(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{
	uint8_t ret;

	RAS_START(_atomic_cas_8);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_8);

	return ret;
}
#endif

static uint32_t
_atomic_cas_mp(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{
	__cpu_simple_lock_t *lock;
	uint32_t ret;

	lock = &atomic_locks[HASH(ptr)];
	__cpu_simple_lock(lock);
	ret = *ptr;
	if (__predict_true(ret == old)) {
		*ptr = new;
	}
	__cpu_simple_unlock(lock);

	return ret;
}

#ifdef __HAVE_ATOMIC_CAS_64_UP
static uint64_t
_atomic_cas_64_mp(volatile uint64_t *ptr, uint64_t old, uint64_t new)
{
	__cpu_simple_lock_t *lock;
	uint64_t ret;

	lock = &atomic_locks[HASH(ptr)];
	__cpu_simple_lock(lock);
	ret = *ptr;
	if (__predict_true(ret == old)) {
		*ptr = new;
	}
	__cpu_simple_unlock(lock);

	return ret;
}
#endif

static uint16_t
_atomic_cas_16_mp(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{
	__cpu_simple_lock_t *lock;
	uint16_t ret;

	lock = &atomic_locks[HASH(ptr)];
	__cpu_simple_lock(lock);
	ret = *ptr;
	if (__predict_true(ret == old)) {
		*ptr = new;
	}
	__cpu_simple_unlock(lock);

	return ret;
}

static uint8_t
_atomic_cas_8_mp(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{
	__cpu_simple_lock_t *lock;
	uint8_t ret;

	lock = &atomic_locks[HASH(ptr)];
	__cpu_simple_lock(lock);
	ret = *ptr;
	if (__predict_true(ret == old)) {
		*ptr = new;
	}
	__cpu_simple_unlock(lock);

	return ret;
}

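/*
 * Exported entry points: indirect through whichever implementation
 * __libc_atomic_init() left in the function pointers.
 */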
uint32_t
_atomic_cas_32(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{

	return (*_atomic_cas_fn)(ptr, old, new);
}

#ifdef __HAVE_ATOMIC_CAS_64_UP
uint64_t _atomic_cas_64(volatile uint64_t *, uint64_t, uint64_t);

uint64_t
_atomic_cas_64(volatile uint64_t *ptr, uint64_t old, uint64_t new)
{

	return (*_atomic_cas_64_fn)(ptr, old, new);
}
#endif

uint16_t
_atomic_cas_16(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{

	return (*_atomic_cas_16_fn)(ptr, old, new);
}

uint8_t _atomic_cas_8(volatile uint8_t *, uint8_t, uint8_t);

uint8_t
_atomic_cas_8(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{

	return (*_atomic_cas_8_fn)(ptr, old, new);
}

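/*
 * Called early in process startup.  Assume a multiprocessor until
 * sysctl(hw.ncpu) proves otherwise; on a uniprocessor, switch each
 * sequence to its RAS version, keeping the lock-based fallback for any
 * sequence that rasctl(2) declines to install.
 */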
void __section(".text.startup") __attribute__ ((__visibility__("hidden")))
__libc_atomic_init(void)
{
	int ncpu, mib[2];
	size_t len;

	_atomic_cas_fn = _atomic_cas_mp;
#ifdef __HAVE_ATOMIC_CAS_64_UP
	_atomic_cas_64_fn = _atomic_cas_64_mp;
#endif
	_atomic_cas_16_fn = _atomic_cas_16_mp;
	_atomic_cas_8_fn = _atomic_cas_8_mp;

	mib[0] = CTL_HW;
	mib[1] = HW_NCPU;
	len = sizeof(ncpu);
	if (sysctl(mib, 2, &ncpu, &len, NULL, 0) == -1)
		return;
	if (ncpu > 1)
		return;

	if (rasctl(RAS_ADDR(_atomic_cas), RAS_SIZE(_atomic_cas),
	    RAS_INSTALL) == 0) {
		_atomic_cas_fn = _atomic_cas_up;
	}

#ifdef __HAVE_ATOMIC_CAS_64_UP
	if (rasctl(RAS_ADDR(_atomic_cas_64), RAS_SIZE(_atomic_cas_64),
	    RAS_INSTALL) == 0) {
		_atomic_cas_64_fn = _atomic_cas_64_up;
	}
#endif

	if (rasctl(RAS_ADDR(_atomic_cas_16), RAS_SIZE(_atomic_cas_16),
	    RAS_INSTALL) == 0) {
		_atomic_cas_16_fn = _atomic_cas_16_up;
	}

	if (rasctl(RAS_ADDR(_atomic_cas_8), RAS_SIZE(_atomic_cas_8),
	    RAS_INSTALL) == 0) {
		_atomic_cas_8_fn = _atomic_cas_8_up;
	}
}

#undef atomic_cas_32
#undef atomic_cas_uint
#undef atomic_cas_ulong
#undef atomic_cas_ptr
#undef atomic_cas_32_ni
#undef atomic_cas_uint_ni
#undef atomic_cas_ulong_ni
#undef atomic_cas_ptr_ni

atomic_op_alias(atomic_cas_32,_atomic_cas_32)
atomic_op_alias(atomic_cas_uint,_atomic_cas_32)
__strong_alias(_atomic_cas_uint,_atomic_cas_32)
atomic_op_alias(atomic_cas_ulong,_atomic_cas_32)
__strong_alias(_atomic_cas_ulong,_atomic_cas_32)
atomic_op_alias(atomic_cas_ptr,_atomic_cas_32)
__strong_alias(_atomic_cas_ptr,_atomic_cas_32)

atomic_op_alias(atomic_cas_32_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_32_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_uint_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_uint_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_ulong_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_ulong_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_ptr_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_ptr_ni,_atomic_cas_32)

//atomic_op_alias(atomic_cas_16,_atomic_cas_16)
//atomic_op_alias(atomic_cas_16_ni,_atomic_cas_16)
//atomic_op_alias(atomic_cas_8,_atomic_cas_8)
//atomic_op_alias(atomic_cas_8_ni,_atomic_cas_8)
#ifdef __HAVE_ATOMIC_CAS_64_UP
atomic_op_alias(atomic_cas_64_ni,_atomic_cas_64)
__strong_alias(_atomic_cas_64_ni,_atomic_cas_64)
crt_alias(__sync_val_compare_and_swap_8,_atomic_cas_64)
#endif
crt_alias(__sync_val_compare_and_swap_4,_atomic_cas_32)
crt_alias(__sync_val_compare_and_swap_2,_atomic_cas_16)
crt_alias(__sync_val_compare_and_swap_1,_atomic_cas_8)