/*	$NetBSD: atomic_init_testset.c,v 1.7.6.2 2014/05/22 11:26:30 yamt Exp $	*/

/*-
 * Copyright (c) 2008 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
28 1.1 ad
/*
 * libc glue for atomic operations where the hardware does not provide
 * compare-and-swap.  It's assumed that this will only be used on 32-bit
 * platforms.
 *
 * This should be compiled with '-fno-reorder-blocks -fomit-frame-pointer'
 * if using gcc.
 */
37 1.1 ad
38 1.1 ad #include <sys/cdefs.h>
39 1.7.6.2 yamt __RCSID("$NetBSD: atomic_init_testset.c,v 1.7.6.2 2014/05/22 11:26:30 yamt Exp $");
40 1.1 ad
41 1.1 ad #include "atomic_op_namespace.h"
42 1.1 ad
43 1.1 ad #include <sys/types.h>
44 1.1 ad #include <sys/atomic.h>
45 1.1 ad #include <sys/lock.h>
46 1.1 ad #include <sys/ras.h>
47 1.1 ad #include <sys/sysctl.h>
48 1.1 ad
49 1.1 ad #include <string.h>
50 1.1 ad
/*
 * Initializer helpers: I2 expands to two unlocked simple locks, so
 * I128 expands to 128 of them, fully initializing atomic_locks below.
 */
#define	I2	__SIMPLELOCK_UNLOCKED, __SIMPLELOCK_UNLOCKED,
#define	I16	I2 I2 I2 I2 I2 I2 I2 I2
#define	I128	I16 I16 I16 I16 I16 I16 I16 I16

/* Pool of spinlocks used by the multiprocessor (_mp) CAS paths. */
static __cpu_simple_lock_t atomic_locks[128] = { I128 };
/*
 * Pick a lock out of above array depending on the object address
 * passed. Most variables used atomically will not be in the same
 * cacheline - and if they are, using the same lock is fine.
 */
#define	HASH(PTR)	(((uintptr_t)(PTR) >> 3) & 127)
62 1.6 skrll
#ifdef	__HAVE_ASM_ATOMIC_CAS_UP
/* Machine-dependent assembly provides the 32-bit RAS-based CAS. */
extern uint32_t _atomic_cas_up(volatile uint32_t *, uint32_t, uint32_t);
#else
static uint32_t _atomic_cas_up(volatile uint32_t *, uint32_t, uint32_t);
#endif
/*
 * Dispatch pointer for 32-bit CAS: starts as the uniprocessor (RAS)
 * version and is repointed by __libc_atomic_init() below.
 */
static uint32_t (*_atomic_cas_fn)(volatile uint32_t *, uint32_t, uint32_t) =
    _atomic_cas_up;
RAS_DECL(_atomic_cas);

#ifdef	__HAVE_ATOMIC_CAS_64_UP
#ifdef	__HAVE_ASM_ATOMIC_CAS_64_UP
extern uint64_t _atomic_cas_64_up(volatile uint64_t *, uint64_t, uint64_t);
#else
static uint64_t _atomic_cas_64_up(volatile uint64_t *, uint64_t, uint64_t);
#endif
/* 64-bit CAS dispatch pointer (only on ports that opt in). */
static uint64_t (*_atomic_cas_64_fn)(volatile uint64_t *, uint64_t, uint64_t) =
    _atomic_cas_64_up;
RAS_DECL(_atomic_cas_64);
#endif

#ifdef	__HAVE_ASM_ATOMIC_CAS_16_UP
extern uint16_t _atomic_cas_16_up(volatile uint16_t *, uint16_t, uint16_t);
#else
static uint16_t _atomic_cas_16_up(volatile uint16_t *, uint16_t, uint16_t);
#endif
/* 16-bit CAS dispatch pointer. */
static uint16_t (*_atomic_cas_16_fn)(volatile uint16_t *, uint16_t, uint16_t) =
    _atomic_cas_16_up;
RAS_DECL(_atomic_cas_16);

#ifdef	__HAVE_ASM_ATOMIC_CAS_8_UP
extern uint8_t _atomic_cas_8_up(volatile uint8_t *, uint8_t, uint8_t);
#else
static uint8_t _atomic_cas_8_up(volatile uint8_t *, uint8_t, uint8_t);
#endif
/* 8-bit CAS dispatch pointer. */
static uint8_t (*_atomic_cas_8_fn)(volatile uint8_t *, uint8_t, uint8_t) =
    _atomic_cas_8_up;
RAS_DECL(_atomic_cas_8);

/* Called during libc startup; hidden so it is not exported from libc. */
void __libc_atomic_init(void) __attribute__ ((visibility("hidden")));
102 1.1 ad
#ifndef	__HAVE_ASM_ATOMIC_CAS_UP
/*
 * Uniprocessor 32-bit compare-and-swap, written as a restartable
 * atomic sequence (RAS): once registered via rasctl() in
 * __libc_atomic_init(), a preempted thread is restarted at RAS_START,
 * making the load/compare/store appear atomic on a single CPU.
 * The exact statement order between RAS_START and RAS_END must be
 * preserved (see the -fno-reorder-blocks note at the top of the file).
 */
static uint32_t
_atomic_cas_up(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{
	uint32_t ret;

	RAS_START(_atomic_cas);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		/* Mismatch: *ptr unchanged; return the observed value. */
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas);

	return ret;
}
#endif
120 1.1 ad
#if defined(__HAVE_ATOMIC_CAS_64_UP) && !defined(__HAVE_ASM_ATOMIC_CAS_64_UP)
/*
 * Uniprocessor 64-bit compare-and-swap as a restartable atomic
 * sequence; see _atomic_cas_up for the RAS rules.  Statement order
 * inside the sequence must not change.
 */
static uint64_t
_atomic_cas_64_up(volatile uint64_t *ptr, uint64_t old, uint64_t new)
{
	uint64_t ret;

	RAS_START(_atomic_cas_64);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		/* Mismatch: *ptr unchanged; return the observed value. */
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_64);

	return ret;
}
#endif
138 1.7.6.2 yamt
#ifndef	__HAVE_ASM_ATOMIC_CAS_16_UP
/*
 * Uniprocessor 16-bit compare-and-swap as a restartable atomic
 * sequence; see _atomic_cas_up for the RAS rules.  Statement order
 * inside the sequence must not change.
 */
static uint16_t
_atomic_cas_16_up(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{
	uint16_t ret;

	RAS_START(_atomic_cas_16);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		/* Mismatch: *ptr unchanged; return the observed value. */
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_16);

	return ret;
}
#endif
156 1.7.6.2 yamt
#ifndef	__HAVE_ASM_ATOMIC_CAS_8_UP
/*
 * Uniprocessor 8-bit compare-and-swap as a restartable atomic
 * sequence; see _atomic_cas_up for the RAS rules.  Statement order
 * inside the sequence must not change.
 */
static uint8_t
_atomic_cas_8_up(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{
	uint8_t ret;

	RAS_START(_atomic_cas_8);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		/* Mismatch: *ptr unchanged; return the observed value. */
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_8);

	return ret;
}
#endif
174 1.7.6.2 yamt
175 1.1 ad static uint32_t
176 1.1 ad _atomic_cas_mp(volatile uint32_t *ptr, uint32_t old, uint32_t new)
177 1.1 ad {
178 1.1 ad __cpu_simple_lock_t *lock;
179 1.1 ad uint32_t ret;
180 1.1 ad
181 1.7.6.2 yamt lock = &atomic_locks[HASH(ptr)];
182 1.7.6.2 yamt __cpu_simple_lock(lock);
183 1.7.6.2 yamt ret = *ptr;
184 1.7.6.2 yamt if (__predict_true(ret == old)) {
185 1.7.6.2 yamt *ptr = new;
186 1.7.6.2 yamt }
187 1.7.6.2 yamt __cpu_simple_unlock(lock);
188 1.7.6.2 yamt
189 1.7.6.2 yamt return ret;
190 1.7.6.2 yamt }
191 1.7.6.2 yamt
#ifdef	__HAVE_ATOMIC_CAS_64_UP
/*
 * Multiprocessor 64-bit compare-and-swap; same hashed-spinlock scheme
 * as _atomic_cas_mp.
 */
static uint64_t
_atomic_cas_64_mp(volatile uint64_t *ptr, uint64_t old, uint64_t new)
{
	__cpu_simple_lock_t *l = &atomic_locks[HASH(ptr)];
	uint64_t observed;

	__cpu_simple_lock(l);
	observed = *ptr;
	if (__predict_true(observed == old))
		*ptr = new;
	__cpu_simple_unlock(l);

	return observed;
}
#endif
210 1.7.6.2 yamt
211 1.7.6.2 yamt static uint16_t
212 1.7.6.2 yamt _atomic_cas_16_mp(volatile uint16_t *ptr, uint16_t old, uint16_t new)
213 1.7.6.2 yamt {
214 1.7.6.2 yamt __cpu_simple_lock_t *lock;
215 1.7.6.2 yamt uint16_t ret;
216 1.7.6.2 yamt
217 1.7.6.2 yamt lock = &atomic_locks[HASH(ptr)];
218 1.7.6.2 yamt __cpu_simple_lock(lock);
219 1.7.6.2 yamt ret = *ptr;
220 1.7.6.2 yamt if (__predict_true(ret == old)) {
221 1.7.6.2 yamt *ptr = new;
222 1.7.6.2 yamt }
223 1.7.6.2 yamt __cpu_simple_unlock(lock);
224 1.7.6.2 yamt
225 1.7.6.2 yamt return ret;
226 1.7.6.2 yamt }
227 1.7.6.2 yamt
228 1.7.6.2 yamt static uint8_t
229 1.7.6.2 yamt _atomic_cas_8_mp(volatile uint8_t *ptr, uint8_t old, uint8_t new)
230 1.7.6.2 yamt {
231 1.7.6.2 yamt __cpu_simple_lock_t *lock;
232 1.7.6.2 yamt uint8_t ret;
233 1.7.6.2 yamt
234 1.7.6.2 yamt lock = &atomic_locks[HASH(ptr)];
235 1.1 ad __cpu_simple_lock(lock);
236 1.1 ad ret = *ptr;
237 1.1 ad if (__predict_true(ret == old)) {
238 1.1 ad *ptr = new;
239 1.1 ad }
240 1.1 ad __cpu_simple_unlock(lock);
241 1.1 ad
242 1.1 ad return ret;
243 1.1 ad }
244 1.1 ad
245 1.1 ad uint32_t
246 1.1 ad _atomic_cas_32(volatile uint32_t *ptr, uint32_t old, uint32_t new)
247 1.1 ad {
248 1.1 ad
249 1.1 ad return (*_atomic_cas_fn)(ptr, old, new);
250 1.1 ad }
251 1.1 ad
#ifdef	__HAVE_ATOMIC_CAS_64_UP
uint64_t _atomic_cas_64(volatile uint64_t *, uint64_t, uint64_t);

/* Public 64-bit CAS entry point; dispatches via _atomic_cas_64_fn. */
uint64_t
_atomic_cas_64(volatile uint64_t *ptr, uint64_t old, uint64_t new)
{

	return _atomic_cas_64_fn(ptr, old, new);
}
#endif
262 1.7.6.2 yamt
263 1.7.6.2 yamt uint16_t
264 1.7.6.2 yamt _atomic_cas_16(volatile uint16_t *ptr, uint16_t old, uint16_t new)
265 1.7.6.2 yamt {
266 1.7.6.2 yamt
267 1.7.6.2 yamt return (*_atomic_cas_16_fn)(ptr, old, new);
268 1.7.6.2 yamt }
269 1.7.6.2 yamt
270 1.7.6.2 yamt uint8_t _atomic_cas_8(volatile uint8_t *, uint8_t, uint8_t);
271 1.7.6.2 yamt
272 1.7.6.2 yamt uint8_t
273 1.7.6.2 yamt _atomic_cas_8(volatile uint8_t *ptr, uint8_t old, uint8_t new)
274 1.7.6.2 yamt {
275 1.7.6.2 yamt
276 1.7.6.2 yamt return (*_atomic_cas_8_fn)(ptr, old, new);
277 1.7.6.2 yamt }
278 1.7.6.2 yamt
279 1.7.6.2 yamt void __section(".text.startup")
280 1.2 ad __libc_atomic_init(void)
281 1.1 ad {
282 1.1 ad int ncpu, mib[2];
283 1.1 ad size_t len;
284 1.1 ad
285 1.1 ad _atomic_cas_fn = _atomic_cas_mp;
286 1.7.6.2 yamt #ifdef __HAVE_ATOMIC_CAS_64_UP
287 1.7.6.2 yamt _atomic_cas_64_fn = _atomic_cas_64_mp;
288 1.7.6.2 yamt #endif
289 1.7.6.2 yamt _atomic_cas_16_fn = _atomic_cas_16_mp;
290 1.7.6.2 yamt _atomic_cas_8_fn = _atomic_cas_8_mp;
291 1.1 ad
292 1.1 ad mib[0] = CTL_HW;
293 1.1 ad mib[1] = HW_NCPU;
294 1.1 ad len = sizeof(ncpu);
295 1.1 ad if (sysctl(mib, 2, &ncpu, &len, NULL, 0) == -1)
296 1.1 ad return;
297 1.1 ad if (ncpu > 1)
298 1.1 ad return;
299 1.1 ad if (rasctl(RAS_ADDR(_atomic_cas), RAS_SIZE(_atomic_cas),
300 1.1 ad RAS_INSTALL) == 0) {
301 1.1 ad _atomic_cas_fn = _atomic_cas_up;
302 1.1 ad return;
303 1.1 ad }
304 1.7.6.2 yamt
305 1.7.6.2 yamt #ifdef __HAVE_ATOMIC_CAS_64_UP
306 1.7.6.2 yamt if (rasctl(RAS_ADDR(_atomic_cas_64), RAS_SIZE(_atomic_cas_64),
307 1.7.6.2 yamt RAS_INSTALL) == 0) {
308 1.7.6.2 yamt _atomic_cas_64_fn = _atomic_cas_64_up;
309 1.7.6.2 yamt return;
310 1.7.6.2 yamt }
311 1.7.6.2 yamt #endif
312 1.7.6.2 yamt
313 1.7.6.2 yamt if (rasctl(RAS_ADDR(_atomic_cas_16), RAS_SIZE(_atomic_cas_16),
314 1.7.6.2 yamt RAS_INSTALL) == 0) {
315 1.7.6.2 yamt _atomic_cas_16_fn = _atomic_cas_16_up;
316 1.7.6.2 yamt return;
317 1.7.6.2 yamt }
318 1.7.6.2 yamt
319 1.7.6.2 yamt if (rasctl(RAS_ADDR(_atomic_cas_8), RAS_SIZE(_atomic_cas_8),
320 1.7.6.2 yamt RAS_INSTALL) == 0) {
321 1.7.6.2 yamt _atomic_cas_8_fn = _atomic_cas_8_up;
322 1.7.6.2 yamt return;
323 1.7.6.2 yamt }
324 1.1 ad }
325 1.1 ad
/*
 * Undo the namespace-renaming macros from atomic_op_namespace.h so the
 * alias definitions below attach to the real symbol names.
 */
#undef atomic_cas_32
#undef atomic_cas_uint
#undef atomic_cas_ulong
#undef atomic_cas_ptr
#undef atomic_cas_32_ni
#undef atomic_cas_uint_ni
#undef atomic_cas_ulong_ni
#undef atomic_cas_ptr_ni

/*
 * All int/long/pointer CAS variants alias the single 32-bit
 * implementation (this file is only used on 32-bit platforms; see the
 * comment at the top).
 */
atomic_op_alias(atomic_cas_32,_atomic_cas_32)
atomic_op_alias(atomic_cas_uint,_atomic_cas_32)
__strong_alias(_atomic_cas_uint,_atomic_cas_32)
atomic_op_alias(atomic_cas_ulong,_atomic_cas_32)
__strong_alias(_atomic_cas_ulong,_atomic_cas_32)
atomic_op_alias(atomic_cas_ptr,_atomic_cas_32)
__strong_alias(_atomic_cas_ptr,_atomic_cas_32)

/* The _ni variants map to the same implementation here as well. */
atomic_op_alias(atomic_cas_32_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_32_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_uint_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_uint_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_ulong_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_ulong_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_ptr_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_ptr_ni,_atomic_cas_32)

/* GCC __sync builtins are wired straight to the CAS entry points. */
//atomic_op_alias(atomic_cas_16,_atomic_cas_16)
//atomic_op_alias(atomic_cas_16_ni,_atomic_cas_16)
//atomic_op_alias(atomic_cas_8,_atomic_cas_8)
//atomic_op_alias(atomic_cas_8_ni,_atomic_cas_8)
#ifdef __HAVE_ATOMIC_CAS_64_UP
//atomic_op_alias(atomic_cas_64_ni,_atomic_cas_64)
crt_alias(__sync_val_compare_and_swap_8,_atomic_cas_64)
#endif
crt_alias(__sync_val_compare_and_swap_4,_atomic_cas_32)
crt_alias(__sync_val_compare_and_swap_2,_atomic_cas_16)
crt_alias(__sync_val_compare_and_swap_1,_atomic_cas_8)
363