/* $NetBSD: atomic_init_testset.c,v 1.14 2014/02/24 17:18:27 martin Exp $ */

/*-
 * Copyright (c) 2008 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * libc glue for atomic operations where the hardware does not provide
 * compare-and-swap.  It's assumed that this will only be used on 32-bit
 * platforms.
 *
 * This should be compiled with '-fno-reorder-blocks -fomit-frame-pointer'
 * if using gcc.
 */
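
/*
 * Illustrative only (not part of the original file): a sketch of how a
 * caller typically uses the compare-and-swap glue provided below.  The
 * function and variable names here are made up for the example; the retry
 * loop relies only on atomic_cas_32() returning the value that was found
 * in *p, so the CAS succeeded exactly when that value equals 'old'.
 */
#if 0
static void
example_add_32(volatile uint32_t *p, uint32_t delta)
{
        uint32_t old;

        /* Re-read and retry until the CAS succeeds. */
        do {
                old = *p;
        } while (atomic_cas_32(p, old, old + delta) != old);
}
#endif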

#include <sys/cdefs.h>
__RCSID("$NetBSD: atomic_init_testset.c,v 1.14 2014/02/24 17:18:27 martin Exp $");

#include "atomic_op_namespace.h"

#include <sys/types.h>
#include <sys/atomic.h>
#include <sys/lock.h>
#include <sys/ras.h>
#include <sys/sysctl.h>

#include <string.h>

#define I2   __SIMPLELOCK_UNLOCKED, __SIMPLELOCK_UNLOCKED,
#define I16  I2 I2 I2 I2 I2 I2 I2 I2
#define I128 I16 I16 I16 I16 I16 I16 I16 I16

static __cpu_simple_lock_t atomic_locks[128] = { I128 };
/*
 * Pick a lock out of the above array depending on the object address
 * passed.  Most variables used atomically will not be in the same
 * cacheline - and if they are, using the same lock is fine.
 */
#define HASH(PTR) (((uintptr_t)(PTR) >> 3) & 127)
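/*
 * Worked example of the hash: with the shift of 3 and the mask of 127,
 * objects at 0x1000 and 0x1008 map to locks 0 and 1, while objects
 * 1024 bytes (128 * 8) apart wrap around onto the same lock.
 */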

#ifdef __HAVE_ASM_ATOMIC_CAS_UP
extern uint32_t _atomic_cas_up(volatile uint32_t *, uint32_t, uint32_t);
#else
static uint32_t _atomic_cas_up(volatile uint32_t *, uint32_t, uint32_t);
#endif
static uint32_t (*_atomic_cas_fn)(volatile uint32_t *, uint32_t, uint32_t) =
    _atomic_cas_up;
RAS_DECL(_atomic_cas);

#ifdef __HAVE_ASM_ATOMIC_CAS_16_UP
extern uint16_t _atomic_cas_16_up(volatile uint16_t *, uint16_t, uint16_t);
#else
static uint16_t _atomic_cas_16_up(volatile uint16_t *, uint16_t, uint16_t);
#endif
static uint16_t (*_atomic_cas_16_fn)(volatile uint16_t *, uint16_t, uint16_t) =
    _atomic_cas_16_up;
RAS_DECL(_atomic_cas_16);

#ifdef __HAVE_ASM_ATOMIC_CAS_8_UP
extern uint8_t _atomic_cas_8_up(volatile uint8_t *, uint8_t, uint8_t);
#else
static uint8_t _atomic_cas_8_up(volatile uint8_t *, uint8_t, uint8_t);
#endif
static uint8_t (*_atomic_cas_8_fn)(volatile uint8_t *, uint8_t, uint8_t) =
    _atomic_cas_8_up;
RAS_DECL(_atomic_cas_8);

void __libc_atomic_init(void) __attribute__ ((visibility("hidden")));

#ifndef __HAVE_ASM_ATOMIC_CAS_UP
static uint32_t
_atomic_cas_up(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{
        uint32_t ret;

        RAS_START(_atomic_cas);
        ret = *ptr;
        if (__predict_false(ret != old)) {
                return ret;
        }
        *ptr = new;
        RAS_END(_atomic_cas);

        return ret;
}
#endif

#ifndef __HAVE_ASM_ATOMIC_CAS_16_UP
static uint16_t
_atomic_cas_16_up(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{
        uint16_t ret;

        RAS_START(_atomic_cas_16);
        ret = *ptr;
        if (__predict_false(ret != old)) {
                return ret;
        }
        *ptr = new;
        RAS_END(_atomic_cas_16);

        return ret;
}
#endif

#ifndef __HAVE_ASM_ATOMIC_CAS_8_UP
static uint8_t
_atomic_cas_8_up(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{
        uint8_t ret;

        RAS_START(_atomic_cas_8);
        ret = *ptr;
        if (__predict_false(ret != old)) {
                return ret;
        }
        *ptr = new;
        RAS_END(_atomic_cas_8);

        return ret;
}
#endif

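/*
 * Multiprocessor variants.  Atomicity is provided by serializing the
 * operation on one of the spin locks in atomic_locks[], selected by
 * hashing the target address.  __libc_atomic_init() selects these
 * whenever more than one CPU is present or a RAS cannot be registered.
 */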
static uint32_t
_atomic_cas_mp(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{
        __cpu_simple_lock_t *lock;
        uint32_t ret;

        lock = &atomic_locks[HASH(ptr)];
        __cpu_simple_lock(lock);
        ret = *ptr;
        if (__predict_true(ret == old)) {
                *ptr = new;
        }
        __cpu_simple_unlock(lock);

        return ret;
}

static uint16_t
_atomic_cas_16_mp(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{
        __cpu_simple_lock_t *lock;
        uint16_t ret;

        lock = &atomic_locks[HASH(ptr)];
        __cpu_simple_lock(lock);
        ret = *ptr;
        if (__predict_true(ret == old)) {
                *ptr = new;
        }
        __cpu_simple_unlock(lock);

        return ret;
}

static uint8_t
_atomic_cas_8_mp(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{
        __cpu_simple_lock_t *lock;
        uint8_t ret;

        lock = &atomic_locks[HASH(ptr)];
        __cpu_simple_lock(lock);
        ret = *ptr;
        if (__predict_true(ret == old)) {
                *ptr = new;
        }
        __cpu_simple_unlock(lock);

        return ret;
}

uint32_t
_atomic_cas_32(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{

        return (*_atomic_cas_fn)(ptr, old, new);
}

uint16_t _atomic_cas_16(volatile uint16_t *, uint16_t, uint16_t);

uint16_t
_atomic_cas_16(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{

        return (*_atomic_cas_16_fn)(ptr, old, new);
}

uint8_t _atomic_cas_8(volatile uint8_t *, uint8_t, uint8_t);

uint8_t
_atomic_cas_8(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{

        return (*_atomic_cas_8_fn)(ptr, old, new);
}

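/*
 * Startup hook: pick the CAS implementation for this system.  The MP
 * (locked) variants are installed first as the safe default; if the
 * machine turns out to have a single CPU and the restartable atomic
 * sequences can be registered with rasctl(2), the cheaper RAS-based
 * variants are used instead.
 */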
void __section(".text.startup")
__libc_atomic_init(void)
{
        int ncpu, mib[2];
        size_t len;

        _atomic_cas_fn = _atomic_cas_mp;
        _atomic_cas_16_fn = _atomic_cas_16_mp;
        _atomic_cas_8_fn = _atomic_cas_8_mp;

        mib[0] = CTL_HW;
        mib[1] = HW_NCPU;
        len = sizeof(ncpu);
        if (sysctl(mib, 2, &ncpu, &len, NULL, 0) == -1)
                return;
        if (ncpu > 1)
                return;

        /*
         * Uniprocessor: register each restartable atomic sequence and,
         * where that succeeds, switch to the corresponding RAS-based
         * function.  Do not return after the first success, or the 16-
         * and 8-bit operations would be left on the slower locked path.
         */
        if (rasctl(RAS_ADDR(_atomic_cas), RAS_SIZE(_atomic_cas),
            RAS_INSTALL) == 0) {
                _atomic_cas_fn = _atomic_cas_up;
        }

        if (rasctl(RAS_ADDR(_atomic_cas_16), RAS_SIZE(_atomic_cas_16),
            RAS_INSTALL) == 0) {
                _atomic_cas_16_fn = _atomic_cas_16_up;
        }

        if (rasctl(RAS_ADDR(_atomic_cas_8), RAS_SIZE(_atomic_cas_8),
            RAS_INSTALL) == 0) {
                _atomic_cas_8_fn = _atomic_cas_8_up;
        }
}

#undef atomic_cas_32
#undef atomic_cas_uint
#undef atomic_cas_ulong
#undef atomic_cas_ptr
#undef atomic_cas_32_ni
#undef atomic_cas_uint_ni
#undef atomic_cas_ulong_ni
#undef atomic_cas_ptr_ni

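/*
 * Export the 32-bit CAS under the public atomic_ops(3) names.  Since this
 * file is only used on 32-bit platforms (see the comment at the top), the
 * uint, ulong and pointer variants, including the _ni flavours, are all
 * the same width and can alias _atomic_cas_32 directly.
 */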
atomic_op_alias(atomic_cas_32,_atomic_cas_32)
atomic_op_alias(atomic_cas_uint,_atomic_cas_32)
__strong_alias(_atomic_cas_uint,_atomic_cas_32)
atomic_op_alias(atomic_cas_ulong,_atomic_cas_32)
__strong_alias(_atomic_cas_ulong,_atomic_cas_32)
atomic_op_alias(atomic_cas_ptr,_atomic_cas_32)
__strong_alias(_atomic_cas_ptr,_atomic_cas_32)

atomic_op_alias(atomic_cas_32_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_32_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_uint_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_uint_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_ulong_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_ulong_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_ptr_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_ptr_ni,_atomic_cas_32)

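/*
 * Also make the CAS routines available under the names the compiler uses
 * for the legacy __sync builtins, so that compiler-generated calls such
 * as __sync_val_compare_and_swap_4() resolve to the functions above.
 */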
crt_alias(__sync_val_compare_and_swap_4,_atomic_cas_32)
crt_alias(__sync_val_compare_and_swap_2,_atomic_cas_16)
crt_alias(__sync_val_compare_and_swap_1,_atomic_cas_8)