Home | History | Annotate | Line # | Download | only in atomic
atomic_init_testset.c revision 1.15.26.3
      1 /*	$NetBSD: atomic_init_testset.c,v 1.15.26.3 2020/04/21 19:37:48 martin Exp $	*/
      2 
      3 /*-
      4  * Copyright (c) 2008 The NetBSD Foundation, Inc.
      5  * All rights reserved.
      6  *
      7  * Redistribution and use in source and binary forms, with or without
      8  * modification, are permitted provided that the following conditions
      9  * are met:
     10  * 1. Redistributions of source code must retain the above copyright
     11  *    notice, this list of conditions and the following disclaimer.
     12  * 2. Redistributions in binary form must reproduce the above copyright
     13  *    notice, this list of conditions and the following disclaimer in the
     14  *    documentation and/or other materials provided with the distribution.
     15  *
     16  * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
     17  * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
     18  * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
     19  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
     20  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
     21  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
     22  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
     23  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
     24  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
     25  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
     26  * POSSIBILITY OF SUCH DAMAGE.
     27  */
     28 
     29 /*
     30  * libc glue for atomic operations where the hardware does not provide
     31  * compare-and-swap.  It's assumed that this will only be used on 32-bit
     32  * platforms.
     33  *
     34  * This should be compiled with '-fno-reorder-blocks -fomit-frame-pointer'
     35  * if using gcc.
     36  */
     37 
     38 #include <sys/cdefs.h>
     39 __RCSID("$NetBSD: atomic_init_testset.c,v 1.15.26.3 2020/04/21 19:37:48 martin Exp $");
     40 
     41 #include "atomic_op_namespace.h"
     42 
     43 #include <sys/types.h>
     44 #include <sys/atomic.h>
     45 #include <sys/lock.h>
     46 #include <sys/ras.h>
     47 #include <sys/sysctl.h>
     48 
     49 #include <string.h>
     50 
/*
 * Expand to 2/16/128 unlocked-lock initializers for the array below.
 */
#define	I2	__SIMPLELOCK_UNLOCKED, __SIMPLELOCK_UNLOCKED,
#define	I16	I2 I2 I2 I2 I2 I2 I2 I2
#define	I128	I16 I16 I16 I16 I16 I16 I16 I16

/* Hashed pool of spin locks backing the lock-based (MP) CAS paths. */
static __cpu_simple_lock_t atomic_locks[128] = { I128 };
/*
 * Pick a lock out of above array depending on the object address
 * passed. Most variables used atomically will not be in the same
 * cacheline - and if they are, using the same lock is fine.
 */
#define HASH(PTR)	(((uintptr_t)(PTR) >> 3) & 127)
     62 
/*
 * For each operand width there is a uniprocessor (_up) variant - either
 * provided in assembly by machine-dependent code, or defined below as a
 * restartable atomic sequence - plus a function pointer that initially
 * selects the _up variant and is re-pointed by __libc_atomic_init().
 */
#ifdef	__HAVE_ASM_ATOMIC_CAS_UP
extern uint32_t _atomic_cas_up(volatile uint32_t *, uint32_t, uint32_t);
#else
static uint32_t _atomic_cas_up(volatile uint32_t *, uint32_t, uint32_t);
#endif
static uint32_t (*_atomic_cas_fn)(volatile uint32_t *, uint32_t, uint32_t) =
    _atomic_cas_up;
RAS_DECL(_atomic_cas);

#ifdef	__HAVE_ATOMIC_CAS_64_UP
#ifdef	__HAVE_ASM_ATOMIC_CAS_64_UP
extern uint64_t _atomic_cas_64_up(volatile uint64_t *, uint64_t, uint64_t);
#else
static uint64_t _atomic_cas_64_up(volatile uint64_t *, uint64_t, uint64_t);
#endif
static uint64_t (*_atomic_cas_64_fn)(volatile uint64_t *, uint64_t, uint64_t) =
    _atomic_cas_64_up;
RAS_DECL(_atomic_cas_64);
#endif

#ifdef	__HAVE_ASM_ATOMIC_CAS_16_UP
extern uint16_t _atomic_cas_16_up(volatile uint16_t *, uint16_t, uint16_t);
#else
static uint16_t _atomic_cas_16_up(volatile uint16_t *, uint16_t, uint16_t);
#endif
static uint16_t (*_atomic_cas_16_fn)(volatile uint16_t *, uint16_t, uint16_t) =
    _atomic_cas_16_up;
RAS_DECL(_atomic_cas_16);

#ifdef	__HAVE_ASM_ATOMIC_CAS_8_UP
extern uint8_t _atomic_cas_8_up(volatile uint8_t *, uint8_t, uint8_t);
#else
static uint8_t _atomic_cas_8_up(volatile uint8_t *, uint8_t, uint8_t);
#endif
static uint8_t (*_atomic_cas_8_fn)(volatile uint8_t *, uint8_t, uint8_t) =
    _atomic_cas_8_up;
RAS_DECL(_atomic_cas_8);

/* Called at startup (see .text.startup below) to pick the right variants. */
void	__libc_atomic_init(void) __attribute__ ((visibility("hidden")));
    102 
#ifndef	__HAVE_ASM_ATOMIC_CAS_UP
/*
 * Uniprocessor 32-bit CAS as a restartable atomic sequence (RAS): if the
 * thread is preempted between RAS_START and RAS_END, the kernel restarts
 * it from RAS_START, so the load/compare/store is atomic on one CPU.
 * The early return exits the RAS region before the store on mismatch.
 * The exact statement order here is load-bearing; see the file-header
 * note about compiling with -fno-reorder-blocks.
 */
static uint32_t
_atomic_cas_up(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{
	uint32_t ret;

	RAS_START(_atomic_cas);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas);

	return ret;
}
#endif
    120 
#if defined(__HAVE_ATOMIC_CAS_64_UP) && !defined(__HAVE_ASM_ATOMIC_CAS_64_UP)
/*
 * Uniprocessor 64-bit CAS via a restartable atomic sequence; same
 * structure and ordering constraints as _atomic_cas_up() above.
 */
static uint64_t
_atomic_cas_64_up(volatile uint64_t *ptr, uint64_t old, uint64_t new)
{
	uint64_t ret;

	RAS_START(_atomic_cas_64);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_64);

	return ret;
}
#endif
    138 
#ifndef	__HAVE_ASM_ATOMIC_CAS_16_UP
/*
 * Uniprocessor 16-bit CAS via a restartable atomic sequence; same
 * structure and ordering constraints as _atomic_cas_up() above.
 */
static uint16_t
_atomic_cas_16_up(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{
	uint16_t ret;

	RAS_START(_atomic_cas_16);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_16);

	return ret;
}
#endif
    156 
#ifndef	__HAVE_ASM_ATOMIC_CAS_8_UP
/*
 * Uniprocessor 8-bit CAS via a restartable atomic sequence; same
 * structure and ordering constraints as _atomic_cas_up() above.
 */
static uint8_t
_atomic_cas_8_up(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{
	uint8_t ret;

	RAS_START(_atomic_cas_8);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_8);

	return ret;
}
#endif
    174 
    175 static uint32_t
    176 _atomic_cas_mp(volatile uint32_t *ptr, uint32_t old, uint32_t new)
    177 {
    178 	__cpu_simple_lock_t *lock;
    179 	uint32_t ret;
    180 
    181 	lock = &atomic_locks[HASH(ptr)];
    182 	__cpu_simple_lock(lock);
    183 	ret = *ptr;
    184 	if (__predict_true(ret == old)) {
    185 		*ptr = new;
    186 	}
    187 	__cpu_simple_unlock(lock);
    188 
    189 	return ret;
    190 }
    191 
#ifdef	__HAVE_ATOMIC_CAS_64_UP
/*
 * Multiprocessor 64-bit CAS: serialize on the address-hashed spin lock
 * and perform the compare-and-store under it.  Returns the value
 * observed before any store.
 */
static uint64_t
_atomic_cas_64_mp(volatile uint64_t *ptr, uint64_t old, uint64_t new)
{
	__cpu_simple_lock_t * const lp = &atomic_locks[HASH(ptr)];
	uint64_t observed;

	__cpu_simple_lock(lp);
	observed = *ptr;
	if (__predict_true(observed == old))
		*ptr = new;
	__cpu_simple_unlock(lp);

	return observed;
}
#endif
    210 
    211 static uint16_t
    212 _atomic_cas_16_mp(volatile uint16_t *ptr, uint16_t old, uint16_t new)
    213 {
    214 	__cpu_simple_lock_t *lock;
    215 	uint16_t ret;
    216 
    217 	lock = &atomic_locks[HASH(ptr)];
    218 	__cpu_simple_lock(lock);
    219 	ret = *ptr;
    220 	if (__predict_true(ret == old)) {
    221 		*ptr = new;
    222 	}
    223 	__cpu_simple_unlock(lock);
    224 
    225 	return ret;
    226 }
    227 
    228 static uint8_t
    229 _atomic_cas_8_mp(volatile uint8_t *ptr, uint8_t old, uint8_t new)
    230 {
    231 	__cpu_simple_lock_t *lock;
    232 	uint8_t ret;
    233 
    234 	lock = &atomic_locks[HASH(ptr)];
    235 	__cpu_simple_lock(lock);
    236 	ret = *ptr;
    237 	if (__predict_true(ret == old)) {
    238 		*ptr = new;
    239 	}
    240 	__cpu_simple_unlock(lock);
    241 
    242 	return ret;
    243 }
    244 
/*
 * Public 32-bit CAS entry point: dispatch through the pointer selected
 * by __libc_atomic_init() (uniprocessor RAS or lock-based MP variant).
 */
uint32_t
_atomic_cas_32(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{

	return _atomic_cas_fn(ptr, old, new);
}
    251 
#ifdef	__HAVE_ATOMIC_CAS_64_UP
/* Public 64-bit CAS entry point; dispatches through _atomic_cas_64_fn. */
uint64_t _atomic_cas_64(volatile uint64_t *, uint64_t, uint64_t);

uint64_t
_atomic_cas_64(volatile uint64_t *ptr, uint64_t old, uint64_t new)
{

	return _atomic_cas_64_fn(ptr, old, new);
}
#endif
    262 
/* Public 16-bit CAS entry point; dispatches through _atomic_cas_16_fn. */
uint16_t
_atomic_cas_16(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{

	return _atomic_cas_16_fn(ptr, old, new);
}
    269 
/* Public 8-bit CAS entry point; dispatches through _atomic_cas_8_fn. */
uint8_t _atomic_cas_8(volatile uint8_t *, uint8_t, uint8_t);

uint8_t
_atomic_cas_8(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{

	return _atomic_cas_8_fn(ptr, old, new);
}
    278 
/*
 * Startup hook: choose CAS implementations for this system.  Defaults
 * to the always-safe lock-based MP variants; on a uniprocessor it tries
 * to register the restartable atomic sequences and switch to the
 * lock-free _up variants.
 */
void __section(".text.startup")
__libc_atomic_init(void)
{
	int ncpu, mib[2];
	size_t len;

	/* Start from the conservative MP implementations. */
	_atomic_cas_fn = _atomic_cas_mp;
#ifdef	__HAVE_ATOMIC_CAS_64_UP
	_atomic_cas_64_fn = _atomic_cas_64_mp;
#endif
	_atomic_cas_16_fn = _atomic_cas_16_mp;
	_atomic_cas_8_fn = _atomic_cas_8_mp;

	/* If the CPU count cannot be determined, stay on the MP paths. */
	mib[0] = CTL_HW;
	mib[1] = HW_NCPU;
	len = sizeof(ncpu);
	if (sysctl(mib, 2, &ncpu, &len, NULL, 0) == -1)
		return;
	if (ncpu > 1)
		return;
	/*
	 * Uniprocessor only below this point.
	 *
	 * NOTE(review): each successful rasctl() call returns immediately,
	 * so at most one width is switched to its RAS-based _up variant per
	 * run and the remaining widths keep the (correct but slower) MP
	 * paths - confirm whether fallthrough to install all sequences was
	 * intended instead.
	 */
	if (rasctl(RAS_ADDR(_atomic_cas), RAS_SIZE(_atomic_cas),
	    RAS_INSTALL) == 0) {
		_atomic_cas_fn = _atomic_cas_up;
		return;
	}

#ifdef	__HAVE_ATOMIC_CAS_64_UP
	if (rasctl(RAS_ADDR(_atomic_cas_64), RAS_SIZE(_atomic_cas_64),
	    RAS_INSTALL) == 0) {
		_atomic_cas_64_fn = _atomic_cas_64_up;
		return;
	}
#endif

	if (rasctl(RAS_ADDR(_atomic_cas_16), RAS_SIZE(_atomic_cas_16),
	    RAS_INSTALL) == 0) {
		_atomic_cas_16_fn = _atomic_cas_16_up;
		return;
	}

	if (rasctl(RAS_ADDR(_atomic_cas_8), RAS_SIZE(_atomic_cas_8),
	    RAS_INSTALL) == 0) {
		_atomic_cas_8_fn = _atomic_cas_8_up;
		return;
	}
}
    325 
/*
 * Undo the renaming macros from atomic_op_namespace.h so the alias
 * definitions below can use the public names directly.
 */
#undef atomic_cas_32
#undef atomic_cas_uint
#undef atomic_cas_ulong
#undef atomic_cas_ptr
#undef atomic_cas_32_ni
#undef atomic_cas_uint_ni
#undef atomic_cas_ulong_ni
#undef atomic_cas_ptr_ni

/*
 * Export the public atomic_cas_*() family, all backed by the 32-bit
 * implementation (per the file-header note, this file is only used on
 * 32-bit platforms, so uint/ulong/ptr share the 32-bit width).
 */
atomic_op_alias(atomic_cas_32,_atomic_cas_32)
atomic_op_alias(atomic_cas_uint,_atomic_cas_32)
__strong_alias(_atomic_cas_uint,_atomic_cas_32)
atomic_op_alias(atomic_cas_ulong,_atomic_cas_32)
__strong_alias(_atomic_cas_ulong,_atomic_cas_32)
atomic_op_alias(atomic_cas_ptr,_atomic_cas_32)
__strong_alias(_atomic_cas_ptr,_atomic_cas_32)

/* The _ni ("no interlock") names map to the same implementations here. */
atomic_op_alias(atomic_cas_32_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_32_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_uint_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_uint_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_ulong_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_ulong_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_ptr_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_ptr_ni,_atomic_cas_32)

//atomic_op_alias(atomic_cas_16,_atomic_cas_16)
//atomic_op_alias(atomic_cas_16_ni,_atomic_cas_16)
//atomic_op_alias(atomic_cas_8,_atomic_cas_8)
//atomic_op_alias(atomic_cas_8_ni,_atomic_cas_8)
/* GCC/Clang __sync builtin entry points used by compiler-generated code. */
#ifdef	__HAVE_ATOMIC_CAS_64_UP
atomic_op_alias(atomic_cas_64_ni,_atomic_cas_64)
__strong_alias(_atomic_cas_64_ni,_atomic_cas_64)
crt_alias(__sync_val_compare_and_swap_8,_atomic_cas_64)
#endif
crt_alias(__sync_val_compare_and_swap_4,_atomic_cas_32)
crt_alias(__sync_val_compare_and_swap_2,_atomic_cas_16)
crt_alias(__sync_val_compare_and_swap_1,_atomic_cas_8)
    364