/*	$NetBSD: atomic_init_testset.c,v 1.14 2014/02/24 17:18:27 martin Exp $	*/

/*-
 * Copyright (c) 2008 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * libc glue for atomic operations where the hardware does not provide
 * compare-and-swap.  It's assumed that this will only be used on 32-bit
 * platforms.
 *
 * This should be compiled with '-fno-reorder-blocks -fomit-frame-pointer'
 * if using gcc.
 */

#include <sys/cdefs.h>
__RCSID("$NetBSD: atomic_init_testset.c,v 1.14 2014/02/24 17:18:27 martin Exp $");

#include "atomic_op_namespace.h"

#include <sys/types.h>
#include <sys/atomic.h>
#include <sys/lock.h>
#include <sys/ras.h>
#include <sys/sysctl.h>

#include <string.h>

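/*
 * Initializer helpers: expand to 2, 16 and 128 copies of
 * __SIMPLELOCK_UNLOCKED, so every entry of the lock array below starts
 * out released.
 */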
#define	I2	__SIMPLELOCK_UNLOCKED, __SIMPLELOCK_UNLOCKED,
#define	I16	I2 I2 I2 I2 I2 I2 I2 I2
#define	I128	I16 I16 I16 I16 I16 I16 I16 I16

static __cpu_simple_lock_t atomic_locks[128] = { I128 };
/*
 * Pick a lock out of the above array depending on the object address
 * passed.  Most variables used atomically will not be in the same
 * cache line; if they are, using the same lock is fine.
 */
#define HASH(PTR)	(((uintptr_t)(PTR) >> 3) & 127)

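/*
 * Each CAS width is reached through a function pointer.  The pointers
 * start out at the uniprocessor (RAS based) implementations; at startup
 * __libc_atomic_init() switches them to the spinlock implementations
 * and, on a uniprocessor, back again once the restartable atomic
 * sequences have been registered.
 */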
#ifdef	__HAVE_ASM_ATOMIC_CAS_UP
extern uint32_t _atomic_cas_up(volatile uint32_t *, uint32_t, uint32_t);
#else
static uint32_t _atomic_cas_up(volatile uint32_t *, uint32_t, uint32_t);
#endif
static uint32_t (*_atomic_cas_fn)(volatile uint32_t *, uint32_t, uint32_t) =
    _atomic_cas_up;
RAS_DECL(_atomic_cas);

#ifdef	__HAVE_ASM_ATOMIC_CAS_16_UP
extern uint16_t _atomic_cas_16_up(volatile uint16_t *, uint16_t, uint16_t);
#else
static uint16_t _atomic_cas_16_up(volatile uint16_t *, uint16_t, uint16_t);
#endif
static uint16_t (*_atomic_cas_16_fn)(volatile uint16_t *, uint16_t, uint16_t) =
    _atomic_cas_16_up;
RAS_DECL(_atomic_cas_16);

#ifdef	__HAVE_ASM_ATOMIC_CAS_8_UP
extern uint8_t _atomic_cas_8_up(volatile uint8_t *, uint8_t, uint8_t);
#else
static uint8_t _atomic_cas_8_up(volatile uint8_t *, uint8_t, uint8_t);
#endif
static uint8_t (*_atomic_cas_8_fn)(volatile uint8_t *, uint8_t, uint8_t) =
    _atomic_cas_8_up;
RAS_DECL(_atomic_cas_8);

void	__libc_atomic_init(void) __attribute__ ((visibility("hidden")));

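/*
 * Uniprocessor variants, built on restartable atomic sequences (see
 * rasctl(2)): if the thread is interrupted anywhere between RAS_START
 * and RAS_END, the kernel restarts it at RAS_START, so the load,
 * compare and store below can never be observed half-done.  An
 * architecture may supply these in assembly instead
 * (__HAVE_ASM_ATOMIC_CAS_*_UP).
 */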
#ifndef	__HAVE_ASM_ATOMIC_CAS_UP
static uint32_t
_atomic_cas_up(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{
	uint32_t ret;

	RAS_START(_atomic_cas);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas);

	return ret;
}
#endif

#ifndef	__HAVE_ASM_ATOMIC_CAS_16_UP
static uint16_t
_atomic_cas_16_up(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{
	uint16_t ret;

	RAS_START(_atomic_cas_16);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_16);

	return ret;
}
#endif

#ifndef	__HAVE_ASM_ATOMIC_CAS_8_UP
static uint8_t
_atomic_cas_8_up(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{
	uint8_t ret;

	RAS_START(_atomic_cas_8);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_8);

	return ret;
}
#endif

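/*
 * Multiprocessor variants: serialize through one of the spinlocks in
 * the hashed array above.  The spinlock itself only relies on the
 * hardware test-and-set primitive this file assumes is available.
 */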
static uint32_t
_atomic_cas_mp(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{
	__cpu_simple_lock_t *lock;
	uint32_t ret;

	lock = &atomic_locks[HASH(ptr)];
	__cpu_simple_lock(lock);
	ret = *ptr;
	if (__predict_true(ret == old)) {
		*ptr = new;
	}
	__cpu_simple_unlock(lock);

	return ret;
}

static uint16_t
_atomic_cas_16_mp(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{
	__cpu_simple_lock_t *lock;
	uint16_t ret;

	lock = &atomic_locks[HASH(ptr)];
	__cpu_simple_lock(lock);
	ret = *ptr;
	if (__predict_true(ret == old)) {
		*ptr = new;
	}
	__cpu_simple_unlock(lock);

	return ret;
}

static uint8_t
_atomic_cas_8_mp(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{
	__cpu_simple_lock_t *lock;
	uint8_t ret;

	lock = &atomic_locks[HASH(ptr)];
	__cpu_simple_lock(lock);
	ret = *ptr;
	if (__predict_true(ret == old)) {
		*ptr = new;
	}
	__cpu_simple_unlock(lock);

	return ret;
}

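/*
 * Exported compare-and-swap entry points: dispatch through the
 * function pointer selected by __libc_atomic_init().
 */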
uint32_t
_atomic_cas_32(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{

	return (*_atomic_cas_fn)(ptr, old, new);
}

uint16_t _atomic_cas_16(volatile uint16_t *, uint16_t, uint16_t);

uint16_t
_atomic_cas_16(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{

	return (*_atomic_cas_16_fn)(ptr, old, new);
}

uint8_t _atomic_cas_8(volatile uint8_t *, uint8_t, uint8_t);

uint8_t
_atomic_cas_8(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{

	return (*_atomic_cas_8_fn)(ptr, old, new);
}

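/*
 * Run from the startup code before main().  Assume the worst (a
 * multiprocessor) and point everything at the spinlock variants, then
 * query hw.ncpu; on a uniprocessor, register the restartable atomic
 * sequences and switch back to the lock-free variant for every width
 * whose registration succeeds.
 */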
void __section(".text.startup")
__libc_atomic_init(void)
{
	int ncpu, mib[2];
	size_t len;

	_atomic_cas_fn = _atomic_cas_mp;
	_atomic_cas_16_fn = _atomic_cas_16_mp;
	_atomic_cas_8_fn = _atomic_cas_8_mp;

	mib[0] = CTL_HW;
	mib[1] = HW_NCPU;
	len = sizeof(ncpu);
	if (sysctl(mib, 2, &ncpu, &len, NULL, 0) == -1)
		return;
	if (ncpu > 1)
		return;

	/* Uniprocessor: try to install the RAS for each width. */
	if (rasctl(RAS_ADDR(_atomic_cas), RAS_SIZE(_atomic_cas),
	    RAS_INSTALL) == 0) {
		_atomic_cas_fn = _atomic_cas_up;
	}

	if (rasctl(RAS_ADDR(_atomic_cas_16), RAS_SIZE(_atomic_cas_16),
	    RAS_INSTALL) == 0) {
		_atomic_cas_16_fn = _atomic_cas_16_up;
	}

	if (rasctl(RAS_ADDR(_atomic_cas_8), RAS_SIZE(_atomic_cas_8),
	    RAS_INSTALL) == 0) {
		_atomic_cas_8_fn = _atomic_cas_8_up;
	}
}

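/*
 * atomic_op_namespace.h maps the public atomic_cas_*() names onto the
 * reserved, underscore-prefixed implementations; drop that mapping here
 * so the public names can be emitted below as aliases of
 * _atomic_cas_32.  The crt_alias() entries back the GCC
 * __sync_val_compare_and_swap builtins of the matching width.
 */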
#undef atomic_cas_32
#undef atomic_cas_uint
#undef atomic_cas_ulong
#undef atomic_cas_ptr
#undef atomic_cas_32_ni
#undef atomic_cas_uint_ni
#undef atomic_cas_ulong_ni
#undef atomic_cas_ptr_ni

atomic_op_alias(atomic_cas_32,_atomic_cas_32)
atomic_op_alias(atomic_cas_uint,_atomic_cas_32)
__strong_alias(_atomic_cas_uint,_atomic_cas_32)
atomic_op_alias(atomic_cas_ulong,_atomic_cas_32)
__strong_alias(_atomic_cas_ulong,_atomic_cas_32)
atomic_op_alias(atomic_cas_ptr,_atomic_cas_32)
__strong_alias(_atomic_cas_ptr,_atomic_cas_32)

atomic_op_alias(atomic_cas_32_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_32_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_uint_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_uint_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_ulong_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_ulong_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_ptr_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_ptr_ni,_atomic_cas_32)

crt_alias(__sync_val_compare_and_swap_4,_atomic_cas_32)
crt_alias(__sync_val_compare_and_swap_2,_atomic_cas_16)
crt_alias(__sync_val_compare_and_swap_1,_atomic_cas_8)