/*	$NetBSD: atomic_init_testset.c,v 1.15.26.1 2019/06/10 21:41:07 christos Exp $	*/

/*-
 * Copyright (c) 2008 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * libc glue for atomic operations where the hardware does not provide
 * compare-and-swap.  It's assumed that this will only be used on 32-bit
 * platforms.
 *
 * This should be compiled with '-fno-reorder-blocks -fomit-frame-pointer'
 * if using gcc.
 */
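
/*
 * Strategy: every CAS goes through a function pointer.  At startup,
 * __libc_atomic_init() binds it either to a restartable atomic
 * sequence (uniprocessor) or to a hashed spin-lock implementation
 * (multiprocessor, or if RAS installation fails).  Either way the
 * caller sees plain compare-and-swap semantics; a minimal sketch:
 *
 *	uint32_t v = 0;
 *	uint32_t prev = atomic_cas_32(&v, 0, 1);
 *
 * On success prev == 0 and v == 1; had 'old' not matched, prev would
 * hold the current value and v would be unchanged.
 */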

#include <sys/cdefs.h>
__RCSID("$NetBSD: atomic_init_testset.c,v 1.15.26.1 2019/06/10 21:41:07 christos Exp $");

#include "atomic_op_namespace.h"

#include <sys/types.h>
#include <sys/atomic.h>
#include <sys/lock.h>
#include <sys/ras.h>
#include <sys/sysctl.h>

#include <string.h>

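/*
 * A table of 128 spin locks, protecting the lock-based CAS paths.
 * The initializer macros below expand pairwise: I2 yields two
 * __SIMPLELOCK_UNLOCKED entries, I16 sixteen, I128 the full 128.
 */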
#define	I2	__SIMPLELOCK_UNLOCKED, __SIMPLELOCK_UNLOCKED,
#define	I16	I2 I2 I2 I2 I2 I2 I2 I2
#define	I128	I16 I16 I16 I16 I16 I16 I16 I16

static __cpu_simple_lock_t atomic_locks[128] = { I128 };
/*
 * Pick a lock from the above array based on the address of the object
 * passed.  Most variables used atomically will not share a cache line -
 * and if they do, using the same lock is fine.
 */
#define HASH(PTR)	(((uintptr_t)(PTR) >> 3) & 127)
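
/*
 * For example, HASH(0x1000) selects slot (0x1000 >> 3) & 127 == 0
 * and HASH(0x1008) selects slot 1; the mapping repeats every 1KB of
 * address space.
 */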

#ifdef	__HAVE_ASM_ATOMIC_CAS_UP
extern uint32_t _atomic_cas_up(volatile uint32_t *, uint32_t, uint32_t);
#else
static uint32_t _atomic_cas_up(volatile uint32_t *, uint32_t, uint32_t);
#endif
static uint32_t (*_atomic_cas_fn)(volatile uint32_t *, uint32_t, uint32_t) =
    _atomic_cas_up;
RAS_DECL(_atomic_cas);

#ifdef	__HAVE_ATOMIC_CAS_64_UP
#ifdef	__HAVE_ASM_ATOMIC_CAS_64_UP
extern uint64_t _atomic_cas_64_up(volatile uint64_t *, uint64_t, uint64_t);
#else
static uint64_t _atomic_cas_64_up(volatile uint64_t *, uint64_t, uint64_t);
#endif
static uint64_t (*_atomic_cas_64_fn)(volatile uint64_t *, uint64_t, uint64_t) =
    _atomic_cas_64_up;
RAS_DECL(_atomic_cas_64);
#endif

#ifdef	__HAVE_ASM_ATOMIC_CAS_16_UP
extern uint16_t _atomic_cas_16_up(volatile uint16_t *, uint16_t, uint16_t);
#else
static uint16_t _atomic_cas_16_up(volatile uint16_t *, uint16_t, uint16_t);
#endif
static uint16_t (*_atomic_cas_16_fn)(volatile uint16_t *, uint16_t, uint16_t) =
    _atomic_cas_16_up;
RAS_DECL(_atomic_cas_16);

#ifdef	__HAVE_ASM_ATOMIC_CAS_8_UP
extern uint8_t _atomic_cas_8_up(volatile uint8_t *, uint8_t, uint8_t);
#else
static uint8_t _atomic_cas_8_up(volatile uint8_t *, uint8_t, uint8_t);
#endif
static uint8_t (*_atomic_cas_8_fn)(volatile uint8_t *, uint8_t, uint8_t) =
    _atomic_cas_8_up;
RAS_DECL(_atomic_cas_8);
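
/*
 * Each _atomic_cas*_up() body between RAS_START() and RAS_END() is a
 * restartable atomic sequence: once registered with rasctl(2), a
 * thread preempted inside the region is resumed at its start, so the
 * load/compare/store executes as if atomic on a uniprocessor.  The
 * registration happens in __libc_atomic_init() below.
 */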

void	__libc_atomic_init(void) __attribute__ ((visibility("hidden")));

#ifndef	__HAVE_ASM_ATOMIC_CAS_UP
static uint32_t
_atomic_cas_up(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{
	uint32_t ret;

	RAS_START(_atomic_cas);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas);

	return ret;
}
#endif

#if defined(__HAVE_ATOMIC_CAS_64_UP) && !defined(__HAVE_ASM_ATOMIC_CAS_64_UP)
static uint64_t
_atomic_cas_64_up(volatile uint64_t *ptr, uint64_t old, uint64_t new)
{
	uint64_t ret;

	RAS_START(_atomic_cas_64);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_64);

	return ret;
}
#endif

#ifndef	__HAVE_ASM_ATOMIC_CAS_16_UP
static uint16_t
_atomic_cas_16_up(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{
	uint16_t ret;

	RAS_START(_atomic_cas_16);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_16);

	return ret;
}
#endif

#ifndef	__HAVE_ASM_ATOMIC_CAS_8_UP
static uint8_t
_atomic_cas_8_up(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{
	uint8_t ret;

	RAS_START(_atomic_cas_8);
	ret = *ptr;
	if (__predict_false(ret != old)) {
		return ret;
	}
	*ptr = new;
	RAS_END(_atomic_cas_8);

	return ret;
}
#endif

static uint32_t
_atomic_cas_mp(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{
	__cpu_simple_lock_t *lock;
	uint32_t ret;

	lock = &atomic_locks[HASH(ptr)];
	__cpu_simple_lock(lock);
	ret = *ptr;
	if (__predict_true(ret == old)) {
		*ptr = new;
	}
	__cpu_simple_unlock(lock);

	return ret;
}
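
/*
 * The remaining _mp variants are identical to the above apart from
 * operand width: hash the address to a lock, then compare-and-swap
 * under that lock.
 */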

#ifdef	__HAVE_ATOMIC_CAS_64_UP
static uint64_t
_atomic_cas_64_mp(volatile uint64_t *ptr, uint64_t old, uint64_t new)
{
	__cpu_simple_lock_t *lock;
	uint64_t ret;

	lock = &atomic_locks[HASH(ptr)];
	__cpu_simple_lock(lock);
	ret = *ptr;
	if (__predict_true(ret == old)) {
		*ptr = new;
	}
	__cpu_simple_unlock(lock);

	return ret;
}
#endif

static uint16_t
_atomic_cas_16_mp(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{
	__cpu_simple_lock_t *lock;
	uint16_t ret;

	lock = &atomic_locks[HASH(ptr)];
	__cpu_simple_lock(lock);
	ret = *ptr;
	if (__predict_true(ret == old)) {
		*ptr = new;
	}
	__cpu_simple_unlock(lock);

	return ret;
}

static uint8_t
_atomic_cas_8_mp(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{
	__cpu_simple_lock_t *lock;
	uint8_t ret;

	lock = &atomic_locks[HASH(ptr)];
	__cpu_simple_lock(lock);
	ret = *ptr;
	if (__predict_true(ret == old)) {
		*ptr = new;
	}
	__cpu_simple_unlock(lock);

	return ret;
}

uint32_t
_atomic_cas_32(volatile uint32_t *ptr, uint32_t old, uint32_t new)
{

	return (*_atomic_cas_fn)(ptr, old, new);
}
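
/*
 * The exported entry points just indirect through the pointer chosen
 * at startup, at the cost of one indirect call; the 64-, 16- and
 * 8-bit wrappers below follow the same pattern.
 */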

#ifdef	__HAVE_ATOMIC_CAS_64_UP
uint64_t _atomic_cas_64(volatile uint64_t *, uint64_t, uint64_t);

uint64_t
_atomic_cas_64(volatile uint64_t *ptr, uint64_t old, uint64_t new)
{

	return (*_atomic_cas_64_fn)(ptr, old, new);
}
#endif

uint16_t _atomic_cas_16(volatile uint16_t *, uint16_t, uint16_t);

uint16_t
_atomic_cas_16(volatile uint16_t *ptr, uint16_t old, uint16_t new)
{

	return (*_atomic_cas_16_fn)(ptr, old, new);
}

uint8_t _atomic_cas_8(volatile uint8_t *, uint8_t, uint8_t);

uint8_t
_atomic_cas_8(volatile uint8_t *ptr, uint8_t old, uint8_t new)
{

	return (*_atomic_cas_8_fn)(ptr, old, new);
}

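/*
 * Runs once during libc startup: placed in .text.startup alongside
 * other initialization code, and declared with hidden visibility
 * above so that it is not exported from the library.
 */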
void __section(".text.startup")
__libc_atomic_init(void)
{
	int ncpu, mib[2];
	size_t len;

	/* Assume MP until the CPU count proves otherwise. */
	_atomic_cas_fn = _atomic_cas_mp;
#ifdef	__HAVE_ATOMIC_CAS_64_UP
	_atomic_cas_64_fn = _atomic_cas_64_mp;
#endif
	_atomic_cas_16_fn = _atomic_cas_16_mp;
	_atomic_cas_8_fn = _atomic_cas_8_mp;

	mib[0] = CTL_HW;
	mib[1] = HW_NCPU;
	len = sizeof(ncpu);
	if (sysctl(mib, 2, &ncpu, &len, NULL, 0) == -1)
		return;
	if (ncpu > 1)
		return;

	/* Uniprocessor: try to register the restartable sequences. */
	if (rasctl(RAS_ADDR(_atomic_cas), RAS_SIZE(_atomic_cas),
	    RAS_INSTALL) == 0) {
		_atomic_cas_fn = _atomic_cas_up;
		return;
	}

#ifdef	__HAVE_ATOMIC_CAS_64_UP
	if (rasctl(RAS_ADDR(_atomic_cas_64), RAS_SIZE(_atomic_cas_64),
	    RAS_INSTALL) == 0) {
		_atomic_cas_64_fn = _atomic_cas_64_up;
		return;
	}
#endif

	if (rasctl(RAS_ADDR(_atomic_cas_16), RAS_SIZE(_atomic_cas_16),
	    RAS_INSTALL) == 0) {
		_atomic_cas_16_fn = _atomic_cas_16_up;
		return;
	}

	if (rasctl(RAS_ADDR(_atomic_cas_8), RAS_SIZE(_atomic_cas_8),
	    RAS_INSTALL) == 0) {
		_atomic_cas_8_fn = _atomic_cas_8_up;
		return;
	}
}
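
/*
 * Note that each successful RAS install returns immediately: on a
 * uniprocessor where the 32-bit sequence installs, the 64-, 16- and
 * 8-bit operations keep the lock-based path.  That is still correct -
 * the _mp variants work on any number of CPUs - just slower.
 */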

#undef atomic_cas_32
#undef atomic_cas_uint
#undef atomic_cas_ulong
#undef atomic_cas_ptr
#undef atomic_cas_32_ni
#undef atomic_cas_uint_ni
#undef atomic_cas_ulong_ni
#undef atomic_cas_ptr_ni

atomic_op_alias(atomic_cas_32,_atomic_cas_32)
atomic_op_alias(atomic_cas_uint,_atomic_cas_32)
__strong_alias(_atomic_cas_uint,_atomic_cas_32)
atomic_op_alias(atomic_cas_ulong,_atomic_cas_32)
__strong_alias(_atomic_cas_ulong,_atomic_cas_32)
atomic_op_alias(atomic_cas_ptr,_atomic_cas_32)
__strong_alias(_atomic_cas_ptr,_atomic_cas_32)

atomic_op_alias(atomic_cas_32_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_32_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_uint_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_uint_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_ulong_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_ulong_ni,_atomic_cas_32)
atomic_op_alias(atomic_cas_ptr_ni,_atomic_cas_32)
__strong_alias(_atomic_cas_ptr_ni,_atomic_cas_32)
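
/*
 * atomic_op_alias() and __strong_alias() export each operation under
 * its public atomic_ops(3) names as well as the reserved _-prefixed
 * implementation names.
 */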

//atomic_op_alias(atomic_cas_16,_atomic_cas_16)
//atomic_op_alias(atomic_cas_16_ni,_atomic_cas_16)
//atomic_op_alias(atomic_cas_8,_atomic_cas_8)
//atomic_op_alias(atomic_cas_8_ni,_atomic_cas_8)
#ifdef	__HAVE_ATOMIC_CAS_64_UP
atomic_op_alias(atomic_cas_64_ni,_atomic_cas_64)
__strong_alias(_atomic_cas_64_ni,_atomic_cas_64)
crt_alias(__sync_val_compare_and_swap_8,_atomic_cas_64)
#endif
crt_alias(__sync_val_compare_and_swap_4,_atomic_cas_32)
crt_alias(__sync_val_compare_and_swap_2,_atomic_cas_16)
crt_alias(__sync_val_compare_and_swap_1,_atomic_cas_8)
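
/*
 * The crt_alias() entries additionally export the CAS routines under
 * the names the compiler emits for the __sync_val_compare_and_swap
 * builtins; the suffix is the operand size in bytes.
 */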