/*	$NetBSD: atomic.h,v 1.7.30.1 2019/06/10 22:08:31 christos Exp $	*/

/*-
 * Copyright (c) 2013 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Taylor R. Campbell.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _LINUX_ATOMIC_H_
#define _LINUX_ATOMIC_H_

#include <sys/atomic.h>

#include <machine/limits.h>

#if defined(MULTIPROCESSOR) && !defined(__HAVE_ATOMIC_AS_MEMBAR)
#  define	smp_mb__before_atomic()		membar_exit()
#  define	smp_mb__after_atomic()		membar_enter()
#else
#  define	smp_mb__before_atomic()		__insn_barrier()
#  define	smp_mb__after_atomic()		__insn_barrier()
#endif

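/*
 * Example (editor's illustrative sketch, hypothetical names): the
 * smp_mb__* hooks let a caller upgrade a relaxed atomic to a fully
 * ordered one, Linux-style:
 *
 *	frob->f_ready = 1;
 *	smp_mb__before_atomic();	   order the store above...
 *	atomic_inc(&frob->f_count);	   ...before the relaxed increment
 *	smp_mb__after_atomic();		   ...and it before later accesses
 */
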
/*
 * atomic (u)int operations
 *
 *	Atomics that return a value, other than atomic_read, imply a
 *	full memory barrier.  Those that do not return a value
 *	imply no memory barrier.
 */

struct atomic {
	union {
		volatile int au_int;
		volatile unsigned int au_uint;
	} a_u;
};

#define	ATOMIC_INIT(i)	{ .a_u = { .au_int = (i) } }

typedef struct atomic atomic_t;

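/*
 * Example (editor's illustrative sketch, hypothetical name): an
 * atomic_t is statically initialized with ATOMIC_INIT; dynamic
 * initialization may use atomic_set, which implies no barrier.
 *
 *	static atomic_t frob_count = ATOMIC_INIT(0);
 *
 *	atomic_set(&frob_count, 42);
 */
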
static inline int
atomic_read(atomic_t *atomic)
{
	/* no membar */
	return atomic->a_u.au_int;
}

static inline void
atomic_set(atomic_t *atomic, int value)
{
	/* no membar */
	atomic->a_u.au_int = value;
}

static inline void
atomic_add(int addend, atomic_t *atomic)
{
	/* no membar */
	atomic_add_int(&atomic->a_u.au_uint, addend);
}

static inline void
atomic_sub(int subtrahend, atomic_t *atomic)
{
	/* no membar */
	atomic_add_int(&atomic->a_u.au_uint, -subtrahend);
}

static inline int
atomic_add_return(int addend, atomic_t *atomic)
{
	int v;

	smp_mb__before_atomic();
	v = (int)atomic_add_int_nv(&atomic->a_u.au_uint, addend);
	smp_mb__after_atomic();

	return v;
}

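/*
 * Example (editor's illustrative sketch, hypothetical names): because
 * atomic_add_return returns a value, it implies a full barrier, so the
 * returned count can be used to order against surrounding accesses:
 *
 *	if (atomic_add_return(1, &pool->p_users) == 1)
 *		first_user_init(pool);
 */
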
static inline void
atomic_inc(atomic_t *atomic)
{
	/* no membar */
	atomic_inc_uint(&atomic->a_u.au_uint);
}

static inline void
atomic_dec(atomic_t *atomic)
{
	/* no membar */
	atomic_dec_uint(&atomic->a_u.au_uint);
}

static inline int
atomic_inc_return(atomic_t *atomic)
{
	int v;

	smp_mb__before_atomic();
	v = (int)atomic_inc_uint_nv(&atomic->a_u.au_uint);
	smp_mb__after_atomic();

	return v;
}

static inline int
atomic_dec_return(atomic_t *atomic)
{
	int v;

	smp_mb__before_atomic();
	v = (int)atomic_dec_uint_nv(&atomic->a_u.au_uint);
	smp_mb__after_atomic();

	return v;
}

static inline int
atomic_dec_and_test(atomic_t *atomic)
{
	/* membar implied by atomic_dec_return */
	return atomic_dec_return(atomic) == 0;
}

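/*
 * Example (editor's illustrative sketch, hypothetical names): the
 * classic reference-count release pattern.  The implied full barrier
 * ensures all prior accesses to the object complete before it is
 * freed, on whichever CPU drops the last reference.
 *
 *	if (atomic_dec_and_test(&obj->o_refcnt))
 *		obj_free(obj);
 */
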
static inline void
atomic_or(int value, atomic_t *atomic)
{
	/* no membar */
	atomic_or_uint(&atomic->a_u.au_uint, value);
}

static inline void
atomic_set_mask(unsigned long mask, atomic_t *atomic)
{
	/* no membar */
	atomic_or_uint(&atomic->a_u.au_uint, mask);
}

static inline void
atomic_clear_mask(unsigned long mask, atomic_t *atomic)
{
	/* no membar */
	atomic_and_uint(&atomic->a_u.au_uint, ~mask);
}

static inline int
atomic_add_unless(atomic_t *atomic, int addend, int zero)
{
	int value;

	smp_mb__before_atomic();
	do {
		value = atomic->a_u.au_int;
		if (value == zero)
			break;
	} while (atomic_cas_uint(&atomic->a_u.au_uint, value, (value + addend))
	    != value);
	smp_mb__after_atomic();

	return value != zero;
}

static inline int
atomic_inc_not_zero(atomic_t *atomic)
{
	/* membar implied by atomic_add_unless */
	return atomic_add_unless(atomic, 1, 0);
}

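/*
 * Example (editor's illustrative sketch, hypothetical names):
 * atomic_inc_not_zero is the usual way to take a new reference to an
 * object found in a lookup structure, failing gracefully if the last
 * reference was already dropped:
 *
 *	obj = table_lookup(table, key);
 *	if (obj != NULL && !atomic_inc_not_zero(&obj->o_refcnt))
 *		obj = NULL;	   raced with the final release
 */
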
static inline int
atomic_xchg(atomic_t *atomic, int new)
{
	int old;

	smp_mb__before_atomic();
	old = (int)atomic_swap_uint(&atomic->a_u.au_uint, (unsigned)new);
	smp_mb__after_atomic();

	return old;
}

static inline int
atomic_cmpxchg(atomic_t *atomic, int expect, int new)
{
	int old;

	/*
	 * XXX As an optimization, under Linux's semantics we are
	 * allowed to skip the memory barrier if the comparison fails,
	 * but taking advantage of that is not convenient here.
	 */
	smp_mb__before_atomic();
	old = (int)atomic_cas_uint(&atomic->a_u.au_uint, (unsigned)expect,
	    (unsigned)new);
	smp_mb__after_atomic();

	return old;
}

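/*
 * Example (editor's illustrative sketch): a lock-free read-modify-
 * write loop built on atomic_cmpxchg, here computing an atomic
 * maximum.  cmpxchg returns the value it observed, so the update
 * succeeded exactly when that equals the expected value.
 *
 *	static inline void
 *	atomic_max(atomic_t *a, int n)
 *	{
 *		int old;
 *
 *		do {
 *			old = atomic_read(a);
 *			if (old >= n)
 *				break;
 *		} while (atomic_cmpxchg(a, old, n) != old);
 *	}
 */
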
struct atomic64 {
	volatile uint64_t	a_v;
};

typedef struct atomic64 atomic64_t;

#define	ATOMIC64_INIT(v)	{ .a_v = (v) }

int		linux_atomic64_init(void);
void		linux_atomic64_fini(void);

#ifdef __HAVE_ATOMIC64_OPS

static inline uint64_t
atomic64_read(const struct atomic64 *a)
{
	/* no membar */
	return a->a_v;
}

static inline void
atomic64_set(struct atomic64 *a, uint64_t v)
{
	/* no membar */
	a->a_v = v;
}

static inline void
atomic64_add(int64_t d, struct atomic64 *a)
{
	/* no membar */
	atomic_add_64(&a->a_v, d);
}

static inline void
atomic64_sub(int64_t d, struct atomic64 *a)
{
	/* no membar */
	atomic_add_64(&a->a_v, -d);
}

static inline int64_t
atomic64_add_return(int64_t d, struct atomic64 *a)
{
	int64_t v;

	smp_mb__before_atomic();
	v = (int64_t)atomic_add_64_nv(&a->a_v, d);
	smp_mb__after_atomic();

	return v;
}

static inline uint64_t
atomic64_xchg(struct atomic64 *a, uint64_t new)
{
	uint64_t old;

	smp_mb__before_atomic();
	old = atomic_swap_64(&a->a_v, new);
	smp_mb__after_atomic();

	return old;
}

static inline uint64_t
atomic64_cmpxchg(struct atomic64 *atomic, uint64_t expect, uint64_t new)
{
	uint64_t old;

	/*
	 * XXX As an optimization, under Linux's semantics we are
	 * allowed to skip the memory barrier if the comparison fails,
	 * but taking advantage of that is not convenient here.
	 */
	smp_mb__before_atomic();
	old = atomic_cas_64(&atomic->a_v, expect, new);
	smp_mb__after_atomic();

	return old;
}

#else  /* !defined(__HAVE_ATOMIC64_OPS) */

#define	atomic64_add		linux_atomic64_add
#define	atomic64_add_return	linux_atomic64_add_return
#define	atomic64_cmpxchg	linux_atomic64_cmpxchg
#define	atomic64_read		linux_atomic64_read
#define	atomic64_set		linux_atomic64_set
#define	atomic64_sub		linux_atomic64_sub
#define	atomic64_xchg		linux_atomic64_xchg

uint64_t	atomic64_read(const struct atomic64 *);
void		atomic64_set(struct atomic64 *, uint64_t);
void		atomic64_add(int64_t, struct atomic64 *);
void		atomic64_sub(int64_t, struct atomic64 *);
int64_t		atomic64_add_return(int64_t, struct atomic64 *);
uint64_t	atomic64_xchg(struct atomic64 *, uint64_t);
uint64_t	atomic64_cmpxchg(struct atomic64 *, uint64_t, uint64_t);

#endif

static inline int64_t
atomic64_inc_return(struct atomic64 *a)
{
	return atomic64_add_return(1, a);
}

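/*
 * Example (editor's illustrative sketch, hypothetical names): on ports
 * without __HAVE_ATOMIC64_OPS the atomic64 operations are provided out
 * of line (see the prototypes above), so callers use the same API
 * either way:
 *
 *	static atomic64_t bytes_written = ATOMIC64_INIT(0);
 *
 *	atomic64_add(len, &bytes_written);
 *	seq = atomic64_add_return(1, &event_seq);	   fully ordered
 */
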
struct atomic_long {
	volatile unsigned long	al_v;
};

typedef struct atomic_long atomic_long_t;

static inline long
atomic_long_read(struct atomic_long *a)
{
	/* no membar */
	return (long)a->al_v;
}

static inline void
atomic_long_set(struct atomic_long *a, long v)
{
	/* no membar */
	a->al_v = v;
}

static inline long
atomic_long_add_unless(struct atomic_long *a, long addend, long zero)
{
	long value;

	smp_mb__before_atomic();
	do {
		value = (long)a->al_v;
		if (value == zero)
			break;
	} while (atomic_cas_ulong(&a->al_v, (unsigned long)value,
		(unsigned long)(value + addend)) != (unsigned long)value);
	smp_mb__after_atomic();

	return value != zero;
}

static inline long
atomic_long_inc_not_zero(struct atomic_long *a)
{
	/* membar implied by atomic_long_add_unless */
	return atomic_long_add_unless(a, 1, 0);
}

static inline long
atomic_long_cmpxchg(struct atomic_long *a, long expect, long new)
{
	long old;

	/*
	 * XXX As an optimization, under Linux's semantics we are
	 * allowed to skip the memory barrier if the comparison fails,
	 * but taking advantage of that is not convenient here.
	 */
	smp_mb__before_atomic();
	old = (long)atomic_cas_ulong(&a->al_v, (unsigned long)expect,
	    (unsigned long)new);
	smp_mb__after_atomic();

	return old;
}

static inline void
set_bit(unsigned int bit, volatile unsigned long *ptr)
{
	const unsigned int units = (sizeof(*ptr) * CHAR_BIT);

	/* no memory barrier */
	atomic_or_ulong(&ptr[bit / units], (1UL << (bit % units)));
}

static inline void
clear_bit(unsigned int bit, volatile unsigned long *ptr)
{
	const unsigned int units = (sizeof(*ptr) * CHAR_BIT);

	/* no memory barrier */
	atomic_and_ulong(&ptr[bit / units], ~(1UL << (bit % units)));
}

static inline void
change_bit(unsigned int bit, volatile unsigned long *ptr)
{
	const unsigned int units = (sizeof(*ptr) * CHAR_BIT);
	volatile unsigned long *const p = &ptr[bit / units];
	const unsigned long mask = (1UL << (bit % units));
	unsigned long v;

	/* no memory barrier */
	do v = *p; while (atomic_cas_ulong(p, v, (v ^ mask)) != v);
}

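/*
 * Example (editor's illustrative sketch, hypothetical names): the bit
 * operations treat their argument as an array of unsigned longs
 * indexed from bit 0, so a bitmap of N bits is declared with
 * howmany() from <sys/param.h>:
 *
 *	#define	FROB_NBITS	128
 *	static volatile unsigned long frob_map[
 *	    howmany(FROB_NBITS, sizeof(unsigned long) * CHAR_BIT)];
 *
 *	set_bit(42, frob_map);
 *	clear_bit(42, frob_map);
 */
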
static inline int
test_and_set_bit(unsigned int bit, volatile unsigned long *ptr)
{
	const unsigned int units = (sizeof(*ptr) * CHAR_BIT);
	volatile unsigned long *const p = &ptr[bit / units];
	const unsigned long mask = (1UL << (bit % units));
	unsigned long v;

	smp_mb__before_atomic();
	do v = *p; while (atomic_cas_ulong(p, v, (v | mask)) != v);
	smp_mb__after_atomic();

	return ((v & mask) != 0);
}

static inline int
test_and_clear_bit(unsigned int bit, volatile unsigned long *ptr)
{
	const unsigned int units = (sizeof(*ptr) * CHAR_BIT);
	volatile unsigned long *const p = &ptr[bit / units];
	const unsigned long mask = (1UL << (bit % units));
	unsigned long v;

	smp_mb__before_atomic();
	do v = *p; while (atomic_cas_ulong(p, v, (v & ~mask)) != v);
	smp_mb__after_atomic();

	return ((v & mask) != 0);
}

static inline int
test_and_change_bit(unsigned int bit, volatile unsigned long *ptr)
{
	const unsigned int units = (sizeof(*ptr) * CHAR_BIT);
	volatile unsigned long *const p = &ptr[bit / units];
	const unsigned long mask = (1UL << (bit % units));
	unsigned long v;

	smp_mb__before_atomic();
	do v = *p; while (atomic_cas_ulong(p, v, (v ^ mask)) != v);
	smp_mb__after_atomic();

	return ((v & mask) != 0);
}

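/*
 * Example (editor's illustrative sketch, hypothetical names):
 * test_and_set_bit returns the previous bit value and implies a full
 * barrier, so it can serve as a one-shot claim on a shared resource;
 * only the caller that observed 0 proceeds.
 *
 *	if (!test_and_set_bit(FROB_BUSY, &sc->sc_flags))
 *		frob_start(sc);		   we won the race
 */
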
#endif  /* _LINUX_ATOMIC_H_ */