/*	atomic.S,v 1.7.2.2 2008/01/09 01:20:53 matt Exp	*/

/*-
 * Copyright (c) 2007 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe, and by Andrew Doran.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *	This product includes software developed by the NetBSD
 *	Foundation, Inc. and its contributors.
 * 4. Neither the name of The NetBSD Foundation nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <machine/asm.h>

#ifdef _KERNEL
#define	LOCK(n)		.Lpatch/**/n:	lock
#define	ALIAS(f, t)	STRONG_ALIAS(f,t)
#define	END(a)		_ALIGN_TEXT; LABEL(a)
#else
#define	LOCK(n)		lock
#define	ALIAS(f, t)	WEAK_ALIAS(f,t)
#define	END(a)		/* nothing */
#endif
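
/*
 * In the kernel case every LOCK site gets a .LpatchN label, and the
 * addresses are collected in the atomic_lockpatch table below; the
 * intent is presumably that a uniprocessor kernel can rewrite the lock
 * prefixes as NOPs at boot.  Userland keeps the plain lock prefix and
 * exports the entry points through weak aliases instead.
 */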

	.text

NENTRY(_atomic_add_32)
	movl	4(%esp), %edx
	movl	8(%esp), %eax
	LOCK(1)
	addl	%eax, (%edx)
	ret

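/*
 * The *_nv ("new value") variants return the updated value.  xaddl
 * leaves the old contents of (%edx) in %eax, so adding the original
 * delta (saved in %ecx) back in reconstructs the new value.
 */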
NENTRY(_atomic_add_32_nv)
	movl	4(%esp), %edx
	movl	8(%esp), %eax
	movl	%eax, %ecx
	LOCK(2)
	xaddl	%eax, (%edx)
	addl	%ecx, %eax
	ret

NENTRY(_atomic_and_32)
	movl	4(%esp), %edx
	movl	8(%esp), %eax
	LOCK(3)
	andl	%eax, (%edx)
	ret

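/*
 * There is no fetch-and-and instruction, so the *_nv forms of and/or
 * use a cmpxchg retry loop: compute the candidate new value in %ecx
 * and try to install it; on failure cmpxchgl reloads %eax with the
 * current contents and the loop retries.
 */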
NENTRY(_atomic_and_32_nv)
	movl	4(%esp), %edx
	movl	(%edx), %eax
1:
	movl	%eax, %ecx
	andl	8(%esp), %ecx
	LOCK(4)
	cmpxchgl %ecx, (%edx)
	jnz	1b
	movl	%ecx, %eax
	ret

NENTRY(_atomic_dec_32)
	movl	4(%esp), %edx
	LOCK(5)
	decl	(%edx)
	ret

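/*
 * dec/inc *_nv: xaddl with a constant of -1/+1 returns the old value
 * in %eax; a final decl/incl turns that into the new value.
 */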
NENTRY(_atomic_dec_32_nv)
	movl	4(%esp), %edx
	movl	$-1, %eax
	LOCK(6)
	xaddl	%eax, (%edx)
	decl	%eax
	ret

NENTRY(_atomic_inc_32)
	movl	4(%esp), %edx
	LOCK(7)
	incl	(%edx)
	ret

NENTRY(_atomic_inc_32_nv)
	movl	4(%esp), %edx
	movl	$1, %eax
	LOCK(8)
	xaddl	%eax, (%edx)
	incl	%eax
	ret

NENTRY(_atomic_or_32)
	movl	4(%esp), %edx
	movl	8(%esp), %eax
	LOCK(9)
	orl	%eax, (%edx)
	ret

NENTRY(_atomic_or_32_nv)
	movl	4(%esp), %edx
	movl	(%edx), %eax
1:
	movl	%eax, %ecx
	orl	8(%esp), %ecx
	LOCK(10)
	cmpxchgl %ecx, (%edx)
	jnz	1b
	movl	%ecx, %eax
	ret

NENTRY(_atomic_swap_32)
	movl	4(%esp), %edx
	movl	8(%esp), %eax
	/* xchgl with a memory operand is implicitly locked */
	xchgl	%eax, (%edx)
	ret

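/*
 * cmpxchgl compares %eax with (%edx); on a match it stores %ecx,
 * otherwise it loads the current value into %eax.  Either way %eax
 * ends up holding the old value, which is what atomic_cas returns.
 * The _ni ("non-interlocked") variant below omits the LOCK prefix and
 * so is presumably atomic only with respect to the local CPU.
 */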
NENTRY(_atomic_cas_32)
	movl	4(%esp), %edx
	movl	8(%esp), %eax
	movl	12(%esp), %ecx
	LOCK(12)
	cmpxchgl %ecx, (%edx)
	/* %eax now contains the old value */
	ret

NENTRY(_atomic_cas_32_ni)
	movl	4(%esp), %edx
	movl	8(%esp), %eax
	movl	12(%esp), %ecx
	cmpxchgl %ecx, (%edx)
	/* %eax now contains the old value */
	ret

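/*
 * Memory barriers: a LOCKed read-modify-write of an unused stack slot
 * below %esp acts as a full fence for the consumer/sync barriers, while
 * the producer barrier only needs a store because x86 does not reorder
 * stores with earlier stores.  In the kernel these are presumably
 * patched over with lfence/mfence (sse2_lfence/sse2_mfence below) when
 * the CPU supports SSE2.
 */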
NENTRY(_membar_consumer)
	LOCK(13)
	addl	$0, -4(%esp)
	ret
END(membar_consumer_end)

NENTRY(_membar_producer)
	/* A store is enough */
	movl	$0, -4(%esp)
	ret
END(membar_producer_end)

NENTRY(_membar_sync)
	LOCK(14)
	addl	$0, -4(%esp)
	ret
END(membar_sync_end)

#ifdef _KERNEL
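/*
 * 64-bit CAS for the kernel.  The default version works on CPUs that
 * lack cmpxchg8b by briefly disabling interrupts, which is only safe
 * while the kernel runs on a single CPU; on CMPXCHG8B-capable CPUs the
 * _atomic_cas_cx8 variant below is presumably copied over it at boot,
 * which is what the END() markers and the NOP padding are for.
 */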
NENTRY(_atomic_cas_64)
	cli
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi
	movl	16(%esp), %eax
	movl	20(%esp), %edx
	movl	24(%esp), %ebx
	movl	28(%esp), %ecx
	cmpl	0(%edi), %eax
	jne	2f
	cmpl	4(%edi), %edx
	jne	2f
	movl	%ebx, 0(%edi)
	movl	%ecx, 4(%edi)
1:
	popl	%ebx
	popl	%edi
	sti
	ret
2:
	movl	0(%edi), %eax
	movl	4(%edi), %edx
	jmp	1b
END(_atomic_cas_64_end)

NENTRY(_atomic_cas_cx8)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi
	movl	16(%esp), %eax
	movl	20(%esp), %edx
	movl	24(%esp), %ebx
	movl	28(%esp), %ecx
	LOCK(15)
	cmpxchg8b (%edi)
	popl	%ebx
	popl	%edi
	ret
	.space	32, 0x90
END(_atomic_cas_cx8_end)

NENTRY(sse2_lfence)
	lfence
	ret
END(sse2_lfence_end)

NENTRY(sse2_mfence)
	mfence
	ret
END(sse2_mfence_end)

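/*
 * Table of the LOCK prefix addresses recorded by LOCK(n) above,
 * terminated by 0.  A uniprocessor kernel can presumably walk this
 * table at boot and rewrite each lock prefix as a NOP, since the bus
 * lock is unnecessary with a single CPU.
 */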
atomic_lockpatch:
	.globl	atomic_lockpatch
	.long	.Lpatch1, .Lpatch2, .Lpatch3, .Lpatch4, .Lpatch5
	.long	.Lpatch6, .Lpatch7, .Lpatch8, .Lpatch9, .Lpatch10
	.long	.Lpatch12, .Lpatch13, .Lpatch14, .Lpatch15, 0
#else
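/*
 * Userland 64-bit CAS uses cmpxchg8b directly, so it presumably
 * assumes a CX8-capable (Pentium-class or newer) CPU.
 */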
NENTRY(_atomic_cas_64)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi
	movl	16(%esp), %eax
	movl	20(%esp), %edx
	movl	24(%esp), %ebx
	movl	28(%esp), %ecx
	lock
	cmpxchg8b (%edi)
	popl	%ebx
	popl	%edi
	ret
#endif	/* _KERNEL */

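/*
 * Exported names.  On i386, int, long and pointers are all 32 bits
 * wide, so every width-specific name maps onto the _32 implementation;
 * ALIAS() expands to a strong alias in the kernel and a weak alias in
 * userland.
 */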
ALIAS(atomic_add_32,_atomic_add_32)
ALIAS(atomic_add_int,_atomic_add_32)
ALIAS(atomic_add_long,_atomic_add_32)
ALIAS(atomic_add_ptr,_atomic_add_32)

ALIAS(atomic_add_32_nv,_atomic_add_32_nv)
ALIAS(atomic_add_int_nv,_atomic_add_32_nv)
ALIAS(atomic_add_long_nv,_atomic_add_32_nv)
ALIAS(atomic_add_ptr_nv,_atomic_add_32_nv)

ALIAS(atomic_and_32,_atomic_and_32)
ALIAS(atomic_and_uint,_atomic_and_32)
ALIAS(atomic_and_ulong,_atomic_and_32)
ALIAS(atomic_and_ptr,_atomic_and_32)

ALIAS(atomic_and_32_nv,_atomic_and_32_nv)
ALIAS(atomic_and_uint_nv,_atomic_and_32_nv)
ALIAS(atomic_and_ulong_nv,_atomic_and_32_nv)
ALIAS(atomic_and_ptr_nv,_atomic_and_32_nv)

ALIAS(atomic_dec_32,_atomic_dec_32)
ALIAS(atomic_dec_uint,_atomic_dec_32)
ALIAS(atomic_dec_ulong,_atomic_dec_32)
ALIAS(atomic_dec_ptr,_atomic_dec_32)

ALIAS(atomic_dec_32_nv,_atomic_dec_32_nv)
ALIAS(atomic_dec_uint_nv,_atomic_dec_32_nv)
ALIAS(atomic_dec_ulong_nv,_atomic_dec_32_nv)
ALIAS(atomic_dec_ptr_nv,_atomic_dec_32_nv)

ALIAS(atomic_inc_32,_atomic_inc_32)
ALIAS(atomic_inc_uint,_atomic_inc_32)
ALIAS(atomic_inc_ulong,_atomic_inc_32)
ALIAS(atomic_inc_ptr,_atomic_inc_32)

ALIAS(atomic_inc_32_nv,_atomic_inc_32_nv)
ALIAS(atomic_inc_uint_nv,_atomic_inc_32_nv)
ALIAS(atomic_inc_ulong_nv,_atomic_inc_32_nv)
ALIAS(atomic_inc_ptr_nv,_atomic_inc_32_nv)

ALIAS(atomic_or_32,_atomic_or_32)
ALIAS(atomic_or_uint,_atomic_or_32)
ALIAS(atomic_or_ulong,_atomic_or_32)
ALIAS(atomic_or_ptr,_atomic_or_32)

ALIAS(atomic_or_32_nv,_atomic_or_32_nv)
ALIAS(atomic_or_uint_nv,_atomic_or_32_nv)
ALIAS(atomic_or_ulong_nv,_atomic_or_32_nv)
ALIAS(atomic_or_ptr_nv,_atomic_or_32_nv)

ALIAS(atomic_swap_32,_atomic_swap_32)
ALIAS(atomic_swap_uint,_atomic_swap_32)
ALIAS(atomic_swap_ulong,_atomic_swap_32)
ALIAS(atomic_swap_ptr,_atomic_swap_32)

ALIAS(atomic_cas_32,_atomic_cas_32)
ALIAS(atomic_cas_uint,_atomic_cas_32)
ALIAS(atomic_cas_ulong,_atomic_cas_32)
ALIAS(atomic_cas_ptr,_atomic_cas_32)

ALIAS(atomic_cas_32_ni,_atomic_cas_32_ni)
ALIAS(atomic_cas_uint_ni,_atomic_cas_32_ni)
ALIAS(atomic_cas_ulong_ni,_atomic_cas_32_ni)
ALIAS(atomic_cas_ptr_ni,_atomic_cas_32_ni)

ALIAS(atomic_cas_64,_atomic_cas_64)
ALIAS(atomic_cas_64_ni,_atomic_cas_64)

ALIAS(membar_consumer,_membar_consumer)
ALIAS(membar_producer,_membar_producer)
ALIAS(membar_enter,_membar_consumer)
ALIAS(membar_exit,_membar_producer)
ALIAS(membar_sync,_membar_sync)

STRONG_ALIAS(_atomic_add_int,_atomic_add_32)
STRONG_ALIAS(_atomic_add_long,_atomic_add_32)
STRONG_ALIAS(_atomic_add_ptr,_atomic_add_32)

STRONG_ALIAS(_atomic_add_int_nv,_atomic_add_32_nv)
STRONG_ALIAS(_atomic_add_long_nv,_atomic_add_32_nv)
STRONG_ALIAS(_atomic_add_ptr_nv,_atomic_add_32_nv)

STRONG_ALIAS(_atomic_and_uint,_atomic_and_32)
STRONG_ALIAS(_atomic_and_ulong,_atomic_and_32)
STRONG_ALIAS(_atomic_and_ptr,_atomic_and_32)

STRONG_ALIAS(_atomic_and_uint_nv,_atomic_and_32_nv)
STRONG_ALIAS(_atomic_and_ulong_nv,_atomic_and_32_nv)
STRONG_ALIAS(_atomic_and_ptr_nv,_atomic_and_32_nv)

STRONG_ALIAS(_atomic_dec_uint,_atomic_dec_32)
STRONG_ALIAS(_atomic_dec_ulong,_atomic_dec_32)
STRONG_ALIAS(_atomic_dec_ptr,_atomic_dec_32)

STRONG_ALIAS(_atomic_dec_uint_nv,_atomic_dec_32_nv)
STRONG_ALIAS(_atomic_dec_ulong_nv,_atomic_dec_32_nv)
STRONG_ALIAS(_atomic_dec_ptr_nv,_atomic_dec_32_nv)

STRONG_ALIAS(_atomic_inc_uint,_atomic_inc_32)
STRONG_ALIAS(_atomic_inc_ulong,_atomic_inc_32)
STRONG_ALIAS(_atomic_inc_ptr,_atomic_inc_32)

STRONG_ALIAS(_atomic_inc_uint_nv,_atomic_inc_32_nv)
STRONG_ALIAS(_atomic_inc_ulong_nv,_atomic_inc_32_nv)
STRONG_ALIAS(_atomic_inc_ptr_nv,_atomic_inc_32_nv)

STRONG_ALIAS(_atomic_or_uint,_atomic_or_32)
STRONG_ALIAS(_atomic_or_ulong,_atomic_or_32)
STRONG_ALIAS(_atomic_or_ptr,_atomic_or_32)

STRONG_ALIAS(_atomic_or_uint_nv,_atomic_or_32_nv)
STRONG_ALIAS(_atomic_or_ulong_nv,_atomic_or_32_nv)
STRONG_ALIAS(_atomic_or_ptr_nv,_atomic_or_32_nv)

STRONG_ALIAS(_atomic_swap_uint,_atomic_swap_32)
STRONG_ALIAS(_atomic_swap_ulong,_atomic_swap_32)
STRONG_ALIAS(_atomic_swap_ptr,_atomic_swap_32)

STRONG_ALIAS(_atomic_cas_uint,_atomic_cas_32)
STRONG_ALIAS(_atomic_cas_ulong,_atomic_cas_32)
STRONG_ALIAS(_atomic_cas_ptr,_atomic_cas_32)

STRONG_ALIAS(_atomic_cas_uint_ni,_atomic_cas_32_ni)
STRONG_ALIAS(_atomic_cas_ulong_ni,_atomic_cas_32_ni)
STRONG_ALIAS(_atomic_cas_ptr_ni,_atomic_cas_32_ni)

STRONG_ALIAS(_membar_enter,_membar_consumer)
STRONG_ALIAS(_membar_exit,_membar_producer)