Home | History | Annotate | Line # | Download | only in atomic
atomic.S revision 1.22.24.3
      1 /*	$NetBSD: atomic.S,v 1.22.24.3 2020/04/21 19:37:43 martin Exp $	*/
      2 
      3 /*-
      4  * Copyright (c) 2007 The NetBSD Foundation, Inc.
      5  * All rights reserved.
      6  *
      7  * This code is derived from software contributed to The NetBSD Foundation
      8  * by Jason R. Thorpe, and by Andrew Doran.
      9  *
     10  * Redistribution and use in source and binary forms, with or without
     11  * modification, are permitted provided that the following conditions
     12  * are met:
     13  * 1. Redistributions of source code must retain the above copyright
     14  *    notice, this list of conditions and the following disclaimer.
     15  * 2. Redistributions in binary form must reproduce the above copyright
     16  *    notice, this list of conditions and the following disclaimer in the
     17  *    documentation and/or other materials provided with the distribution.
     18  *
     19  * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
     20  * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
     21  * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
     22  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
     23  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
     24  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
     25  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
     26  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
     27  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
     28  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
     29  * POSSIBILITY OF SUCH DAMAGE.
     30  */
     31 
     32 #include <sys/param.h>
     33 #include <machine/asm.h>
/*
 * The __HAVE_ constants live in <machine/types.h>, which cannot be
 * included from assembly.  The only one this file would need is
 * __HAVE_ATOMIC64_OPS, so the 64-bit CAS section below tests
 * defined(__HAVE_ATOMIC64_OPS) || defined(_KERNEL) instead.
 */
/*
 * ALIAS() exports a public name for an implementation: strong bindings
 * in the kernel, weak ones in userland so consumers may override them.
 */
#ifdef _KERNEL
#define	ALIAS(f, t)	STRONG_ALIAS(f,t)
#else
#define	ALIAS(f, t)	WEAK_ALIAS(f,t)
#endif

/*
 * In the non-Xen kernel (_HARDKERNEL), each LOCK(n) tags its "lock"
 * prefix with a local label .Lpatch<n> so the prefix can be patched at
 * run time; the labels are collected in the atomic_lockpatch table at
 * the bottom of this file.  ENDLABEL() marks the end of a routine so
 * the extent of a patchable code region can be computed.  Neither is
 * needed in userland.
 */
#ifdef _HARDKERNEL
#include "opt_xen.h"
#define	LOCK(n)		.Lpatch ## n:	lock
#define	ENDLABEL(a)	_ALIGN_TEXT; LABEL(a)
#else
#define	LOCK(n)		lock
#define	ENDLABEL(a)	/* nothing */
#endif
     53 
     54 	.text
     55 
/*
 * void atomic_add_32(volatile uint32_t *p, int32_t v)
 *
 * Atomically *p += v.  No return value.
 */
ENTRY(_atomic_add_32)
	movl	4(%esp), %edx		/* %edx = p */
	movl	8(%esp), %eax		/* %eax = v */
	LOCK(1)
	addl	%eax, (%edx)		/* locked read-modify-write */
	ret
END(_atomic_add_32)
     63 
/*
 * uint32_t atomic_add_32_nv(volatile uint32_t *p, int32_t v)
 *
 * Atomically *p += v; returns the new value.
 */
ENTRY(_atomic_add_32_nv)
	movl	4(%esp), %edx		/* %edx = p */
	movl	8(%esp), %eax		/* %eax = v */
	movl	%eax, %ecx		/* save v */
	LOCK(2)
	xaddl	%eax, (%edx)		/* %eax = old *p; *p += v */
	addl	%ecx, %eax		/* old + v == new value */
	ret
END(_atomic_add_32_nv)
     73 
/*
 * void atomic_and_32(volatile uint32_t *p, uint32_t v)
 *
 * Atomically *p &= v.  No return value.
 */
ENTRY(_atomic_and_32)
	movl	4(%esp), %edx		/* %edx = p */
	movl	8(%esp), %eax		/* %eax = v */
	LOCK(3)
	andl	%eax, (%edx)		/* locked read-modify-write */
	ret
END(_atomic_and_32)
     81 
/*
 * uint32_t atomic_and_32_nv(volatile uint32_t *p, uint32_t v)
 *
 * Atomically *p &= v; returns the new value.  Implemented as a
 * compare-and-swap loop because x86 has no fetch-and-and instruction.
 */
ENTRY(_atomic_and_32_nv)
	movl	4(%esp), %edx		/* %edx = p */
	movl	(%edx), %eax		/* %eax = expected old value */
0:
	movl	%eax, %ecx
	andl	8(%esp), %ecx		/* %ecx = old & v */
	LOCK(4)
	cmpxchgl %ecx, (%edx)		/* if (*p == %eax) *p = %ecx */
	jnz	1f			/* lost a race; %eax = current *p */
	movl	%ecx, %eax		/* success: return new value */
	ret
1:
	jmp	0b			/* retry with freshly-read value */
END(_atomic_and_32_nv)
     96 
/*
 * void atomic_dec_32(volatile uint32_t *p)
 *
 * Atomically (*p)--.  No return value.
 */
ENTRY(_atomic_dec_32)
	movl	4(%esp), %edx		/* %edx = p */
	LOCK(5)
	decl	(%edx)			/* locked read-modify-write */
	ret
END(_atomic_dec_32)
    103 
/*
 * uint32_t atomic_dec_32_nv(volatile uint32_t *p)
 *
 * Atomically (*p)--; returns the new value.
 */
ENTRY(_atomic_dec_32_nv)
	movl	4(%esp), %edx		/* %edx = p */
	movl	$-1, %eax
	LOCK(6)
	xaddl	%eax, (%edx)		/* %eax = old *p; *p -= 1 */
	decl	%eax			/* old - 1 == new value */
	ret
END(_atomic_dec_32_nv)
    112 
/*
 * void atomic_inc_32(volatile uint32_t *p)
 *
 * Atomically (*p)++.  No return value.
 */
ENTRY(_atomic_inc_32)
	movl	4(%esp), %edx		/* %edx = p */
	LOCK(7)
	incl	(%edx)			/* locked read-modify-write */
	ret
END(_atomic_inc_32)
    119 
/*
 * uint32_t atomic_inc_32_nv(volatile uint32_t *p)
 *
 * Atomically (*p)++; returns the new value.
 */
ENTRY(_atomic_inc_32_nv)
	movl	4(%esp), %edx		/* %edx = p */
	movl	$1, %eax
	LOCK(8)
	xaddl	%eax, (%edx)		/* %eax = old *p; *p += 1 */
	incl	%eax			/* old + 1 == new value */
	ret
END(_atomic_inc_32_nv)
    128 
/*
 * void atomic_or_32(volatile uint32_t *p, uint32_t v)
 *
 * Atomically *p |= v.  No return value.
 */
ENTRY(_atomic_or_32)
	movl	4(%esp), %edx		/* %edx = p */
	movl	8(%esp), %eax		/* %eax = v */
	LOCK(9)
	orl	%eax, (%edx)		/* locked read-modify-write */
	ret
END(_atomic_or_32)
    136 
/*
 * uint32_t atomic_or_32_nv(volatile uint32_t *p, uint32_t v)
 *
 * Atomically *p |= v; returns the new value.  Compare-and-swap loop,
 * because x86 has no fetch-and-or instruction.
 */
ENTRY(_atomic_or_32_nv)
	movl	4(%esp), %edx		/* %edx = p */
	movl	(%edx), %eax		/* %eax = expected old value */
0:
	movl	%eax, %ecx
	orl	8(%esp), %ecx		/* %ecx = old | v */
	LOCK(10)
	cmpxchgl %ecx, (%edx)		/* if (*p == %eax) *p = %ecx */
	jnz	1f			/* lost a race; %eax = current *p */
	movl	%ecx, %eax		/* success: return new value */
	ret
1:
	jmp	0b			/* retry with freshly-read value */
END(_atomic_or_32_nv)
    151 
/*
 * uint32_t atomic_swap_32(volatile uint32_t *p, uint32_t v)
 *
 * Atomically exchange *p with v; returns the previous value.
 * xchgl with a memory operand asserts the bus lock implicitly, so no
 * LOCK() is used here -- which is why patch number 11 does not appear
 * in the atomic_lockpatch table.
 */
ENTRY(_atomic_swap_32)
	movl	4(%esp), %edx		/* %edx = p */
	movl	8(%esp), %eax		/* %eax = v */
	xchgl	%eax, (%edx)		/* implicitly locked */
	ret
END(_atomic_swap_32)
    158 
/*
 * uint32_t atomic_cas_32(volatile uint32_t *p, uint32_t expected,
 *     uint32_t new)
 *
 * Atomic compare-and-swap; stores new only if *p == expected.
 * Returns the previous value of *p either way.
 */
ENTRY(_atomic_cas_32)
	movl	4(%esp), %edx		/* %edx = p */
	movl	8(%esp), %eax		/* %eax = expected */
	movl	12(%esp), %ecx		/* %ecx = new */
	LOCK(12)
	cmpxchgl %ecx, (%edx)		/* if (*p == %eax) *p = %ecx */
	/* %eax now contains the old value */
	ret
END(_atomic_cas_32)
    168 
/*
 * uint32_t atomic_cas_32_ni(volatile uint32_t *p, uint32_t expected,
 *     uint32_t new)
 *
 * Same as atomic_cas_32, but with no lock prefix: atomic only with
 * respect to the executing CPU.  NOTE(review): presumably the "_ni"
 * (non-interlocked) variant from atomic_ops(3) -- confirm intended
 * use before relying on it across processors.
 */
ENTRY(_atomic_cas_32_ni)
	movl	4(%esp), %edx		/* %edx = p */
	movl	8(%esp), %eax		/* %eax = expected */
	movl	12(%esp), %ecx		/* %ecx = new */
	cmpxchgl %ecx, (%edx)		/* no lock prefix */
	/* %eax now contains the old value */
	ret
END(_atomic_cas_32_ni)
    177 
/*
 * void membar_consumer(void)
 *
 * Read-side memory barrier.  A locked read-modify-write of a dead
 * stack slot acts as a full barrier on x86; the value written is
 * irrelevant.  ENDLABEL bounds the routine for run-time patching
 * (see sse2_lfence below).
 */
ENTRY(_membar_consumer)
	LOCK(13)
	addl	$0, -4(%esp)
	ret
END(_membar_consumer)
ENDLABEL(membar_consumer_end)
    184 
/*
 * void membar_producer(void)
 *
 * Write-side memory barrier.  x86 does not reorder stores with
 * earlier stores, so a plain store to a scratch slot below the
 * stack pointer suffices; the value is irrelevant.
 */
ENTRY(_membar_producer)
	/* A store is enough */
	movl	$0, -4(%esp)
	ret
END(_membar_producer)
ENDLABEL(membar_producer_end)
    191 
/*
 * void membar_sync(void)
 *
 * Full memory barrier: locked read-modify-write of a dead stack
 * slot, same technique as membar_consumer above.
 */
ENTRY(_membar_sync)
	LOCK(14)
	addl	$0, -4(%esp)
	ret
END(_membar_sync)
ENDLABEL(membar_sync_end)
    198 
#if defined(__HAVE_ATOMIC64_OPS) || defined(_KERNEL)
#ifdef XEN
/* Xen kernels always run on cmpxchg8b-capable CPUs; use it directly. */
STRONG_ALIAS(_atomic_cas_64,_atomic_cas_cx8)
#else
/*
 * uint64_t atomic_cas_64(volatile uint64_t *p, uint64_t old, uint64_t new)
 *
 * 64-bit compare-and-swap fallback for CPUs without cmpxchg8b (i486).
 * Stores new only if *p == old; returns the previous value of *p in
 * %edx:%eax either way.
 *
 * In the kernel proper (_HARDKERNEL) atomicity is provided by
 * disabling interrupts around the compare/store sequence.
 * NOTE(review): the sequence alone is not multiprocessor-atomic and
 * not atomic at all in userland builds; presumably it is only used
 * where that is safe -- confirm against the run-time selection code.
 *
 * Fix vs. previous revision: the arguments used to be fetched after
 * the _HARDKERNEL `pushf', which shifted every %esp offset by 4, so
 * 12(%esp) addressed the return address instead of `p'.  Load all
 * arguments before touching EFLAGS so the offsets are identical in
 * every configuration; this also narrows the interrupts-off window
 * to the actual critical section.
 */
ENTRY(_atomic_cas_64)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi		/* %edi = p */
	movl	16(%esp), %eax		/* %edx:%eax = expected old value */
	movl	20(%esp), %edx
	movl	24(%esp), %ebx		/* %ecx:%ebx = new value */
	movl	28(%esp), %ecx
#ifdef _HARDKERNEL
	pushf				/* save interrupt state ... */
	cli				/* ... and block interrupts */
#endif /* _HARDKERNEL */
	cmpl	0(%edi), %eax		/* *p == old? */
	jne	2f
	cmpl	4(%edi), %edx
	jne	2f
	movl	%ebx, 0(%edi)		/* matched: store new value */
	movl	%ecx, 4(%edi)
1:
#ifdef _HARDKERNEL
	popf				/* restore interrupt state */
#endif /* _HARDKERNEL */
	popl	%ebx
	popl	%edi
	ret				/* old value in %edx:%eax */
2:
	movl	0(%edi), %eax		/* mismatch: return current value */
	movl	4(%edi), %edx
	jmp	1b
END(_atomic_cas_64)
ENDLABEL(_atomic_cas_64_end)
#endif /* !XEN */
    235 
/*
 * uint64_t atomic_cas_cx8(volatile uint64_t *p, uint64_t old, uint64_t new)
 *
 * 64-bit compare-and-swap via cmpxchg8b: compares %edx:%eax with *p;
 * if equal, stores %ecx:%ebx, otherwise loads *p into %edx:%eax.
 * Either way the previous value ends up in %edx:%eax, our return.
 */
ENTRY(_atomic_cas_cx8)
	pushl	%edi
	pushl	%ebx
	movl	12(%esp), %edi		/* %edi = p (offsets include 2 pushes) */
	movl	16(%esp), %eax		/* %edx:%eax = expected old value */
	movl	20(%esp), %edx
	movl	24(%esp), %ebx		/* %ecx:%ebx = new value */
	movl	28(%esp), %ecx
	LOCK(15)
	cmpxchg8b (%edi)
	popl	%ebx
	popl	%edi
	ret
#ifdef _HARDKERNEL
/*
 * NOP (0x90) padding.  NOTE(review): presumably sizes this routine
 * for the run-time code patcher (note the ENDLABEL below) -- confirm
 * against the x86 patch code.
 */
#ifdef GPROF
	.space	16, 0x90
#else
	.space	32, 0x90
#endif
#endif /* _HARDKERNEL */
END(_atomic_cas_cx8)
ENDLABEL(_atomic_cas_cx8_end)
    258 #endif /* __HAVE_ATOMIC64_OPS || _KERNEL */
    259 
    260 #ifdef _HARDKERNEL
/*
 * Patch template: lfence-based barrier for SSE2-capable CPUs.
 * NOTE(review): the ENDLABEL suggests this body is copied over a
 * membar_* routine by the run-time patcher -- confirm in the x86
 * patch code before modifying either side.
 */
ENTRY(sse2_lfence)
	lfence
	ret
END(sse2_lfence)
ENDLABEL(sse2_lfence_end)
    266 
/*
 * Patch template: mfence-based full barrier for SSE2-capable CPUs.
 * NOTE(review): like sse2_lfence above, appears to be a source for
 * run-time patching -- confirm in the x86 patch code.
 */
ENTRY(sse2_mfence)
	mfence
	ret
END(sse2_mfence)
ENDLABEL(sse2_mfence_end)
    272 
/*
 * Zero-terminated table of the addresses of every LOCK(n) prefix
 * above, consumed by the kernel's run-time patcher (presumably to
 * remove the prefixes where they are unnecessary -- confirm in the
 * x86 patch code).  Patch number 11 is intentionally absent:
 * _atomic_swap_32 uses xchgl, which is implicitly locked.
 */
atomic_lockpatch:
	.globl	atomic_lockpatch
	.long	.Lpatch1, .Lpatch2, .Lpatch3, .Lpatch4, .Lpatch5
	.long	.Lpatch6, .Lpatch7, .Lpatch8, .Lpatch9, .Lpatch10
	.long	.Lpatch12, .Lpatch13, .Lpatch14, .Lpatch15, 0
    278 #endif	/* _HARDKERNEL */
    279 
/*
 * Public names.  ALIAS() binds them strongly in the kernel and weakly
 * in userland (see the macro definition above).  On i386 (ILP32),
 * int, long and pointers are all 32 bits wide, so every int/long/ptr
 * variant maps onto the corresponding _32 operation.
 */
ALIAS(atomic_add_32,_atomic_add_32)
ALIAS(atomic_add_int,_atomic_add_32)
ALIAS(atomic_add_long,_atomic_add_32)
ALIAS(atomic_add_ptr,_atomic_add_32)

ALIAS(atomic_add_32_nv,_atomic_add_32_nv)
ALIAS(atomic_add_int_nv,_atomic_add_32_nv)
ALIAS(atomic_add_long_nv,_atomic_add_32_nv)
ALIAS(atomic_add_ptr_nv,_atomic_add_32_nv)

ALIAS(atomic_and_32,_atomic_and_32)
ALIAS(atomic_and_uint,_atomic_and_32)
ALIAS(atomic_and_ulong,_atomic_and_32)
ALIAS(atomic_and_ptr,_atomic_and_32)

ALIAS(atomic_and_32_nv,_atomic_and_32_nv)
ALIAS(atomic_and_uint_nv,_atomic_and_32_nv)
ALIAS(atomic_and_ulong_nv,_atomic_and_32_nv)
ALIAS(atomic_and_ptr_nv,_atomic_and_32_nv)

ALIAS(atomic_dec_32,_atomic_dec_32)
ALIAS(atomic_dec_uint,_atomic_dec_32)
ALIAS(atomic_dec_ulong,_atomic_dec_32)
ALIAS(atomic_dec_ptr,_atomic_dec_32)

ALIAS(atomic_dec_32_nv,_atomic_dec_32_nv)
ALIAS(atomic_dec_uint_nv,_atomic_dec_32_nv)
ALIAS(atomic_dec_ulong_nv,_atomic_dec_32_nv)
ALIAS(atomic_dec_ptr_nv,_atomic_dec_32_nv)

ALIAS(atomic_inc_32,_atomic_inc_32)
ALIAS(atomic_inc_uint,_atomic_inc_32)
ALIAS(atomic_inc_ulong,_atomic_inc_32)
ALIAS(atomic_inc_ptr,_atomic_inc_32)

ALIAS(atomic_inc_32_nv,_atomic_inc_32_nv)
ALIAS(atomic_inc_uint_nv,_atomic_inc_32_nv)
ALIAS(atomic_inc_ulong_nv,_atomic_inc_32_nv)
ALIAS(atomic_inc_ptr_nv,_atomic_inc_32_nv)

ALIAS(atomic_or_32,_atomic_or_32)
ALIAS(atomic_or_uint,_atomic_or_32)
ALIAS(atomic_or_ulong,_atomic_or_32)
ALIAS(atomic_or_ptr,_atomic_or_32)

ALIAS(atomic_or_32_nv,_atomic_or_32_nv)
ALIAS(atomic_or_uint_nv,_atomic_or_32_nv)
ALIAS(atomic_or_ulong_nv,_atomic_or_32_nv)
ALIAS(atomic_or_ptr_nv,_atomic_or_32_nv)

ALIAS(atomic_swap_32,_atomic_swap_32)
ALIAS(atomic_swap_uint,_atomic_swap_32)
ALIAS(atomic_swap_ulong,_atomic_swap_32)
ALIAS(atomic_swap_ptr,_atomic_swap_32)

ALIAS(atomic_cas_32,_atomic_cas_32)
ALIAS(atomic_cas_uint,_atomic_cas_32)
ALIAS(atomic_cas_ulong,_atomic_cas_32)
ALIAS(atomic_cas_ptr,_atomic_cas_32)

ALIAS(atomic_cas_32_ni,_atomic_cas_32_ni)
ALIAS(atomic_cas_uint_ni,_atomic_cas_32_ni)
ALIAS(atomic_cas_ulong_ni,_atomic_cas_32_ni)
ALIAS(atomic_cas_ptr_ni,_atomic_cas_32_ni)

/* 64-bit CAS: only where _atomic_cas_64 was assembled (see above). */
#if defined(__HAVE_ATOMIC64_OPS) || defined(_KERNEL)
ALIAS(atomic_cas_64,_atomic_cas_64)
ALIAS(atomic_cas_64_ni,_atomic_cas_64)
ALIAS(__sync_val_compare_and_swap_8,_atomic_cas_64)
#endif /* __HAVE_ATOMIC64_OPS || _KERNEL */

ALIAS(membar_consumer,_membar_consumer)
ALIAS(membar_producer,_membar_producer)
ALIAS(membar_enter,_membar_consumer)
ALIAS(membar_exit,_membar_producer)
ALIAS(membar_sync,_membar_sync)
    356 
/*
 * Internal underscore-prefixed variants: always strong bindings, so
 * in-tree code can reference them regardless of how the public
 * (possibly weak) names above get resolved.
 */
STRONG_ALIAS(_atomic_add_int,_atomic_add_32)
STRONG_ALIAS(_atomic_add_long,_atomic_add_32)
STRONG_ALIAS(_atomic_add_ptr,_atomic_add_32)

STRONG_ALIAS(_atomic_add_int_nv,_atomic_add_32_nv)
STRONG_ALIAS(_atomic_add_long_nv,_atomic_add_32_nv)
STRONG_ALIAS(_atomic_add_ptr_nv,_atomic_add_32_nv)

STRONG_ALIAS(_atomic_and_uint,_atomic_and_32)
STRONG_ALIAS(_atomic_and_ulong,_atomic_and_32)
STRONG_ALIAS(_atomic_and_ptr,_atomic_and_32)

STRONG_ALIAS(_atomic_and_uint_nv,_atomic_and_32_nv)
STRONG_ALIAS(_atomic_and_ulong_nv,_atomic_and_32_nv)
STRONG_ALIAS(_atomic_and_ptr_nv,_atomic_and_32_nv)

STRONG_ALIAS(_atomic_dec_uint,_atomic_dec_32)
STRONG_ALIAS(_atomic_dec_ulong,_atomic_dec_32)
STRONG_ALIAS(_atomic_dec_ptr,_atomic_dec_32)

STRONG_ALIAS(_atomic_dec_uint_nv,_atomic_dec_32_nv)
STRONG_ALIAS(_atomic_dec_ulong_nv,_atomic_dec_32_nv)
STRONG_ALIAS(_atomic_dec_ptr_nv,_atomic_dec_32_nv)

STRONG_ALIAS(_atomic_inc_uint,_atomic_inc_32)
STRONG_ALIAS(_atomic_inc_ulong,_atomic_inc_32)
STRONG_ALIAS(_atomic_inc_ptr,_atomic_inc_32)

STRONG_ALIAS(_atomic_inc_uint_nv,_atomic_inc_32_nv)
STRONG_ALIAS(_atomic_inc_ulong_nv,_atomic_inc_32_nv)
STRONG_ALIAS(_atomic_inc_ptr_nv,_atomic_inc_32_nv)

STRONG_ALIAS(_atomic_or_uint,_atomic_or_32)
STRONG_ALIAS(_atomic_or_ulong,_atomic_or_32)
STRONG_ALIAS(_atomic_or_ptr,_atomic_or_32)

STRONG_ALIAS(_atomic_or_uint_nv,_atomic_or_32_nv)
STRONG_ALIAS(_atomic_or_ulong_nv,_atomic_or_32_nv)
STRONG_ALIAS(_atomic_or_ptr_nv,_atomic_or_32_nv)

STRONG_ALIAS(_atomic_swap_uint,_atomic_swap_32)
STRONG_ALIAS(_atomic_swap_ulong,_atomic_swap_32)
STRONG_ALIAS(_atomic_swap_ptr,_atomic_swap_32)

STRONG_ALIAS(_atomic_cas_uint,_atomic_cas_32)
STRONG_ALIAS(_atomic_cas_ulong,_atomic_cas_32)
STRONG_ALIAS(_atomic_cas_ptr,_atomic_cas_32)

STRONG_ALIAS(_atomic_cas_uint_ni,_atomic_cas_32_ni)
STRONG_ALIAS(_atomic_cas_ulong_ni,_atomic_cas_32_ni)
STRONG_ALIAS(_atomic_cas_ptr_ni,_atomic_cas_32_ni)

STRONG_ALIAS(_membar_enter,_membar_consumer)
STRONG_ALIAS(_membar_exit,_membar_producer)
    411