/*	$NetBSD: atomic.S,v 1.32 2025/09/06 02:53:21 riastradh Exp $	*/

/*-
 * Copyright (c) 2007 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe, and by Andrew Doran.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
30 1.1 ad */ 31 1.1 ad 32 1.14 pooka #include <sys/param.h> 33 1.1 ad #include <machine/asm.h> 34 1.1 ad 35 1.15 pooka #ifdef _KERNEL 36 1.15 pooka #define ALIAS(f, t) STRONG_ALIAS(f,t) 37 1.15 pooka #else 38 1.15 pooka #define ALIAS(f, t) WEAK_ALIAS(f,t) 39 1.15 pooka #endif 40 1.15 pooka 41 1.31 riastrad #ifdef _HARDKERNEL 42 1.31 riastrad #include <machine/frameasm.h> 43 1.31 riastrad #define LOCK HOTPATCH(HP_NAME_NOLOCK, 1); lock 44 1.31 riastrad #else 45 1.31 riastrad #define LOCK lock 46 1.31 riastrad #endif 47 1.31 riastrad 48 1.1 ad .text 49 1.1 ad 50 1.1 ad /* 32-bit */ 51 1.1 ad 52 1.12 chs ENTRY(_atomic_add_32) 53 1.21 maxv LOCK 54 1.1 ad addl %esi, (%rdi) 55 1.1 ad ret 56 1.17 uebayasi END(_atomic_add_32) 57 1.1 ad 58 1.12 chs ENTRY(_atomic_add_32_nv) 59 1.1 ad movl %esi, %eax 60 1.21 maxv LOCK 61 1.1 ad xaddl %eax, (%rdi) 62 1.1 ad addl %esi, %eax 63 1.1 ad ret 64 1.17 uebayasi END(_atomic_add_32_nv) 65 1.1 ad 66 1.12 chs ENTRY(_atomic_and_32) 67 1.21 maxv LOCK 68 1.1 ad andl %esi, (%rdi) 69 1.1 ad ret 70 1.17 uebayasi END(_atomic_and_32) 71 1.1 ad 72 1.12 chs ENTRY(_atomic_and_32_nv) 73 1.1 ad movl (%rdi), %eax 74 1.1 ad 1: 75 1.1 ad movl %eax, %ecx 76 1.1 ad andl %esi, %ecx 77 1.21 maxv LOCK 78 1.1 ad cmpxchgl %ecx, (%rdi) 79 1.1 ad jnz 1b 80 1.1 ad movl %ecx, %eax 81 1.1 ad ret 82 1.17 uebayasi END(_atomic_and_32_nv) 83 1.1 ad 84 1.12 chs ENTRY(_atomic_dec_32) 85 1.21 maxv LOCK 86 1.1 ad decl (%rdi) 87 1.1 ad ret 88 1.17 uebayasi END(_atomic_dec_32) 89 1.1 ad 90 1.12 chs ENTRY(_atomic_dec_32_nv) 91 1.1 ad movl $-1, %eax 92 1.21 maxv LOCK 93 1.1 ad xaddl %eax, (%rdi) 94 1.1 ad decl %eax 95 1.1 ad ret 96 1.17 uebayasi END(_atomic_dec_32_nv) 97 1.1 ad 98 1.12 chs ENTRY(_atomic_inc_32) 99 1.21 maxv LOCK 100 1.1 ad incl (%rdi) 101 1.1 ad ret 102 1.17 uebayasi END(_atomic_inc_32) 103 1.1 ad 104 1.12 chs ENTRY(_atomic_inc_32_nv) 105 1.1 ad movl $1, %eax 106 1.21 maxv LOCK 107 1.1 ad xaddl %eax, (%rdi) 108 1.1 ad incl %eax 109 1.1 ad ret 110 1.17 uebayasi 
END(_atomic_inc_32_nv) 111 1.1 ad 112 1.12 chs ENTRY(_atomic_or_32) 113 1.21 maxv LOCK 114 1.1 ad orl %esi, (%rdi) 115 1.1 ad ret 116 1.17 uebayasi END(_atomic_or_32) 117 1.1 ad 118 1.12 chs ENTRY(_atomic_or_32_nv) 119 1.1 ad movl (%rdi), %eax 120 1.1 ad 1: 121 1.1 ad movl %eax, %ecx 122 1.1 ad orl %esi, %ecx 123 1.21 maxv LOCK 124 1.1 ad cmpxchgl %ecx, (%rdi) 125 1.1 ad jnz 1b 126 1.1 ad movl %ecx, %eax 127 1.1 ad ret 128 1.17 uebayasi END(_atomic_or_32_nv) 129 1.1 ad 130 1.12 chs ENTRY(_atomic_swap_32) 131 1.5 ad movl %esi, %eax 132 1.5 ad xchgl %eax, (%rdi) 133 1.1 ad ret 134 1.17 uebayasi END(_atomic_swap_32) 135 1.1 ad 136 1.12 chs ENTRY(_atomic_cas_32) 137 1.1 ad movl %esi, %eax 138 1.21 maxv LOCK 139 1.1 ad cmpxchgl %edx, (%rdi) 140 1.1 ad /* %eax now contains the old value */ 141 1.1 ad ret 142 1.17 uebayasi END(_atomic_cas_32) 143 1.1 ad 144 1.12 chs ENTRY(_atomic_cas_32_ni) 145 1.9 ad movl %esi, %eax 146 1.9 ad cmpxchgl %edx, (%rdi) 147 1.9 ad /* %eax now contains the old value */ 148 1.9 ad ret 149 1.17 uebayasi END(_atomic_cas_32_ni) 150 1.9 ad 151 1.1 ad /* 64-bit */ 152 1.1 ad 153 1.12 chs ENTRY(_atomic_add_64) 154 1.21 maxv LOCK 155 1.1 ad addq %rsi, (%rdi) 156 1.1 ad ret 157 1.17 uebayasi END(_atomic_add_64) 158 1.1 ad 159 1.12 chs ENTRY(_atomic_add_64_nv) 160 1.1 ad movq %rsi, %rax 161 1.21 maxv LOCK 162 1.1 ad xaddq %rax, (%rdi) 163 1.1 ad addq %rsi, %rax 164 1.1 ad ret 165 1.17 uebayasi END(_atomic_add_64_nv) 166 1.1 ad 167 1.12 chs ENTRY(_atomic_and_64) 168 1.21 maxv LOCK 169 1.1 ad andq %rsi, (%rdi) 170 1.1 ad ret 171 1.17 uebayasi END(_atomic_and_64) 172 1.1 ad 173 1.12 chs ENTRY(_atomic_and_64_nv) 174 1.1 ad movq (%rdi), %rax 175 1.1 ad 1: 176 1.1 ad movq %rax, %rcx 177 1.1 ad andq %rsi, %rcx 178 1.21 maxv LOCK 179 1.1 ad cmpxchgq %rcx, (%rdi) 180 1.1 ad jnz 1b 181 1.1 ad movq %rcx, %rax 182 1.1 ad ret 183 1.17 uebayasi END(_atomic_and_64_nv) 184 1.1 ad 185 1.12 chs ENTRY(_atomic_dec_64) 186 1.21 maxv LOCK 187 1.1 ad decq (%rdi) 188 1.1 ad 
ret 189 1.17 uebayasi END(_atomic_dec_64) 190 1.1 ad 191 1.12 chs ENTRY(_atomic_dec_64_nv) 192 1.1 ad movq $-1, %rax 193 1.21 maxv LOCK 194 1.1 ad xaddq %rax, (%rdi) 195 1.1 ad decq %rax 196 1.1 ad ret 197 1.17 uebayasi END(_atomic_dec_64_nv) 198 1.1 ad 199 1.12 chs ENTRY(_atomic_inc_64) 200 1.21 maxv LOCK 201 1.1 ad incq (%rdi) 202 1.1 ad ret 203 1.17 uebayasi END(_atomic_inc_64) 204 1.1 ad 205 1.12 chs ENTRY(_atomic_inc_64_nv) 206 1.1 ad movq $1, %rax 207 1.21 maxv LOCK 208 1.1 ad xaddq %rax, (%rdi) 209 1.1 ad incq %rax 210 1.1 ad ret 211 1.17 uebayasi END(_atomic_inc_64_nv) 212 1.1 ad 213 1.12 chs ENTRY(_atomic_or_64) 214 1.21 maxv LOCK 215 1.1 ad orq %rsi, (%rdi) 216 1.1 ad ret 217 1.17 uebayasi END(_atomic_or_64) 218 1.1 ad 219 1.12 chs ENTRY(_atomic_or_64_nv) 220 1.1 ad movq (%rdi), %rax 221 1.1 ad 1: 222 1.1 ad movq %rax, %rcx 223 1.1 ad orq %rsi, %rcx 224 1.21 maxv LOCK 225 1.1 ad cmpxchgq %rcx, (%rdi) 226 1.1 ad jnz 1b 227 1.1 ad movq %rcx, %rax 228 1.1 ad ret 229 1.17 uebayasi END(_atomic_or_64_nv) 230 1.1 ad 231 1.12 chs ENTRY(_atomic_swap_64) 232 1.5 ad movq %rsi, %rax 233 1.5 ad xchgq %rax, (%rdi) 234 1.1 ad ret 235 1.17 uebayasi END(_atomic_swap_64) 236 1.1 ad 237 1.12 chs ENTRY(_atomic_cas_64) 238 1.1 ad movq %rsi, %rax 239 1.21 maxv LOCK 240 1.7 ad cmpxchgq %rdx, (%rdi) 241 1.1 ad /* %eax now contains the old value */ 242 1.1 ad ret 243 1.17 uebayasi END(_atomic_cas_64) 244 1.1 ad 245 1.12 chs ENTRY(_atomic_cas_64_ni) 246 1.9 ad movq %rsi, %rax 247 1.9 ad cmpxchgq %rdx, (%rdi) 248 1.9 ad /* %eax now contains the old value */ 249 1.9 ad ret 250 1.17 uebayasi END(_atomic_cas_64_ni) 251 1.9 ad 252 1.1 ad /* memory barriers */ 253 1.1 ad 254 1.28 riastrad ENTRY(_membar_acquire) 255 1.24 riastrad /* 256 1.24 riastrad * Every load from normal memory is a load-acquire on x86, so 257 1.24 riastrad * there is never any need for explicit barriers to order 258 1.24 riastrad * load-before-anything. 
259 1.24 riastrad */ 260 1.1 ad ret 261 1.28 riastrad END(_membar_acquire) 262 1.1 ad 263 1.28 riastrad ENTRY(_membar_release) 264 1.25 riastrad /* 265 1.25 riastrad * Every store to normal memory is a store-release on x86, so 266 1.25 riastrad * there is never any need for explicit barriers to order 267 1.25 riastrad * anything-before-store. 268 1.25 riastrad */ 269 1.1 ad ret 270 1.28 riastrad END(_membar_release) 271 1.1 ad 272 1.12 chs ENTRY(_membar_sync) 273 1.26 riastrad /* 274 1.29 riastrad * MFENCE, or a serializing instruction like a locked ADDQ, 275 1.26 riastrad * is necessary to order store-before-load. Every other 276 1.26 riastrad * ordering -- load-before-anything, anything-before-store -- 277 1.26 riastrad * is already guaranteed without explicit barriers. 278 1.29 riastrad * 279 1.29 riastrad * Empirically it turns out locked ADDQ is cheaper than MFENCE, 280 1.29 riastrad * so we use that, with an offset below the return address on 281 1.29 riastrad * the stack to avoid a false dependency with RET. (It might 282 1.29 riastrad * even be better to use a much lower offset, say -128, to 283 1.29 riastrad * avoid false dependencies for subsequent callees of the 284 1.29 riastrad * caller.) 285 1.29 riastrad * 286 1.29 riastrad * https://pvk.ca/Blog/2014/10/19/performance-optimisation-~-writing-an-essay/ 287 1.29 riastrad * https://shipilev.net/blog/2014/on-the-fence-with-dependencies/ 288 1.29 riastrad * https://www.agner.org/optimize/instruction_tables.pdf 289 1.30 riastrad * 290 1.32 riastrad * Sync with paravirt_membar_sync in 291 1.32 riastrad * sys/arch/amd64/amd64/cpufunc.S. 
292 1.26 riastrad */ 293 1.21 maxv LOCK 294 1.1 ad addq $0, -8(%rsp) 295 1.1 ad ret 296 1.17 uebayasi END(_membar_sync) 297 1.1 ad 298 1.1 ad ALIAS(atomic_add_32,_atomic_add_32) 299 1.1 ad ALIAS(atomic_add_64,_atomic_add_64) 300 1.4 ad ALIAS(atomic_add_int,_atomic_add_32) 301 1.4 ad ALIAS(atomic_add_long,_atomic_add_64) 302 1.1 ad ALIAS(atomic_add_ptr,_atomic_add_64) 303 1.1 ad 304 1.1 ad ALIAS(atomic_add_32_nv,_atomic_add_32_nv) 305 1.1 ad ALIAS(atomic_add_64_nv,_atomic_add_64_nv) 306 1.4 ad ALIAS(atomic_add_int_nv,_atomic_add_32_nv) 307 1.4 ad ALIAS(atomic_add_long_nv,_atomic_add_64_nv) 308 1.1 ad ALIAS(atomic_add_ptr_nv,_atomic_add_64_nv) 309 1.1 ad 310 1.1 ad ALIAS(atomic_and_32,_atomic_and_32) 311 1.1 ad ALIAS(atomic_and_64,_atomic_and_64) 312 1.1 ad ALIAS(atomic_and_uint,_atomic_and_32) 313 1.1 ad ALIAS(atomic_and_ulong,_atomic_and_64) 314 1.1 ad ALIAS(atomic_and_ptr,_atomic_and_64) 315 1.1 ad 316 1.1 ad ALIAS(atomic_and_32_nv,_atomic_and_32_nv) 317 1.1 ad ALIAS(atomic_and_64_nv,_atomic_and_64_nv) 318 1.1 ad ALIAS(atomic_and_uint_nv,_atomic_and_32_nv) 319 1.1 ad ALIAS(atomic_and_ulong_nv,_atomic_and_64_nv) 320 1.1 ad ALIAS(atomic_and_ptr_nv,_atomic_and_64_nv) 321 1.1 ad 322 1.1 ad ALIAS(atomic_dec_32,_atomic_dec_32) 323 1.1 ad ALIAS(atomic_dec_64,_atomic_dec_64) 324 1.1 ad ALIAS(atomic_dec_uint,_atomic_dec_32) 325 1.1 ad ALIAS(atomic_dec_ulong,_atomic_dec_64) 326 1.1 ad ALIAS(atomic_dec_ptr,_atomic_dec_64) 327 1.1 ad 328 1.1 ad ALIAS(atomic_dec_32_nv,_atomic_dec_32_nv) 329 1.1 ad ALIAS(atomic_dec_64_nv,_atomic_dec_64_nv) 330 1.1 ad ALIAS(atomic_dec_uint_nv,_atomic_dec_32_nv) 331 1.1 ad ALIAS(atomic_dec_ulong_nv,_atomic_dec_64_nv) 332 1.1 ad ALIAS(atomic_dec_ptr_nv,_atomic_dec_64_nv) 333 1.1 ad 334 1.1 ad ALIAS(atomic_inc_32,_atomic_inc_32) 335 1.1 ad ALIAS(atomic_inc_64,_atomic_inc_64) 336 1.1 ad ALIAS(atomic_inc_uint,_atomic_inc_32) 337 1.1 ad ALIAS(atomic_inc_ulong,_atomic_inc_64) 338 1.1 ad ALIAS(atomic_inc_ptr,_atomic_inc_64) 339 1.1 ad 340 1.1 ad 
ALIAS(atomic_inc_32_nv,_atomic_inc_32_nv) 341 1.1 ad ALIAS(atomic_inc_64_nv,_atomic_inc_64_nv) 342 1.1 ad ALIAS(atomic_inc_uint_nv,_atomic_inc_32_nv) 343 1.1 ad ALIAS(atomic_inc_ulong_nv,_atomic_inc_64_nv) 344 1.1 ad ALIAS(atomic_inc_ptr_nv,_atomic_inc_64_nv) 345 1.1 ad 346 1.1 ad ALIAS(atomic_or_32,_atomic_or_32) 347 1.18 isaki ALIAS(atomic_or_64,_atomic_or_64) 348 1.1 ad ALIAS(atomic_or_uint,_atomic_or_32) 349 1.1 ad ALIAS(atomic_or_ulong,_atomic_or_64) 350 1.1 ad ALIAS(atomic_or_ptr,_atomic_or_64) 351 1.1 ad 352 1.1 ad ALIAS(atomic_or_32_nv,_atomic_or_32_nv) 353 1.1 ad ALIAS(atomic_or_64_nv,_atomic_or_64_nv) 354 1.1 ad ALIAS(atomic_or_uint_nv,_atomic_or_32_nv) 355 1.1 ad ALIAS(atomic_or_ulong_nv,_atomic_or_64_nv) 356 1.1 ad ALIAS(atomic_or_ptr_nv,_atomic_or_64_nv) 357 1.1 ad 358 1.1 ad ALIAS(atomic_swap_32,_atomic_swap_32) 359 1.1 ad ALIAS(atomic_swap_64,_atomic_swap_64) 360 1.1 ad ALIAS(atomic_swap_uint,_atomic_swap_32) 361 1.1 ad ALIAS(atomic_swap_ulong,_atomic_swap_64) 362 1.1 ad ALIAS(atomic_swap_ptr,_atomic_swap_64) 363 1.1 ad 364 1.1 ad ALIAS(atomic_cas_32,_atomic_cas_32) 365 1.1 ad ALIAS(atomic_cas_64,_atomic_cas_64) 366 1.1 ad ALIAS(atomic_cas_uint,_atomic_cas_32) 367 1.1 ad ALIAS(atomic_cas_ulong,_atomic_cas_64) 368 1.1 ad ALIAS(atomic_cas_ptr,_atomic_cas_64) 369 1.1 ad 370 1.9 ad ALIAS(atomic_cas_32_ni,_atomic_cas_32_ni) 371 1.9 ad ALIAS(atomic_cas_64_ni,_atomic_cas_64_ni) 372 1.9 ad ALIAS(atomic_cas_uint_ni,_atomic_cas_32_ni) 373 1.9 ad ALIAS(atomic_cas_ulong_ni,_atomic_cas_64_ni) 374 1.9 ad ALIAS(atomic_cas_ptr_ni,_atomic_cas_64_ni) 375 1.9 ad 376 1.28 riastrad ALIAS(membar_acquire,_membar_acquire) 377 1.28 riastrad ALIAS(membar_release,_membar_release) 378 1.28 riastrad ALIAS(membar_sync,_membar_sync) 379 1.28 riastrad 380 1.28 riastrad ALIAS(membar_consumer,_membar_acquire) 381 1.28 riastrad ALIAS(membar_producer,_membar_release) 382 1.27 riastrad ALIAS(membar_enter,_membar_sync) 383 1.28 riastrad ALIAS(membar_exit,_membar_release) 384 1.1 ad 
ALIAS(membar_sync,_membar_sync) 385 1.6 ad 386 1.6 ad STRONG_ALIAS(_atomic_add_int,_atomic_add_32) 387 1.6 ad STRONG_ALIAS(_atomic_add_long,_atomic_add_64) 388 1.6 ad STRONG_ALIAS(_atomic_add_ptr,_atomic_add_64) 389 1.6 ad 390 1.6 ad STRONG_ALIAS(_atomic_add_int_nv,_atomic_add_32_nv) 391 1.6 ad STRONG_ALIAS(_atomic_add_long_nv,_atomic_add_64_nv) 392 1.6 ad STRONG_ALIAS(_atomic_add_ptr_nv,_atomic_add_64_nv) 393 1.6 ad 394 1.6 ad STRONG_ALIAS(_atomic_and_uint,_atomic_and_32) 395 1.6 ad STRONG_ALIAS(_atomic_and_ulong,_atomic_and_64) 396 1.6 ad STRONG_ALIAS(_atomic_and_ptr,_atomic_and_64) 397 1.6 ad 398 1.6 ad STRONG_ALIAS(_atomic_and_uint_nv,_atomic_and_32_nv) 399 1.6 ad STRONG_ALIAS(_atomic_and_ulong_nv,_atomic_and_64_nv) 400 1.6 ad STRONG_ALIAS(_atomic_and_ptr_nv,_atomic_and_64_nv) 401 1.6 ad 402 1.6 ad STRONG_ALIAS(_atomic_dec_uint,_atomic_dec_32) 403 1.6 ad STRONG_ALIAS(_atomic_dec_ulong,_atomic_dec_64) 404 1.6 ad STRONG_ALIAS(_atomic_dec_ptr,_atomic_dec_64) 405 1.6 ad 406 1.6 ad STRONG_ALIAS(_atomic_dec_uint_nv,_atomic_dec_32_nv) 407 1.6 ad STRONG_ALIAS(_atomic_dec_ulong_nv,_atomic_dec_64_nv) 408 1.6 ad STRONG_ALIAS(_atomic_dec_ptr_nv,_atomic_dec_64_nv) 409 1.6 ad 410 1.6 ad STRONG_ALIAS(_atomic_inc_uint,_atomic_inc_32) 411 1.6 ad STRONG_ALIAS(_atomic_inc_ulong,_atomic_inc_64) 412 1.6 ad STRONG_ALIAS(_atomic_inc_ptr,_atomic_inc_64) 413 1.6 ad 414 1.6 ad STRONG_ALIAS(_atomic_inc_uint_nv,_atomic_inc_32_nv) 415 1.6 ad STRONG_ALIAS(_atomic_inc_ulong_nv,_atomic_inc_64_nv) 416 1.6 ad STRONG_ALIAS(_atomic_inc_ptr_nv,_atomic_inc_64_nv) 417 1.6 ad 418 1.6 ad STRONG_ALIAS(_atomic_or_uint,_atomic_or_32) 419 1.6 ad STRONG_ALIAS(_atomic_or_ulong,_atomic_or_64) 420 1.6 ad STRONG_ALIAS(_atomic_or_ptr,_atomic_or_64) 421 1.6 ad 422 1.6 ad STRONG_ALIAS(_atomic_or_uint_nv,_atomic_or_32_nv) 423 1.6 ad STRONG_ALIAS(_atomic_or_ulong_nv,_atomic_or_64_nv) 424 1.6 ad STRONG_ALIAS(_atomic_or_ptr_nv,_atomic_or_64_nv) 425 1.6 ad 426 1.6 ad STRONG_ALIAS(_atomic_swap_uint,_atomic_swap_32) 427 
1.6 ad STRONG_ALIAS(_atomic_swap_ulong,_atomic_swap_64) 428 1.6 ad STRONG_ALIAS(_atomic_swap_ptr,_atomic_swap_64) 429 1.6 ad 430 1.6 ad STRONG_ALIAS(_atomic_cas_uint,_atomic_cas_32) 431 1.6 ad STRONG_ALIAS(_atomic_cas_ulong,_atomic_cas_64) 432 1.6 ad STRONG_ALIAS(_atomic_cas_ptr,_atomic_cas_64) 433 1.8 ad 434 1.9 ad STRONG_ALIAS(_atomic_cas_uint_ni,_atomic_cas_32_ni) 435 1.9 ad STRONG_ALIAS(_atomic_cas_ulong_ni,_atomic_cas_64_ni) 436 1.9 ad STRONG_ALIAS(_atomic_cas_ptr_ni,_atomic_cas_64_ni) 437 1.9 ad 438 1.28 riastrad STRONG_ALIAS(_membar_consumer,_membar_acquire) 439 1.28 riastrad STRONG_ALIAS(_membar_producer,_membar_release) 440 1.27 riastrad STRONG_ALIAS(_membar_enter,_membar_sync) 441 1.28 riastrad STRONG_ALIAS(_membar_exit,_membar_release) 442