/*	$NetBSD: atomic_swap.S,v 1.17 2021/04/26 21:40:21 skrll Exp $	*/

/*-
 * Copyright (c) 2007,2012 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe and Matt Thomas.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include "atomic_op_asm.h"

/*
 * While SWP{B} is sufficient on its own for pre-ARMv7 CPUs, on MP ARMv7
 * cores SWP{B} is disabled since it is no longer atomic among multiple
 * CPUs; executing it will actually raise an UNDEFINED exception.
 *
 * So we follow the LDREX/STREX template, but substitute a SWP instruction
 * followed by a MOV instruction (using a temporary register).  That gives
 * a handler for the SWP UNDEFINED exception enough information to "patch"
 * this instance of SWP with the correct forms of LDREX/STREX.  (Note that
 * this works even on "read-only" pages: if the page gets tossed, we will
 * simply take another exception and patch it yet again.)
 */
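/*
 * As a concrete illustration (a sketch of the idea only; the actual
 * UNDEFINED handler lives elsewhere), the pre-ARMv6 byte path below is
 * deliberately laid out as
 *
 *	swpb	r0, r1, [ip]
 *	mov	r3, #0
 *
 * so that the handler has room to rewrite those two slots in place as
 *
 *	ldrexb	r0, [ip]
 *	strexb	r3, r1, [ip]
 *
 * after which the shared "cmp r3, #0; bne 1b" tail retries the loop
 * until the store-exclusive succeeds.
 */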

ENTRY_NP(_atomic_swap_32)
	mov	ip, r0			/* need r0 to return the old value */
1:
#ifdef _ARM_ARCH_6
	ldrex	r0, [ip]
	cmp	r0, r1			/* skip the store if already equal */
#ifdef __thumb__
	beq	99f
	strex	r3, r1, [ip]
	cmp	r3, #0
#else
	strexne	r3, r1, [ip]
	cmpne	r3, #0
#endif
#else
	swp	r0, r1, [ip]
	cmp	r0, r1
	movsne	r3, #0
	cmpne	r3, #0
#endif
	bne	1b			/* retry if the store-exclusive failed */
#ifdef _ARM_ARCH_7
	dmb
#else
	mcr	p15, 0, r3, c7, c10, 5	/* data memory barrier */
#endif
99:
	RET
END(_atomic_swap_32)

ATOMIC_OP_ALIAS(atomic_swap_32,_atomic_swap_32)
ATOMIC_OP_ALIAS(atomic_swap_uint,_atomic_swap_32)
ATOMIC_OP_ALIAS(atomic_swap_ulong,_atomic_swap_32)
ATOMIC_OP_ALIAS(atomic_swap_ptr,_atomic_swap_32)
CRT_ALIAS(__sync_lock_test_and_set_4,_atomic_swap_32)
CRT_ALIAS(__atomic_exchange_4,_atomic_swap_32)
STRONG_ALIAS(_atomic_swap_uint,_atomic_swap_32)
STRONG_ALIAS(_atomic_swap_ulong,_atomic_swap_32)
STRONG_ALIAS(_atomic_swap_ptr,_atomic_swap_32)

#if (!defined(_KERNEL) || !defined(_RUMPKERNEL)) && !defined(_STANDALONE)
ENTRY_NP(__sync_lock_release_4)
	mov	r1, #0
#ifdef _ARM_ARCH_7
	dmb	ishst			/* order prior stores before the release */
#else
	mcr	p15, 0, r1, c7, c10, 5	/* data memory barrier */
#endif
	str	r1, [r0]		/* clear the lock word */
	RET
END(__sync_lock_release_4)
#endif

ENTRY_NP(_atomic_swap_8)
	mov	ip, r0			/* need r0 to return the old value */
1:
#ifdef _ARM_ARCH_6
	ldrexb	r0, [ip]
	strexb	r3, r1, [ip]
#else
	swpb	r0, r1, [ip]
	mov	r3, #0
#endif
	cmp	r3, #0
	bne	1b
#ifdef _ARM_ARCH_7
	dmb
#else
	mcr	p15, 0, ip, c7, c10, 5	/* data memory barrier */
#endif
	RET
END(_atomic_swap_8)

ATOMIC_OP_ALIAS(atomic_swap_8,_atomic_swap_8)
ATOMIC_OP_ALIAS(atomic_swap_char,_atomic_swap_8)
ATOMIC_OP_ALIAS(atomic_swap_uchar,_atomic_swap_8)
CRT_ALIAS(__sync_lock_test_and_set_1,_atomic_swap_8)
CRT_ALIAS(__atomic_exchange_1,_atomic_swap_8)
STRONG_ALIAS(_atomic_swap_char,_atomic_swap_8)
STRONG_ALIAS(_atomic_swap_uchar,_atomic_swap_8)

#if (!defined(_KERNEL) || !defined(_RUMPKERNEL)) && !defined(_STANDALONE)
ENTRY_NP(__sync_lock_release_1)
	mov	r1, #0
#ifdef _ARM_ARCH_7
	dmb	ishst			/* order prior stores before the release */
#else
	mcr	p15, 0, r1, c7, c10, 5	/* data memory barrier */
#endif
	strb	r1, [r0]		/* clear the lock byte */
	RET
END(__sync_lock_release_1)
#endif
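
/*
 * Usage sketch (a hypothetical C caller, not part of this file): the
 * CRT_ALIAS entries above carry the sized libcall names GCC/Clang use
 * for the __sync spinlock builtins, so a byte-wide test-and-set lock
 * ends up in _atomic_swap_8 and __sync_lock_release_1:
 *
 *	while (__sync_lock_test_and_set(&lock, 1) != 0)
 *		continue;		// acquire: atomically swap in 1
 *	...critical section...
 *	__sync_lock_release(&lock);	// release: barrier, then store 0
 */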