/*	$NetBSD: atomic_swap.S,v 1.7 2013/08/11 04:41:17 matt Exp $	*/

/*-
 * Copyright (c) 2007,2012 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe and Matt Thomas.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
301.2Smatt */ 311.2Smatt 321.2Smatt#include "atomic_op_asm.h" 331.2Smatt 341.3Smatt/* 351.3Smatt * While SWP{B} is sufficient on its own for pre-ARMv7 CPUs, on MP ARMv7 cores 361.3Smatt * SWP{B} is disabled since it's no longer atomic among multiple CPUs. They 371.3Smatt * will actually raise an UNDEFINED exception. 381.3Smatt * 391.3Smatt * So if we use the LDREX/STREX template, but use a SWP instruction followed 401.3Smatt * by a MOV instruction (using a temporary register), that gives a handler 411.3Smatt * for the SWP UNDEFINED exception enough information to "patch" this instance 421.3Smatt * SWP with correct forms of LDREX/STREX. (note that this would happen even 431.3Smatt * "read-only" pages. If the page gets tossed, we will get another exception 441.3Smatt * and fix yet again). 451.3Smatt */ 461.3Smatt 471.2SmattENTRY_NP(_atomic_swap_32) 481.6Smatt mov ip, r0 491.3Smatt1: 501.3Smatt#ifdef _ARM_ARCH_6 511.6Smatt ldrex r0, [ip] 521.4Smatt cmp r0, r1 531.6Smatt#ifdef __thumb__ 541.6Smatt beq 99f 551.6Smatt strex r3, r1, [ip] 561.6Smatt cmp r3, #0 571.3Smatt#else 581.6Smatt strexne r3, r1, [ip] 591.6Smatt cmpne r3, #0 601.6Smatt#endif 611.6Smatt#else 621.6Smatt swp r0, r1, [ip] 631.4Smatt cmp r0, r1 641.6Smatt movnes r3, #0 651.6Smatt cmpne r3, #0 661.3Smatt#endif 671.3Smatt bne 1b 681.4Smatt#ifdef _ARM_ARCH_7 691.4Smatt dmb 701.4Smatt#else 711.6Smatt mcr p15, 0, r3, c7, c10, 5 /* data memory barrier */ 721.4Smatt#endif 731.6Smatt99: 741.2Smatt RET 751.7SmattEND(_atomic_swap_32) 761.7Smatt 771.2SmattATOMIC_OP_ALIAS(atomic_swap_32,_atomic_swap_32) 781.2SmattATOMIC_OP_ALIAS(atomic_swap_uint,_atomic_swap_32) 791.2SmattATOMIC_OP_ALIAS(atomic_swap_ulong,_atomic_swap_32) 801.2SmattATOMIC_OP_ALIAS(atomic_swap_ptr,_atomic_swap_32) 811.2SmattSTRONG_ALIAS(_atomic_swap_uint,_atomic_swap_32) 821.2SmattSTRONG_ALIAS(_atomic_swap_ulong,_atomic_swap_32) 831.2SmattSTRONG_ALIAS(_atomic_swap_ptr,_atomic_swap_32) 841.2Smatt 851.2SmattENTRY_NP(_atomic_swap_8) 861.7Smatt mov ip, r0 
871.3Smatt1: 881.3Smatt#ifdef _ARM_ARCH_6 891.7Smatt ldrexb r0, [ip] 901.7Smatt strexb r3, r1, [ip] 911.3Smatt#else 921.7Smatt swpb r0, r1, [ip] 931.3Smatt mov r3, #0 941.3Smatt#endif 951.3Smatt cmp r3, #0 961.3Smatt bne 1b 971.4Smatt#ifdef _ARM_ARCH_7 981.4Smatt dmb 991.4Smatt#else 1001.4Smatt mcr p15, 0, ip, c7, c10, 5 /* data memory barrier */ 1011.4Smatt#endif 1021.2Smatt RET 1031.7SmattEND(_atomic_swap_8) 1041.7Smatt 1051.2SmattATOMIC_OP_ALIAS(atomic_swap_8,_atomic_swap_8) 1061.2SmattATOMIC_OP_ALIAS(atomic_swap_char,_atomic_swap_8) 1071.2SmattATOMIC_OP_ALIAS(atomic_swap_uchar,_atomic_swap_8) 1081.3SmattSTRONG_ALIAS(_atomic_swap_char,_atomic_swap_8) 1091.3SmattSTRONG_ALIAS(_atomic_swap_uchar,_atomic_swap_8) 110