/*	$NetBSD: atomic_and_64.S,v 1.13 2021/07/28 07:32:20 skrll Exp $	*/

/*-
 * Copyright (c) 2012 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas <matt@3am-software.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
291.1Smatt */ 301.1Smatt 311.1Smatt#include "atomic_op_asm.h" 321.1Smatt 331.12Sskrll#if defined(_ARM_ARCH_6) 341.1Smatt 351.1SmattENTRY_NP(_atomic_and_64_nv) 361.11Sskrll push {r3, r4} /* save temporary */ 371.1Smatt#ifndef __ARM_EABI__ 381.1Smatt mov r3, r2 391.1Smatt mov r2, r1 401.1Smatt#endif 411.5Smatt mov ip, r0 /* need r0 for return value */ 421.9Sjoerg1: ldrexd r0, r1, [ip] /* load old value */ 431.6Smatt ands r0, r0, r2 /* calculate new value */ 441.6Smatt ands r1, r1, r3 /* calculate new value */ 451.9Sjoerg strexd r4, r0, r1, [ip] /* try to store */ 461.5Smatt cmp r4, #0 /* succeed? */ 471.1Smatt bne 1b /* no, try again? */ 481.11Sskrll pop {r3, r4} /* restore temporary */ 491.1Smatt RET /* return new value */ 501.5SmattEND(_atomic_and_64_nv) 511.5Smatt 521.5SmattSTRONG_ALIAS(_atomic_and_64,_atomic_and_64_nv) 531.1SmattATOMIC_OP_ALIAS(atomic_and_64_nv,_atomic_and_64_nv) 541.5SmattATOMIC_OP_ALIAS(atomic_and_64,_atomic_and_64_nv) 551.13Sskrll 561.13SskrllENTRY_NP(__sync_and_and_fetch_8) 571.13Sskrll push {r4, lr} 581.13Sskrll DMB 591.13Sskrll bl _atomic_and_64_nv 601.13Sskrll DMB 611.13Sskrll pop {r4, pc} 621.13SskrllEND(__sync_and_and_fetch_8) 631.1Smatt 641.1Smatt#endif /* _ARM_ARCH_6 */ 65