/*	$NetBSD: atomic_nand_32.S,v 1.2 2015/03/27 06:42:37 matt Exp $	*/

/*-
 * Copyright (c) 2014 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas <matt (at) 3am-software.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
29 1.1 matt */ 30 1.1 matt 31 1.1 matt #include "atomic_op_asm.h" 32 1.1 matt 33 1.1 matt ENTRY_NP(_atomic_nand_32) 34 1.2 matt mv t0, a0 35 1.2 matt 1: lr.w a0, 0(t0) 36 1.2 matt not t2, a0 37 1.2 matt and t2, t2, a1 38 1.2 matt sc.w t1, t2, 0(t0) 39 1.2 matt bnez t1, 1b 40 1.1 matt ret 41 1.1 matt END(_atomic_nand_32) 42 1.1 matt 43 1.1 matt ATOMIC_OP_ALIAS(atomic_nand_32,_atomic_nand_32) 44 1.1 matt ATOMIC_OP_ALIAS(atomic_nand_uint,_atomic_nand_32) 45 1.1 matt STRONG_ALIAS(_atomic_nand_uint,_atomic_nand_32) 46 1.1 matt #ifndef _LP64 47 1.1 matt ATOMIC_OP_ALIAS(atomic_nand_ulong,_atomic_nand_32) 48 1.1 matt STRONG_ALIAS(_atomic_nand_ulong,_atomic_nand_32) 49 1.1 matt #endif 50 1.1 matt CRT_ALIAS(__sync_fetch_and_nand_4,_atomic_nand_32) 51 1.1 matt CRT_ALIAS(__atomic_fetch_nand_4,_atomic_nand_32) 52 1.1 matt 53 1.1 matt ENTRY_NP(_atomic_nand_32_nv) 54 1.2 matt mv t0, a0 55 1.2 matt 1: lr.w a0, 0(t0) 56 1.2 matt not a0, a0 57 1.2 matt and a0, a0, a1 58 1.2 matt sc.w t1, a0, 0(t0) 59 1.2 matt bnez t1, 1b 60 1.1 matt ret 61 1.1 matt END(_atomic_nand_32_nv) 62 1.1 matt 63 1.1 matt ATOMIC_OP_ALIAS(atomic_nand_32_nv,_atomic_nand_32_nv) 64 1.1 matt ATOMIC_OP_ALIAS(atomic_nand_uint_nv,_atomic_nand_32_nv) 65 1.1 matt STRONG_ALIAS(_atomic_nand_uint_nv,_atomic_nand_32_nv) 66 1.1 matt #ifndef _LP64 67 1.1 matt ATOMIC_OP_ALIAS(atomic_nand_ulong_nv,_atomic_nand_32_nv) 68 1.1 matt STRONG_ALIAS(_atomic_nand_ulong_nv,_atomic_nand_32_nv) 69 1.1 matt #endif 70 1.1 matt CRT_ALIAS(__sync_nand_and_fetch_4,_atomic_nand_32_nv) 71 1.1 matt CRT_ALIAS(__atomic_nand_fetch_4,_atomic_nand_32_nv) 72