/*	$NetBSD: atomic_nand_32.S,v 1.5 2021/07/28 07:32:20 skrll Exp $	*/

/*-
 * Copyright (c) 2013 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas <matt@3am-software.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include "atomic_op_asm.h"

#if defined(_ARM_ARCH_6)

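/*
 * _atomic_nand_32(ptr, val): atomically set *ptr = ~(*ptr & val) and
 * return the previous value of *ptr, via an LDREX/STREX retry loop.
 * A rough C sketch of the semantics (names illustrative only, not the
 * public API):
 *
 *	uint32_t
 *	_atomic_nand_32(volatile uint32_t *ptr, uint32_t val)
 *	{
 *		uint32_t old = *ptr;	// ldrex
 *		*ptr = ~(old & val);	// strex, retried on contention
 *		return old;
 *	}
 */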
ENTRY_NP(_atomic_nand_32)
	mov	ip, r0			/* need r0 for return value */
1:	ldrex	r0, [ip]		/* load old value (to be returned) */
	ands	r3, r0, r1		/* calculate new value step 1 */
	mvns	r3, r3			/* ... complement for new value */
	strex	r2, r3, [ip]		/* try to store */
	cmp	r2, #0			/*   succeed? */
	bne	1b			/*     no, try again */
	RET				/* return old value */
END(_atomic_nand_32)

ATOMIC_OP_ALIAS(atomic_nand_32,_atomic_nand_32)
ATOMIC_OP_ALIAS(atomic_nand_uint,_atomic_nand_32)
ATOMIC_OP_ALIAS(atomic_nand_ulong,_atomic_nand_32)
STRONG_ALIAS(_atomic_nand_uint,_atomic_nand_32)
STRONG_ALIAS(_atomic_nand_ulong,_atomic_nand_32)

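/*
 * __sync_fetch_and_nand_4 is the corresponding GCC __sync builtin
 * entry point.  The __sync builtins require full-barrier semantics,
 * so the LDREX/STREX loop above is bracketed with DMBs here.  r4 is
 * pushed only to keep the stack 8-byte aligned across the call, as
 * AAPCS requires.
 */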
ENTRY_NP(__sync_fetch_and_nand_4)
	push	{r4, lr}
	DMB
	bl	_atomic_nand_32
	DMB
	pop	{r4, pc}
END(__sync_fetch_and_nand_4)


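/*
 * _atomic_nand_32_nv: same operation, but returns the new value,
 * i.e. ~(old & val).  The new value is computed directly in r0 so
 * it is already in the return register once the store succeeds.
 */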
ENTRY_NP(_atomic_nand_32_nv)
	mov	ip, r0			/* need r0 for return value */
1:	ldrex	r0, [ip]		/* load old value */
	ands	r0, r0, r1		/* calculate new value step 1 */
	mvns	r0, r0			/* ... complement for new value */
	strex	r2, r0, [ip]		/* try to store */
	cmp	r2, #0			/*   succeed? */
	bne	1b			/*     no, try again */
	RET				/* return new value */
END(_atomic_nand_32_nv)

ATOMIC_OP_ALIAS(atomic_nand_32_nv,_atomic_nand_32_nv)
ATOMIC_OP_ALIAS(atomic_nand_uint_nv,_atomic_nand_32_nv)
ATOMIC_OP_ALIAS(atomic_nand_ulong_nv,_atomic_nand_32_nv)
STRONG_ALIAS(_atomic_nand_uint_nv,_atomic_nand_32_nv)
STRONG_ALIAS(_atomic_nand_ulong_nv,_atomic_nand_32_nv)

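/*
 * __sync_nand_and_fetch_4: the __sync builtin that returns the new
 * value; as above, DMBs before and after the loop provide the full
 * barrier the builtins require.
 */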
ENTRY_NP(__sync_nand_and_fetch_4)
	push	{r4, lr}
	DMB
	bl	_atomic_nand_32_nv
	DMB
	pop	{r4, pc}
END(__sync_nand_and_fetch_4)


#endif /* _ARM_ARCH_6 */