/*	$NetBSD: atomic_and_32.S,v 1.10 2021/07/28 07:32:20 skrll Exp $	*/

/*-
 * Copyright (c) 2008 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas <matt@3am-software.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include "atomic_op_asm.h"

#if defined(_ARM_ARCH_6)

ENTRY_NP(_atomic_and_32)
	mov	ip, r0			/* need r0 for return value */
1:	ldrex	r0, [ip]		/* load old value (to be returned) */
	ands	r3, r0, r1		/* calculate new value */
	strex	r2, r3, [ip]		/* try to store */
	cmp	r2, #0			/*   succeed? */
	bne	1b			/*     no, try again */
	RET				/* return old value */
END(_atomic_and_32)
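
/*
 * Rough C equivalent of the LDREX/STREX retry loop above (a sketch for
 * reference only; "try_store" is a stand-in for STREX, which fails if the
 * exclusive monitor was lost between the load and the store):
 *
 *	uint32_t
 *	_atomic_and_32(volatile uint32_t *p, uint32_t mask)
 *	{
 *		uint32_t old;
 *		do {
 *			old = *p;			// LDREX
 *		} while (!try_store(p, old & mask));	// STREX
 *		return old;				// value before the AND
 *	}
 */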

ATOMIC_OP_ALIAS(atomic_and_32,_atomic_and_32)
ATOMIC_OP_ALIAS(atomic_and_uint,_atomic_and_32)
ATOMIC_OP_ALIAS(atomic_and_ulong,_atomic_and_32)
CRT_ALIAS(__atomic_fetch_and_4,_atomic_and_32)
STRONG_ALIAS(_atomic_and_uint,_atomic_and_32)
STRONG_ALIAS(_atomic_and_ulong,_atomic_and_32)

ENTRY_NP(__sync_fetch_and_and_4)
	push	{r4, lr}
	DMB
	bl	_atomic_and_32
	DMB
	pop	{r4, pc}
END(__sync_fetch_and_and_4)
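
/*
 * Note on the wrapper above: GCC's __sync_* builtins imply a full memory
 * barrier, so the relaxed _atomic_and_32 primitive is bracketed with DMB
 * (the data-memory-barrier macro used by the surrounding atomic sources)
 * before and after the call.  r4 is pushed alongside lr apparently only to
 * keep the stack 8-byte aligned across the call, as the AAPCS requires at
 * external interfaces.
 */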


ENTRY_NP(_atomic_and_32_nv)
	mov	ip, r0			/* need r0 for return value */
1:	ldrex	r0, [ip]		/* load old value */
	ands	r0, r0, r1		/* calculate new value (return value) */
	strex	r2, r0, [ip]		/* try to store */
	cmp	r2, #0			/*   succeed? */
	bne	1b			/*     no, try again */
	RET				/* return new value */
END(_atomic_and_32_nv)
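
/*
 * As above, but the "_nv" variant returns the new value.  Rough C
 * equivalent (same caveats as the sketch after _atomic_and_32):
 *
 *	uint32_t
 *	_atomic_and_32_nv(volatile uint32_t *p, uint32_t mask)
 *	{
 *		uint32_t new;
 *		do {
 *			new = *p & mask;		// LDREX + AND
 *		} while (!try_store(p, new));		// STREX
 *		return new;				// value after the AND
 *	}
 */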

ATOMIC_OP_ALIAS(atomic_and_32_nv,_atomic_and_32_nv)
ATOMIC_OP_ALIAS(atomic_and_uint_nv,_atomic_and_32_nv)
ATOMIC_OP_ALIAS(atomic_and_ulong_nv,_atomic_and_32_nv)
STRONG_ALIAS(_atomic_and_uint_nv,_atomic_and_32_nv)
STRONG_ALIAS(_atomic_and_ulong_nv,_atomic_and_32_nv)

ENTRY_NP(__sync_and_and_fetch_4)
	push	{r4, lr}
	DMB
	bl	_atomic_and_32_nv
	DMB
	pop	{r4, pc}
END(__sync_and_and_fetch_4)
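
/*
 * __sync_and_and_fetch_4 mirrors __sync_fetch_and_and_4 above, calling the
 * new-value primitive with the same DMB bracketing.  A sketch of how the
 * exported names are typically used from C (declarations assumed to come
 * from <sys/atomic.h> on NetBSD):
 *
 *	volatile uint32_t flags = 0x0f;
 *	atomic_and_32(&flags, ~0x01u);			// clear bit 0
 *	uint32_t nv = atomic_and_32_nv(&flags, ~0x02u);	// clear bit 1, get result
 */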

#endif /* _ARM_ARCH_6 */