/*	$NetBSD: atomic_or_32.S,v 1.10 2021/07/28 07:32:20 skrll Exp $	*/

/*-
 * Copyright (c) 2008 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas <matt@3am-software.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include "atomic_op_asm.h"

#if defined(_ARM_ARCH_6)

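/*
 * _atomic_or_32(p, val): atomically OR val into *p and return the
 * previous contents.  On ARMv6+ this is a load-exclusive/store-exclusive
 * (LDREX/STREX) retry loop: STREX only succeeds if nothing else touched
 * the location since the LDREX; otherwise it fails and we retry.
 * Roughly, as a C sketch (illustrative only; try_store_exclusive() is a
 * hypothetical stand-in for the STREX conditional store):
 *
 *	uint32_t
 *	_atomic_or_32(volatile uint32_t *p, uint32_t val)
 *	{
 *		uint32_t old;
 *
 *		do {
 *			old = *p;			// LDREX
 *		} while (!try_store_exclusive(p, old | val));	// STREX
 *		return old;				// pre-OR value
 *	}
 *
 * No barriers here: this is the relaxed primitive; the __sync wrappers
 * below add the memory barriers their semantics require.
 */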
ENTRY_NP(_atomic_or_32)
	mov	ip, r0			/* need r0 for return value */
1:	ldrex	r0, [ip]		/* load old value (to be returned) */
	orrs	r3, r0, r1		/* calculate new value */
	strex	r2, r3, [ip]		/* try to store */
	cmp	r2, #0			/*   succeed? */
	bne	1b			/*     no, try again */
	RET				/* return old value */
END(_atomic_or_32)

/* uint and ulong are both 32 bits on arm, so all three share one body. */
ATOMIC_OP_ALIAS(atomic_or_32,_atomic_or_32)
ATOMIC_OP_ALIAS(atomic_or_uint,_atomic_or_32)
ATOMIC_OP_ALIAS(atomic_or_ulong,_atomic_or_32)
CRT_ALIAS(__atomic_fetch_or_4,_atomic_or_32)
STRONG_ALIAS(_atomic_or_uint,_atomic_or_32)
STRONG_ALIAS(_atomic_or_ulong,_atomic_or_32)

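/*
 * GCC's legacy __sync_* builtins imply full barrier semantics, so this
 * wrapper brackets the relaxed loop above with DMB on both sides.  As a
 * C sketch (illustrative; dmb() is a hypothetical stand-in for the DMB
 * instruction):
 *
 *	uint32_t
 *	__sync_fetch_and_or_4(volatile uint32_t *p, uint32_t val)
 *	{
 *		dmb();
 *		uint32_t old = _atomic_or_32(p, val);
 *		dmb();
 *		return old;
 *	}
 */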
ENTRY_NP(__sync_fetch_and_or_4)
	push	{r4, lr}		/* r4 pushed only to keep sp 8-byte aligned */
	DMB				/* order earlier accesses */
	bl	_atomic_or_32		/* old value comes back in r0 */
	DMB				/* order later accesses */
	pop	{r4, pc}		/* return old value */
END(__sync_fetch_and_or_4)


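/*
 * _atomic_or_32_nv(p, val): same LDREX/STREX loop, but the OR result is
 * kept in r0, so the *new* value is returned.  C sketch (illustrative;
 * try_store_exclusive() as above):
 *
 *	uint32_t
 *	_atomic_or_32_nv(volatile uint32_t *p, uint32_t val)
 *	{
 *		uint32_t new;
 *
 *		do {
 *			new = *p | val;			// LDREX + ORR
 *		} while (!try_store_exclusive(p, new));	// STREX
 *		return new;				// post-OR value
 *	}
 */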
ENTRY_NP(_atomic_or_32_nv)
	mov	ip, r0			/* need r0 for return value */
1:	ldrex	r0, [ip]		/* load old value */
	orrs	r0, r0, r1		/* calculate new value (return value) */
	strex	r2, r0, [ip]		/* try to store */
	cmp	r2, #0			/*   succeed? */
	bne	1b			/*     no, try again */
	RET				/* return new value */
END(_atomic_or_32_nv)

ATOMIC_OP_ALIAS(atomic_or_32_nv,_atomic_or_32_nv)
ATOMIC_OP_ALIAS(atomic_or_uint_nv,_atomic_or_32_nv)
ATOMIC_OP_ALIAS(atomic_or_ulong_nv,_atomic_or_32_nv)
STRONG_ALIAS(_atomic_or_uint_nv,_atomic_or_32_nv)
STRONG_ALIAS(_atomic_or_ulong_nv,_atomic_or_32_nv)

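/*
 * Full-barrier builtin returning the new value, with the same DMB
 * bracketing as __sync_fetch_and_or_4 above.  Caller-side sketch
 * (F_DONE is a hypothetical flag bit):
 *
 *	unsigned int nv = __sync_or_and_fetch(&flags, F_DONE);
 */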
ENTRY_NP(__sync_or_and_fetch_4)
	push	{r4, lr}		/* r4 pushed only to keep sp 8-byte aligned */
	DMB				/* order earlier accesses */
	bl	_atomic_or_32_nv	/* new value comes back in r0 */
	DMB				/* order later accesses */
	pop	{r4, pc}		/* return new value */
END(__sync_or_and_fetch_4)


#endif /* _ARM_ARCH_6 */