/*	$NetBSD: atomic_add_32.S,v 1.10 2021/07/28 07:32:20 skrll Exp $	*/

/*-
 * Copyright (c) 2008 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas <matt@3am-software.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include "atomic_op_asm.h"
#if defined(_ARM_ARCH_6)

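/*
 * LDREX/STREX (load-exclusive/store-exclusive) are only available from
 * ARMv6 on, hence the _ARM_ARCH_6 guard.  _atomic_sub_32 negates the
 * delta and falls through into _atomic_add_32, whose exclusive-access
 * retry loop restarts whenever another observer touches the word between
 * the load and the store (STREX then writes back a non-zero status).
 */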
ENTRY_NP(_atomic_sub_32)
	negs	r1, r1			/* negate delta, fall into the add */
	/* FALLTHROUGH */
ENTRY_NP(_atomic_add_32)
	mov	ip, r0			/* need r0 for return value */
1:	ldrex	r0, [ip]		/* load old value */
	adds	r3, r0, r1		/* calculate new value */
	strex	r2, r3, [ip]		/* try to store */
	cmp	r2, #0			/*   succeed? */
	bne	1b			/*     no, try again */
	RET				/* return old value */
END(_atomic_add_32)
END(_atomic_sub_32)

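/*
 * The __sync_* builtins require full (sequentially consistent) barrier
 * semantics, so the relaxed atomic op is bracketed with DMBs.  r4 is not
 * otherwise used; pushing it alongside lr just keeps the stack 8-byte
 * aligned across the call, as the AAPCS requires.
 */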
ENTRY_NP(__sync_fetch_and_add_4)
	push	{r4, lr}
	DMB
	bl	_atomic_add_32
	DMB
	pop	{r4, pc}
END(__sync_fetch_and_add_4)

ENTRY_NP(__sync_fetch_and_sub_4)
	push	{r4, lr}
	DMB
	bl	_atomic_sub_32
	DMB
	pop	{r4, pc}
END(__sync_fetch_and_sub_4)

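/*
 * On ILP32 ARM, int, long, and pointers are all 32 bits wide, so every
 * alias below resolves to the same 32-bit implementation.  CRT_ALIAS
 * additionally exports the function under its compiler-runtime
 * (__atomic_*) name.
 */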
ATOMIC_OP_ALIAS(atomic_add_32,_atomic_add_32)
ATOMIC_OP_ALIAS(atomic_add_int,_atomic_add_32)
ATOMIC_OP_ALIAS(atomic_add_long,_atomic_add_32)
ATOMIC_OP_ALIAS(atomic_add_ptr,_atomic_add_32)
CRT_ALIAS(__atomic_fetch_add_4,_atomic_add_32)
STRONG_ALIAS(_atomic_add_int,_atomic_add_32)
STRONG_ALIAS(_atomic_add_long,_atomic_add_32)
STRONG_ALIAS(_atomic_add_ptr,_atomic_add_32)

ATOMIC_OP_ALIAS(atomic_sub_32,_atomic_sub_32)
ATOMIC_OP_ALIAS(atomic_sub_int,_atomic_sub_32)
ATOMIC_OP_ALIAS(atomic_sub_long,_atomic_sub_32)
ATOMIC_OP_ALIAS(atomic_sub_ptr,_atomic_sub_32)
CRT_ALIAS(__atomic_fetch_sub_4,_atomic_sub_32)
STRONG_ALIAS(_atomic_sub_int,_atomic_sub_32)
STRONG_ALIAS(_atomic_sub_long,_atomic_sub_32)
STRONG_ALIAS(_atomic_sub_ptr,_atomic_sub_32)

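/*
 * The *_nv ("new value") variants return the post-operation value
 * instead of the old one, so the sum is computed directly into r0.
 */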
ENTRY_NP(_atomic_sub_32_nv)
	negs	r1, r1
	/* FALLTHROUGH */
ENTRY_NP(_atomic_add_32_nv)
	mov	ip, r0			/* need r0 for return value */
1:	ldrex	r0, [ip]		/* load old value */
	adds	r0, r0, r1		/* calculate new value (return value) */
	strex	r2, r0, [ip]		/* try to store */
	cmp	r2, #0			/*   succeed? */
	bne	1b			/*     no, try again */
	RET				/* return new value */
END(_atomic_add_32_nv)
END(_atomic_sub_32_nv)

ATOMIC_OP_ALIAS(atomic_add_32_nv,_atomic_add_32_nv)
ATOMIC_OP_ALIAS(atomic_add_int_nv,_atomic_add_32_nv)
ATOMIC_OP_ALIAS(atomic_add_long_nv,_atomic_add_32_nv)
ATOMIC_OP_ALIAS(atomic_add_ptr_nv,_atomic_add_32_nv)
STRONG_ALIAS(_atomic_add_int_nv,_atomic_add_32_nv)
STRONG_ALIAS(_atomic_add_long_nv,_atomic_add_32_nv)
STRONG_ALIAS(_atomic_add_ptr_nv,_atomic_add_32_nv)

ATOMIC_OP_ALIAS(atomic_sub_32_nv,_atomic_sub_32_nv)
ATOMIC_OP_ALIAS(atomic_sub_int_nv,_atomic_sub_32_nv)
ATOMIC_OP_ALIAS(atomic_sub_long_nv,_atomic_sub_32_nv)
ATOMIC_OP_ALIAS(atomic_sub_ptr_nv,_atomic_sub_32_nv)
STRONG_ALIAS(_atomic_sub_int_nv,_atomic_sub_32_nv)
STRONG_ALIAS(_atomic_sub_long_nv,_atomic_sub_32_nv)
STRONG_ALIAS(_atomic_sub_ptr_nv,_atomic_sub_32_nv)

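/*
 * __sync_{add,sub}_and_fetch return the new value, so they map onto the
 * *_nv variants; the DMB pair again supplies the full barrier the
 * builtins demand.
 */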
ENTRY_NP(__sync_add_and_fetch_4)
	push	{r4, lr}
	DMB
	bl	_atomic_add_32_nv
	DMB
	pop	{r4, pc}
END(__sync_add_and_fetch_4)

ENTRY_NP(__sync_sub_and_fetch_4)
	push	{r4, lr}
	DMB
	bl	_atomic_sub_32_nv
	DMB
	pop	{r4, pc}
END(__sync_sub_and_fetch_4)

#endif /* _ARM_ARCH_6 */