/*	$NetBSD: atomic_inc_32.S,v 1.9 2021/07/28 07:32:20 skrll Exp $	*/
/*-
 * Copyright (c) 2008 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas <matt@3am-software.com>
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
301.2Smatt
#include "atomic_op_asm.h"
321.2Smatt
#if defined(_ARM_ARCH_6)
341.2Smatt
351.2SmattENTRY_NP(_atomic_inc_32)
361.5Smatt1:	ldrex	r3, [r0]		/* load old value (return value) */
371.6Smatt	adds	r3, r3, #1		/* calculate new value */
381.5Smatt	strex	r2, r3, [r0]		/* try to store */
391.5Smatt	cmp	r2, #0			/*   succeed? */
401.2Smatt	bne	1b			/*     no, try again? */
411.2Smatt	RET				/* return new value */
421.7SmattEND(_atomic_inc_32)
431.9Sskrll
441.2SmattATOMIC_OP_ALIAS(atomic_inc_32,_atomic_inc_32)
451.2SmattATOMIC_OP_ALIAS(atomic_inc_uint,_atomic_inc_32)
461.2SmattATOMIC_OP_ALIAS(atomic_inc_ulong,_atomic_inc_32)
471.2SmattATOMIC_OP_ALIAS(atomic_inc_ptr,_atomic_inc_32)
481.2SmattSTRONG_ALIAS(_atomic_inc_uint,_atomic_inc_32)
491.2SmattSTRONG_ALIAS(_atomic_inc_ulong,_atomic_inc_32)
501.2SmattSTRONG_ALIAS(_atomic_inc_ptr,_atomic_inc_32)
511.2Smatt
521.9Sskrll
531.2SmattENTRY_NP(_atomic_inc_32_nv)
541.5Smatt	mov	ip, r0			/* need r0 for return value */
551.5Smatt1:	ldrex	r0, [ip]		/* load old value */
561.6Smatt	adds	r0, r0, #1		/* calculate new value (return value) */
571.5Smatt	strex	r2, r0, [ip]		/* try to store */
581.5Smatt	cmp	r2, #0			/*   succeed? */
591.2Smatt	bne	1b			/*     no, try again? */
601.2Smatt	RET				/* return new value */
611.7SmattEND(_atomic_inc_32_nv)
621.2SmattATOMIC_OP_ALIAS(atomic_inc_32_nv,_atomic_inc_32_nv)
631.2SmattATOMIC_OP_ALIAS(atomic_inc_uint_nv,_atomic_inc_32_nv)
641.2SmattATOMIC_OP_ALIAS(atomic_inc_ulong_nv,_atomic_inc_32_nv)
651.2SmattATOMIC_OP_ALIAS(atomic_inc_ptr_nv,_atomic_inc_32_nv)
661.2SmattSTRONG_ALIAS(_atomic_inc_uint_nv,_atomic_inc_32_nv)
671.2SmattSTRONG_ALIAS(_atomic_inc_ulong_nv,_atomic_inc_32_nv)
681.2SmattSTRONG_ALIAS(_atomic_inc_ptr_nv,_atomic_inc_32_nv)
691.2Smatt
#endif /* _ARM_ARCH_6 */
71