/* $NetBSD: atomic_inc_64.S,v 1.2 2020/08/12 12:59:57 skrll Exp $ */

/*-
 * Copyright (c) 2014 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas of 3am Software Foundry.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include "atomic_op_asm.h"

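/*
 * atomic_inc_64(ptr): atomically increment the 64-bit value at ptr.
 * atomic_ops(3) declares this variant as returning void, so no result
 * is passed back to the caller.  The LDXR/STXR pair below is AArch64's
 * load-exclusive/store-exclusive sequence: STXR writes 0 to its status
 * register (w3) when the store succeeds and non-zero when the exclusive
 * monitor was lost, in which case the operation retries from the load
 * (the forward branch to 2f keeps the success path fall-through).
 * No memory barrier is implied; callers needing ordering pair these
 * operations with membar_ops(3).
 */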
ENTRY_NP(_atomic_inc_64)
1:	ldxr	x2, [x0]		/* load old value */
	add	x2, x2, #1		/* calculate new value */
	stxr	w3, x2, [x0]		/* try to store */
	cbnz	w3, 2f			/*   succeed? no, try again */
	ret				/* done (no return value) */
2:	b	1b
END(_atomic_inc_64)
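
/*
 * _LP64 target: pointers and unsigned longs are both 64 bits wide here,
 * so the _ptr and _ulong operations simply alias the 64-bit code.
 */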
ATOMIC_OP_ALIAS(atomic_inc_64,_atomic_inc_64)
ATOMIC_OP_ALIAS(atomic_inc_ptr,_atomic_inc_64)
ATOMIC_OP_ALIAS(atomic_inc_ulong,_atomic_inc_64)
STRONG_ALIAS(_atomic_inc_ptr,_atomic_inc_64)
STRONG_ALIAS(_atomic_inc_ulong,_atomic_inc_64)

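/*
 * atomic_inc_64_nv(ptr): as above, but returns the new (incremented)
 * value in x0.  The pointer argument arrives in x0, so it is moved to
 * a scratch register (x4) first to free x0 for the return value.
 */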
ENTRY_NP(_atomic_inc_64_nv)
	mov	x4, x0			/* need x0 for return value */
1:	ldxr	x0, [x4]		/* load old value */
	add	x0, x0, #1		/* calculate new value (return value) */
	stxr	w3, x0, [x4]		/* try to store */
	cbnz	w3, 2f			/*   succeed? no, try again */
	ret				/* return new value */
2:	b	1b
END(_atomic_inc_64_nv)

ATOMIC_OP_ALIAS(atomic_inc_64_nv,_atomic_inc_64_nv)
ATOMIC_OP_ALIAS(atomic_inc_ptr_nv,_atomic_inc_64_nv)
ATOMIC_OP_ALIAS(atomic_inc_ulong_nv,_atomic_inc_64_nv)
STRONG_ALIAS(_atomic_inc_ptr_nv,_atomic_inc_64_nv)
STRONG_ALIAS(_atomic_inc_ulong_nv,_atomic_inc_64_nv)