/*	$NetBSD: atomic_swap_64.S,v 1.15 2021/08/01 21:58:56 andvar Exp $	*/
/*-
 * Copyright (c) 2012 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include "atomic_op_asm.h"

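/*
 * __sync_lock_test_and_set_8 is the GCC __sync builtin entry point for
 * an 8-byte atomic exchange: it issues a memory barrier and then falls
 * through into the unordered swap below.
 */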
ENTRY_NP(__sync_lock_test_and_set_8)
	DMB
	/* FALLTHROUGH */

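/*
 * _atomic_swap_64: atomically replace the 64-bit value at the given
 * address with a new value and return the previous contents.  A rough
 * C sketch of the effect (signature per atomic_swap(3); strexd_failed()
 * merely stands in for the STREXD status check):
 *
 *	uint64_t
 *	atomic_swap_64(volatile uint64_t *ptr, uint64_t newval)
 *	{
 *		uint64_t old;
 *
 *		do {
 *			old = *ptr;			// LDREXD
 *		} while (strexd_failed(ptr, newval));	// STREXD
 *		return old;
 *	}
 *
 * LDREXD/STREXD require ARMv6K or later and a naturally aligned
 * doubleword; like the other atomic_ops(3) primitives, the swap by
 * itself implies no memory ordering.
 */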
ENTRY_NP(_atomic_swap_64)
	push	{r3, r4}		/* save temporaries */
	mov	ip, r0			/* free r0:r1 for the return value */
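/*
 * Under the EABI (AAPCS) the 64-bit new value already arrives in the
 * even/odd register pair r2:r3, with r1 left unused; the old ABI packs
 * it into r1:r2, so it must be shuffled into r2:r3 before LDREXD
 * claims r0:r1 for the return value.
 */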
#ifndef __ARM_EABI__
	mov	r3, r2			/* r2 will be overwritten by r1 */
	mov	r2, r1			/* and r1 will be overwritten by ldrexd */
#endif
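/*
 * STREXD writes 0 to r4 only if this CPU still holds the exclusive
 * reservation taken by LDREXD; if the reservation was lost (another
 * writer, a context switch, ...) it writes 1 and the swap is retried.
 */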
1:	ldrexd	r0, r1, [ip]		/* load old value */
	strexd	r4, r2, r3, [ip]	/* store new value */
	cmp	r4, #0			/*   succeeded? */
	bne	1b			/*    no, try again */
	pop	{r3, r4}		/* restore temporaries */
	RET
END(_atomic_swap_64)
END(__sync_lock_test_and_set_8)

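/*
 * Export the public atomic_swap(3) name, plus the entry point the
 * compiler emits calls to for an out-of-line 8-byte __atomic_exchange.
 */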
ATOMIC_OP_ALIAS(atomic_swap_64,_atomic_swap_64)
CRT_ALIAS(__atomic_exchange_8,_atomic_swap_64)

#if (!defined(_KERNEL) || !defined(_RUMPKERNEL)) && !defined(_STANDALONE)
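/*
 * __sync_lock_release_8 stores zero with release semantics: the DMB
 * orders all earlier memory accesses before the plain 64-bit store.
 */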
ENTRY_NP(__sync_lock_release_8)
	mov	r2, #0
	mov	r3, #0

	DMB
	strd	r2, r3, [r0]
	RET
END(__sync_lock_release_8)
#endif