/* $NetBSD: atomic_nand_64.S,v 1.5 2021/07/29 10:29:05 skrll Exp $ */

/*-
 * Copyright (c) 2014 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas of 3am Software Foundry.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include "atomic_op_asm.h"

/*
 * { tmp = *ptr; *ptr = ~(tmp & value); return tmp; }   // nand
 */
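/*
 * The routine below is a load-exclusive/store-exclusive (ldxr/stxr) retry
 * loop.  As a rough, non-authoritative C sketch of the same fetch-then-NAND
 * contract (the function name here is illustrative only and the sketch uses
 * the GCC __atomic_compare_exchange_n builtin, which this file does not):
 *
 *	#include <stdint.h>
 *	#include <stdbool.h>
 *
 *	static inline uint64_t
 *	fetch_nand_64_sketch(volatile uint64_t *ptr, uint64_t value)
 *	{
 *		uint64_t old = *ptr;
 *		// Retry until the store succeeds, like the stxr/cbnz loop.
 *		while (!__atomic_compare_exchange_n(ptr, &old,
 *		    ~(old & value), true,
 *		    __ATOMIC_RELAXED, __ATOMIC_RELAXED))
 *			continue;
 *		return old;	// old value, as _atomic_nand_64 returns
 *	}
 */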
ENTRY_NP(_atomic_nand_64)
	mov	x4, x0			/* need x0 for return value */
1:	ldxr	x0, [x4]		/* load old value (*ptr) */
	and	x2, x0, x1		/* x2 =  (*ptr & value) */
	mvn	x2, x2			/* x2 = ~(*ptr & value) */
	stxr	w3, x2, [x4]		/* try to store */
	cbnz	w3, 2f			/*   succeed? no, try again */
	ret				/* return old value */
2:	b	1b
END(_atomic_nand_64)

ATOMIC_OP_ALIAS(atomic_nand_64,_atomic_nand_64)
ATOMIC_OP_ALIAS(atomic_nand_ulong,_atomic_nand_64)
STRONG_ALIAS(_atomic_nand_ulong,_atomic_nand_64)

ENTRY_NP(__sync_fetch_and_nand_8)
	mov	x4, x0			/* need x0 for return value */
	dmb	ish
1:	ldxr	x0, [x4]		/* load old value (*ptr) */
	and	x2, x0, x1		/* x2 =  (*ptr & value) */
	mvn	x2, x2			/* x2 = ~(*ptr & value) */
	stxr	w3, x2, [x4]		/* try to store */
	cbnz	w3, 2f			/*   succeed? no, try again */
	dmb	ish
	ret				/* return old value */
2:	b	1b
END(__sync_fetch_and_nand_8)
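/*
 * The dmb ish barriers around the exclusive loop give this entry point the
 * full-barrier behaviour the GCC/Clang __sync builtins require.  A hedged
 * usage sketch from C (the example function name is illustrative only;
 * whether the builtin expands inline or calls this routine depends on the
 * compiler and flags):
 *
 *	#include <stdint.h>
 *
 *	uint64_t
 *	example_fetch_nand(volatile uint64_t *p, uint64_t mask)
 *	{
 *		// Returns the previous *p; *p becomes ~(*p & mask).
 *		return __sync_fetch_and_nand(p, mask);
 *	}
 */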


/*
 * { tmp = ~(*ptr & value); *ptr = tmp; return tmp; }   // nand
 */
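/*
 * The "_nv" ("new value") variant below differs from _atomic_nand_64 only
 * in that x0 is left holding the value just stored rather than the value
 * that was loaded.  A rough C sketch of that contract (illustrative name,
 * using the GCC __atomic_nand_fetch builtin, which this file does not use):
 *
 *	#include <stdint.h>
 *
 *	static inline uint64_t
 *	nand_64_nv_sketch(volatile uint64_t *ptr, uint64_t value)
 *	{
 *		// Returns the new contents, i.e. ~(old & value).
 *		return __atomic_nand_fetch(ptr, value, __ATOMIC_RELAXED);
 *	}
 */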
ENTRY_NP(_atomic_nand_64_nv)
	mov	x4, x0			/* need x0 for return value */
1:	ldxr	x0, [x4]		/* load old value (*ptr) */
	and	x0, x0, x1		/* x0 =  (*ptr & value) */
	mvn	x0, x0			/* x0 = ~(*ptr & value), return value */
	stxr	w3, x0, [x4]		/* try to store */
	cbnz	w3, 2f			/*   succeed? no, try again */
	ret				/* return new value */
2:	b	1b
END(_atomic_nand_64_nv)

ATOMIC_OP_ALIAS(atomic_nand_64_nv,_atomic_nand_64_nv)
ATOMIC_OP_ALIAS(atomic_nand_ulong_nv,_atomic_nand_64_nv)
STRONG_ALIAS(_atomic_nand_ulong_nv,_atomic_nand_64_nv)

ENTRY_NP(__sync_nand_and_fetch_8)
	mov	x4, x0			/* need x0 for return value */
	dmb	ish
1:	ldxr	x0, [x4]		/* load old value (*ptr) */
	and	x0, x0, x1		/* x0 =  (*ptr & value) */
	mvn	x0, x0			/* x0 = ~(*ptr & value) */
	stxr	w3, x0, [x4]		/* try to store */
	cbnz	w3, 2f			/*   succeed? no, try again */
	dmb	ish
	ret				/* return new value */
2:	b	1b
END(__sync_nand_and_fetch_8)
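/*
 * Ordering summary for this file: the atomic_nand_* entry points issue no
 * barriers (relaxed ordering), while the __sync_* entry points bracket the
 * exclusive loop with dmb ish for full-barrier semantics.  As a hedged C
 * analogy (function names are illustrative only):
 *
 *	#include <stdint.h>
 *
 *	// Relaxed, comparable to _atomic_nand_64.
 *	uint64_t
 *	nand_relaxed(volatile uint64_t *p, uint64_t v)
 *	{
 *		return __atomic_fetch_nand(p, v, __ATOMIC_RELAXED);
 *	}
 *
 *	// Sequentially consistent, comparable to __sync_fetch_and_nand_8.
 *	uint64_t
 *	nand_seq_cst(volatile uint64_t *p, uint64_t v)
 *	{
 *		return __atomic_fetch_nand(p, v, __ATOMIC_SEQ_CST);
 *	}
 */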