/*	$NetBSD: mutex.h,v 1.7 2024/11/25 22:04:14 skrll Exp $	*/

/*-
 * Copyright (c) 2002, 2007 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe and Andrew Doran.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _RISCV_MUTEX_H_
#define	_RISCV_MUTEX_H_

#include <sys/types.h>

#ifndef __MUTEX_PRIVATE

/*
 * Opaque view for consumers outside the mutex implementation; only the
 * size and alignment need to match the private definition below.
 */
struct kmutex {
	uintptr_t	mtx_pad1;
};

#else	/* __MUTEX_PRIVATE */

#include <sys/cdefs.h>

#include <sys/param.h>

#include <machine/intr.h>

struct kmutex {
	volatile uintptr_t	mtx_owner;
};

#ifdef _KERNEL

#ifdef _LP64
#define MTX_ASMOP_SFX ".d"		// doubleword atomic op
#else
#define MTX_ASMOP_SFX ".w"		// word atomic op
#endif
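
/*
 * The suffix picks the AMO operand width; it must match
 * sizeof(uintptr_t) so that the atomics below read-modify-write the
 * whole mtx_owner word.
 */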

#define	MTX_LOCK			__BIT(8)	// just one bit
#define	MTX_IPL				__BITS(7,4)	// only need 4 bits
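
/*
 * Resulting layout of mtx_owner for a spin mutex (all other bits are
 * currently unused; for adaptive mutexes the MI __HAVE_SIMPLE_MUTEXES
 * code stores the owning LWP pointer here instead):
 *
 *	bit  8		MTX_LOCK	lock is held
 *	bits 7..4	MTX_IPL		IPL to raise to while held
 */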

#undef MUTEX_SPIN_IPL			// override <sys/mutex.h>
#define	MUTEX_SPIN_IPL(a)		riscv_mutex_spin_ipl(a)
#define	MUTEX_INITIALIZE_SPIN_IPL(a,b)	riscv_mutex_initialize_spin_ipl(a,b)
#define MUTEX_SPINBIT_LOCK_INIT(a)	riscv_mutex_spinbit_lock_init(a)
#define MUTEX_SPINBIT_LOCK_TRY(a)	riscv_mutex_spinbit_lock_try(a)
#define MUTEX_SPINBIT_LOCKED_P(a)	riscv_mutex_spinbit_locked_p(a)
#define MUTEX_SPINBIT_LOCK_UNLOCK(a)	riscv_mutex_spinbit_lock_unlock(a)
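
/*
 * Illustrative sketch of how the MI spin-mutex code uses these hooks.
 * This is not the actual MI logic (which lives in kern/kern_mutex.c
 * and also handles backoff, diagnostics and saved-SPL bookkeeping):
 *
 *	int __s = splraiseipl(MUTEX_SPIN_IPL(mtx));	// raise IPL first
 *	while (!MUTEX_SPINBIT_LOCK_TRY(mtx))
 *		;					// spin until acquired
 *	// ... critical section ...
 *	MUTEX_SPINBIT_LOCK_UNLOCK(mtx);
 *	splx(__s);					// restore previous IPL
 */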

/*
 * Return an spl cookie for the IPL recorded in the MTX_IPL field of
 * mtx_owner.
 */
static inline ipl_cookie_t
riscv_mutex_spin_ipl(kmutex_t *__mtx)
{
	return (ipl_cookie_t){._spl = __SHIFTOUT(__mtx->mtx_owner, MTX_IPL)};
}

/* Record the IPL this spin mutex raises to in the MTX_IPL field. */
static inline void
riscv_mutex_initialize_spin_ipl(kmutex_t *__mtx, int ipl)
{
	__mtx->mtx_owner = (__mtx->mtx_owner & ~MTX_IPL)
	    | __SHIFTIN(ipl, MTX_IPL);
}

/* Start out released: clear the lock bit, preserving the IPL field. */
static inline void
riscv_mutex_spinbit_lock_init(kmutex_t *__mtx)
{
	__mtx->mtx_owner &= ~MTX_LOCK;
}

/* True iff the spin lock bit is currently held. */
static inline bool
riscv_mutex_spinbit_locked_p(const kmutex_t *__mtx)
{
	return (__mtx->mtx_owner & MTX_LOCK) != 0;
}

/*
 * Try to take the lock: atomically OR in MTX_LOCK with acquire
 * ordering, succeeding iff the bit was previously clear.  The "memory"
 * clobber keeps the compiler from hoisting critical-section accesses
 * above the acquire.
 */
static inline bool
riscv_mutex_spinbit_lock_try(kmutex_t *__mtx)
{
	uintptr_t __old;
	__asm __volatile(
		"amoor" MTX_ASMOP_SFX ".aq\t%0, %1, (%2)"
	   :	"=r"(__old)
	   :	"r"(MTX_LOCK), "r"(__mtx)
	   :	"memory");
	return (__old & MTX_LOCK) == 0;
}

/*
 * Release the lock: atomically AND out MTX_LOCK with release ordering,
 * discarding the old value via x0.  The "memory" clobber keeps the
 * compiler from sinking critical-section accesses below the release.
 */
static inline void
riscv_mutex_spinbit_lock_unlock(kmutex_t *__mtx)
{
	__asm __volatile(
		"amoand" MTX_ASMOP_SFX ".rl\tx0, %0, (%1)"
	   ::	"r"(~MTX_LOCK), "r"(__mtx)
	   :	"memory");
}

#endif /* _KERNEL */

#if 0	/* assembly fast-path stubs are not (yet) provided */
#define	__HAVE_MUTEX_STUBS		1
#define	__HAVE_SPIN_MUTEX_STUBS		1
#endif
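
/*
 * __HAVE_SIMPLE_MUTEXES: mutexes fit in a single word, and the MI code
 * in kern/kern_mutex.c acquires and releases adaptive mutexes with a
 * compare-and-swap on mtx_owner, which is consistent with mtx_owner
 * being the sole member of struct kmutex above.
 */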
#define	__HAVE_SIMPLE_MUTEXES		1

#endif	/* __MUTEX_PRIVATE */

#endif /* _RISCV_MUTEX_H_ */