/*	$NetBSD: mutex.h,v 1.4.4.1 2023/08/09 17:42:03 martin Exp $	*/

/*-
 * Copyright (c) 2002, 2007 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe and Andrew Doran.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _RISCV_MUTEX_H_
#define	_RISCV_MUTEX_H_

#include <sys/types.h>

#ifndef __MUTEX_PRIVATE

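/*
 * Without __MUTEX_PRIVATE the mutex is exposed only as opaque storage,
 * sized to match the private definition below.
 */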
struct kmutex {
	uintptr_t	mtx_pad1;
};

#else	/* __MUTEX_PRIVATE */

#include <sys/cdefs.h>

#include <sys/param.h>

#include <machine/intr.h>

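/*
 * All mutex state lives in a single owner word.  With
 * __HAVE_SIMPLE_MUTEXES the MI code stores the owner LWP here for
 * adaptive mutexes; spin mutexes use only the MTX_LOCK and MTX_IPL
 * bits defined below.
 */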
struct kmutex {
	volatile uintptr_t	mtx_owner;
};

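/* AMO size suffix: ".d" (64-bit) on LP64, ".w" (32-bit) otherwise. */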
#ifdef _LP64
#define MTX_ASMOP_SFX ".d"		// doubleword atomic op
#else
#define MTX_ASMOP_SFX ".w"		// word atomic op
#endif

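/*
 * Spin mutex fields within mtx_owner:
 *	bit  8		MTX_LOCK	lock held
 *	bits 7..4	MTX_IPL		IPL associated with the mutex
 */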
#define	MTX_LOCK			__BIT(8)	// just one bit
#define	MTX_IPL				__BITS(7,4)	// only need 4 bits

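/*
 * Plug the MD routines below into the MI spin-mutex code in place of
 * the defaults from <sys/mutex.h>.
 */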
#undef MUTEX_SPIN_IPL			// override <sys/mutex.h>
#define	MUTEX_SPIN_IPL(a)		riscv_mutex_spin_ipl(a)
#define	MUTEX_INITIALIZE_SPIN_IPL(a,b)	riscv_mutex_initialize_spin_ipl(a,b)
#define MUTEX_SPINBIT_LOCK_INIT(a)	riscv_mutex_spinbit_lock_init(a)
#define MUTEX_SPINBIT_LOCK_TRY(a)	riscv_mutex_spinbit_lock_try(a)
#define MUTEX_SPINBIT_LOCKED_P(a)	riscv_mutex_spinbit_locked_p(a)
#define MUTEX_SPINBIT_LOCK_UNLOCK(a)	riscv_mutex_spinbit_lock_unlock(a)

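/* Return the IPL cookie stored in the MTX_IPL field of the owner word. */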
static inline ipl_cookie_t
riscv_mutex_spin_ipl(kmutex_t *__mtx)
{
	return (ipl_cookie_t){._spl = __SHIFTOUT(__mtx->mtx_owner, MTX_IPL)};
}

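/* Record the mutex's IPL in the MTX_IPL field, preserving the other bits. */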
static inline void
riscv_mutex_initialize_spin_ipl(kmutex_t *__mtx, int ipl)
{
	__mtx->mtx_owner = (__mtx->mtx_owner & ~MTX_IPL)
	    | __SHIFTIN(ipl, MTX_IPL);
}

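/* Initialize the spin-lock bit to the released state. */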
static inline void
riscv_mutex_spinbit_lock_init(kmutex_t *__mtx)
{
	__mtx->mtx_owner &= ~MTX_LOCK;
}

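/* True if the spin-lock bit is currently set. */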
static inline bool
riscv_mutex_spinbit_locked_p(const kmutex_t *__mtx)
{
	return (__mtx->mtx_owner & MTX_LOCK) != 0;
}

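/*
 * Try to take the spin-lock bit: atomically OR MTX_LOCK into the owner
 * word with acquire ordering (.aq) and succeed only if the bit was
 * previously clear.
 */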
static inline bool
riscv_mutex_spinbit_lock_try(kmutex_t *__mtx)
{
	uintptr_t __old;
	__asm __volatile(
		"amoor" MTX_ASMOP_SFX ".aq\t%0, %1, (%2)"
	   :	"=r"(__old)
	   :	"r"(MTX_LOCK), "r"(__mtx)
	   :	"memory");	/* compiler barrier around the acquire */
	return (__old & MTX_LOCK) == 0;
}

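/*
 * Release the spin-lock bit: atomically AND out MTX_LOCK with release
 * ordering (.rl); the prior value is discarded into x0.
 */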
static inline void
riscv_mutex_spinbit_lock_unlock(kmutex_t *__mtx)
{
	__asm __volatile(
		"amoand" MTX_ASMOP_SFX ".rl\tx0, %0, (%1)"
	   ::	"r"(~MTX_LOCK), "r"(__mtx)
	   :	"memory");	/* compiler barrier around the release */
}

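/*
 * The assembly fast-path stubs are disabled (#if 0); __HAVE_SIMPLE_MUTEXES
 * selects the MI implementation that keeps all mutex state in mtx_owner.
 */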
#if 0
#define	__HAVE_MUTEX_STUBS		1
#define	__HAVE_SPIN_MUTEX_STUBS		1
#endif
#define	__HAVE_SIMPLE_MUTEXES		1
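
/*
 * Illustrative sketch only (not part of this header): roughly how the
 * MI spin-mutex path in sys/kern/kern_mutex.c is expected to use the
 * hooks above.  Simplified; the real code also tracks SPL nesting and
 * lock counts, spins with backoff, and performs diagnostics.
 *
 *	int s = splraiseipl(MUTEX_SPIN_IPL(mtx));	// raise to the mutex's IPL
 *	while (!MUTEX_SPINBIT_LOCK_TRY(mtx))		// amoor.aq sets MTX_LOCK
 *		;					// spin until acquired
 *	...critical section...
 *	MUTEX_SPINBIT_LOCK_UNLOCK(mtx);			// amoand.rl clears MTX_LOCK
 *	splx(s);					// restore the saved IPL
 */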

#endif	/* __MUTEX_PRIVATE */

#endif /* _RISCV_MUTEX_H_ */