/*	$NetBSD: lock.h,v 1.23 2022/02/12 17:17:53 riastradh Exp $	*/

/*-
 * Copyright (c) 1998, 1999, 2000, 2001 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe of the Numerical Aerospace Simulation Facility,
 * NASA Ames Research Center, and Matthew Fredette.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * Machine-dependent spin lock operations.
 */

#ifndef _HPPA_LOCK_H_
#define	_HPPA_LOCK_H_

#include <sys/stdint.h>

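/*
 * The PA-RISC LDCW (load and clear word) instruction requires its
 * operand to be aligned on a 16-byte boundary.  The lock word is
 * therefore embedded in a 16-byte object, and __SIMPLELOCK_ALIGN
 * rounds a pointer into that object up to the aligned word that
 * LDCW actually operates on.
 */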
#define HPPA_LDCW_ALIGN	16UL

#define __SIMPLELOCK_ALIGN(p) \
    (volatile unsigned long *)((((uintptr_t)(p) + HPPA_LDCW_ALIGN - 1)) & \
    ~(HPPA_LDCW_ALIGN - 1))

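/*
 * LDCW atomically loads the lock word and clears it to zero, so a
 * zero word means some CPU has already taken the lock: locked is 0
 * and unlocked is 1, the reverse of the usual convention.
 */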
#define __SIMPLELOCK_RAW_LOCKED		0UL
#define __SIMPLELOCK_RAW_UNLOCKED	1UL

static __inline int
__SIMPLELOCK_LOCKED_P(const __cpu_simple_lock_t *__ptr)
{
	return *__SIMPLELOCK_ALIGN(__ptr) == __SIMPLELOCK_RAW_LOCKED;
}

static __inline int
__SIMPLELOCK_UNLOCKED_P(const __cpu_simple_lock_t *__ptr)
{
	return *__SIMPLELOCK_ALIGN(__ptr) == __SIMPLELOCK_RAW_UNLOCKED;
}

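/*
 * Atomically load the lock word and clear it to zero.  A return
 * value of __SIMPLELOCK_RAW_UNLOCKED (1) means we observed the lock
 * free and now own it; 0 means it was already held.  The "memory"
 * clobber keeps the compiler from moving memory accesses across the
 * acquire.
 */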
static __inline int
__ldcw(volatile unsigned long *__ptr)
{
	int __val;

	__asm volatile("ldcw 0(%1), %0"
	    : "=r" (__val) : "r" (__ptr)
	    : "memory");

	return __val;
}

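/*
 * Memory barrier: the SYNC instruction makes all earlier memory
 * references complete before any later ones are issued.
 */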
static __inline void
__sync(void)
{

	__asm volatile("sync\n"
		: /* no outputs */
		: /* no inputs */
		: "memory");
}

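/*
 * The lock object spans four words so that, at any word-aligned
 * address, it is guaranteed to contain one 16-byte-aligned word for
 * LDCW.  Which word that is depends on the object's address, so mark
 * all four unlocked.
 */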
static __inline void
__cpu_simple_lock_init(__cpu_simple_lock_t *alp)
{

	alp->csl_lock[0] = alp->csl_lock[1] =
	alp->csl_lock[2] = alp->csl_lock[3] =
	    __SIMPLELOCK_RAW_UNLOCKED;
}

static __inline void
__cpu_simple_lock(__cpu_simple_lock_t *alp)
{
	volatile unsigned long *__aptr = __SIMPLELOCK_ALIGN(alp);

	/*
	 * Note: if we find the lock already held at the initial
	 * load-clear-word, we spin on ordinary loads until it
	 * appears free, which saves the coherency logic some
	 * work; only then do we retry the atomic ldcw.
	 */

	while (__ldcw(__aptr) == __SIMPLELOCK_RAW_LOCKED)
		while (*__aptr == __SIMPLELOCK_RAW_LOCKED)
			;
}

static __inline int
__cpu_simple_lock_try(__cpu_simple_lock_t *alp)
{
	volatile unsigned long *__aptr = __SIMPLELOCK_ALIGN(alp);

	return (__ldcw(__aptr) != __SIMPLELOCK_RAW_LOCKED);
}

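/*
 * Release the lock.  The barrier ensures that all stores made while
 * the lock was held are visible before the lock word is rewritten
 * as unlocked; an ordinary store suffices for the release itself.
 */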
static __inline void
__cpu_simple_unlock(__cpu_simple_lock_t *alp)
{
	volatile unsigned long *__aptr = __SIMPLELOCK_ALIGN(alp);

	__sync();
	*__aptr = __SIMPLELOCK_RAW_UNLOCKED;
}

static __inline void
__cpu_simple_lock_set(__cpu_simple_lock_t *alp)
{
	volatile unsigned long *__aptr = __SIMPLELOCK_ALIGN(alp);

	*__aptr = __SIMPLELOCK_RAW_LOCKED;
}

static __inline void
__cpu_simple_lock_clear(__cpu_simple_lock_t *alp)
{
	volatile unsigned long *__aptr = __SIMPLELOCK_ALIGN(alp);

	*__aptr = __SIMPLELOCK_RAW_UNLOCKED;
}

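/*
 * Illustrative sketch (not part of this header): how a caller might
 * use these primitives to guard a shared counter.  The names
 * example_lock, example_count, and example_bump are hypothetical.
 *
 *	static __cpu_simple_lock_t example_lock;
 *	static int example_count;
 *
 *	void
 *	example_bump(void)
 *	{
 *		__cpu_simple_lock(&example_lock);	// spin until owned
 *		example_count++;			// critical section
 *		__cpu_simple_unlock(&example_lock);	// barrier + release
 *	}
 *
 * example_lock must be passed to __cpu_simple_lock_init() exactly
 * once before its first use.
 */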
#endif /* _HPPA_LOCK_H_ */