/*	$NetBSD: lock.h,v 1.29 2007/09/10 11:34:09 skrll Exp $ */

/*-
 * Copyright (c) 1998, 1999, 2006 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Paul Kranenburg and Andrew Doran.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *        This product includes software developed by the NetBSD
 *        Foundation, Inc. and its contributors.
 * 4. Neither the name of The NetBSD Foundation nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _MACHINE_LOCK_H
#define _MACHINE_LOCK_H

/*
 * Machine dependent spin lock operations.
 *
 * The lock word is a single byte manipulated with the SPARC "ldstub"
 * instruction, which atomically sets the byte to all ones.  An unlocked
 * lock must therefore read as zero, which the check below enforces.
 */

#if __SIMPLELOCK_UNLOCKED != 0
#error __SIMPLELOCK_UNLOCKED must be 0 for this implementation
#endif

/* XXX So we can expose this to userland. */
#ifdef __lint__
#define __ldstub(__addr)	(__addr)
#else /* !__lint__ */
/*
 * Atomically set the lock byte to all ones and return its previous
 * value.
 */
static __inline int __ldstub(__cpu_simple_lock_t *addr);
static __inline int __ldstub(__cpu_simple_lock_t *addr)
{
	int v;

	__asm volatile("ldstub [%1],%0"
	    : "=&r" (v)
	    : "r" (addr)
	    : "memory");

	return v;
}
#endif /* __lint__ */
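
/*
 * Illustration only: a non-atomic C sketch of what __ldstub() does.
 * The real ldstub instruction performs the load and the store of 0xff
 * as a single indivisible operation; this sketch (named and written
 * purely for exposition) only shows the data flow.
 */
#if 0
static __inline int
__ldstub_sketch(__cpu_simple_lock_t *addr)
{
	int v = *addr;	/* fetch the previous lock byte */
	*addr = 0xff;	/* mark the lock held (all ones) */
	return v;	/* zero means the caller won the lock */
}
#endif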

static __inline void __cpu_simple_lock_init(__cpu_simple_lock_t *)
	__attribute__((__unused__));
static __inline int __cpu_simple_lock_try(__cpu_simple_lock_t *)
	__attribute__((__unused__));
static __inline void __cpu_simple_unlock(__cpu_simple_lock_t *)
	__attribute__((__unused__));
#ifndef __CPU_SIMPLE_LOCK_NOINLINE
static __inline void __cpu_simple_lock(__cpu_simple_lock_t *)
	__attribute__((__unused__));
#else
extern void __cpu_simple_lock(__cpu_simple_lock_t *);
#endif

static __inline int
__SIMPLELOCK_LOCKED_P(__cpu_simple_lock_t *__ptr)
{
	return *__ptr == __SIMPLELOCK_LOCKED;
}

static __inline int
__SIMPLELOCK_UNLOCKED_P(__cpu_simple_lock_t *__ptr)
{
	return *__ptr == __SIMPLELOCK_UNLOCKED;
}

static __inline void
__cpu_simple_lock_clear(__cpu_simple_lock_t *__ptr)
{
	*__ptr = __SIMPLELOCK_UNLOCKED;
}

static __inline void
__cpu_simple_lock_set(__cpu_simple_lock_t *__ptr)
{
	*__ptr = __SIMPLELOCK_LOCKED;
}

static __inline void
__cpu_simple_lock_init(__cpu_simple_lock_t *alp)
{

	*alp = __SIMPLELOCK_UNLOCKED;
}

#ifndef __CPU_SIMPLE_LOCK_NOINLINE
static __inline void
__cpu_simple_lock(__cpu_simple_lock_t *alp)
{

	/*
	 * If someone else holds the lock, use simple reads until it
	 * is released, then retry the atomic operation.  This reduces
	 * memory bus contention because the cache-coherency logic
	 * does not have to broadcast invalidates on the lock while
	 * we spin on it.
	 */
	while (__ldstub(alp) != __SIMPLELOCK_UNLOCKED) {
		while (*alp != __SIMPLELOCK_UNLOCKED)
			/* spin */ ;
	}
}
#endif /* __CPU_SIMPLE_LOCK_NOINLINE */

static __inline int
__cpu_simple_lock_try(__cpu_simple_lock_t *alp)
{

	return (__ldstub(alp) == __SIMPLELOCK_UNLOCKED);
}

static __inline void
__cpu_simple_unlock(__cpu_simple_lock_t *alp)
{

	/*
	 * Insert compiler barrier to prevent instruction re-ordering
	 * around the lock release.
	 */
	__insn_barrier();
	*alp = __SIMPLELOCK_UNLOCKED;
}

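/*
 * Illustration only: how a hypothetical caller might drive these
 * primitives.  The lock, counter, and function names below are
 * invented for the example and are not part of this header.
 */
#if 0
static __cpu_simple_lock_t example_lock = __SIMPLELOCK_UNLOCKED;
static int example_counter;

static void
example_increment(void)
{
	__cpu_simple_lock(&example_lock);	/* spins until acquired */
	example_counter++;			/* critical section */
	__cpu_simple_unlock(&example_lock);
}

static int
example_increment_try(void)
{
	if (!__cpu_simple_lock_try(&example_lock))
		return 0;		/* lock was busy; caller may retry */
	example_counter++;
	__cpu_simple_unlock(&example_lock);
	return 1;
}
#endif
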
#if defined(__sparc_v9__)
static __inline void
mb_read(void)
{
	__asm __volatile("membar #LoadLoad" : : : "memory");
}

static __inline void
mb_write(void)
{
	/*
	 * Stores are not reordered with respect to each other in the
	 * TSO memory model assumed here, so a compiler barrier is all
	 * that is needed.
	 */
	__asm __volatile("" : : : "memory");
}

static __inline void
mb_memory(void)
{
	__asm __volatile("membar #MemIssue" : : : "memory");
}
#else	/* __sparc_v9__ */
static __inline void
mb_read(void)
{
	/* Use a dummy store as the barrier on pre-V9 CPUs. */
	static volatile int junk;
	__asm volatile("st %%g0,[%0]"
	    :
	    : "r" (&junk)
	    : "memory");
}

static __inline void
mb_write(void)
{
	__insn_barrier();
}

static __inline void
mb_memory(void)
{
	mb_read();
}
#endif	/* __sparc_v9__ */

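/*
 * Illustration only: one plausible producer/consumer pairing of
 * mb_write() and mb_read().  The flag and payload variables are
 * invented for the example.
 */
#if 0
static volatile int example_payload;
static volatile int example_ready;

static void
example_publish(int value)
{
	example_payload = value;	/* write the payload first... */
	mb_write();			/* ...and order it before the flag */
	example_ready = 1;
}

static int
example_consume(int *valuep)
{
	if (!example_ready)
		return 0;
	mb_read();	/* order the flag read before the payload read */
	*valuep = example_payload;
	return 1;
}
#endif
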
#endif /* _MACHINE_LOCK_H */