/*	$NetBSD: lock.h,v 1.12 2007/09/10 11:34:08 skrll Exp $	*/

/*-
 * Copyright (c) 1998, 1999, 2000, 2001 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe of the Numerical Aerospace Simulation Facility,
 * NASA Ames Research Center, and Matthew Fredette.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *        This product includes software developed by the NetBSD
 *        Foundation, Inc. and its contributors.
 * 4. Neither the name of The NetBSD Foundation nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/*
 * Machine-dependent spin lock operations.
 */

44 1.1 fredette #ifndef _HPPA_LOCK_H_
45 1.1 fredette #define _HPPA_LOCK_H_
46 1.2 martin
/*
 * The hppa LDCW (load-and-clear-word) instruction requires its
 * operand to be aligned to a 16-byte boundary.
 */
#define	HPPA_LDCW_ALIGN		16

/*
 * Round a lock pointer up to the next HPPA_LDCW_ALIGN boundary so
 * that it is usable as an LDCW operand.
 */
#define	__SIMPLELOCK_ALIGN(p) \
	(volatile unsigned long *)(((uintptr_t)(p) + HPPA_LDCW_ALIGN - 1) & \
	    ~(HPPA_LDCW_ALIGN - 1))

/*
 * LDCW loads a word and clears it to zero, so on hppa a zero lock
 * word means "held" and a non-zero lock word means "free" -- the
 * reverse of most architectures.
 */
#define	__SIMPLELOCK_RAW_LOCKED		0
#define	__SIMPLELOCK_RAW_UNLOCKED	1
55 1.12 skrll
56 1.12 skrll static __inline int
57 1.12 skrll __SIMPLELOCK_LOCKED_P(__cpu_simple_lock_t *__ptr)
58 1.12 skrll {
59 1.12 skrll return *__SIMPLELOCK_ALIGN(__ptr) == __SIMPLELOCK_RAW_LOCKED;
60 1.12 skrll }
61 1.12 skrll
62 1.12 skrll static __inline int
63 1.12 skrll __SIMPLELOCK_UNLOCKED_P(__cpu_simple_lock_t *__ptr)
64 1.12 skrll {
65 1.12 skrll return *__SIMPLELOCK_ALIGN(__ptr) == __SIMPLELOCK_RAW_UNLOCKED;
66 1.12 skrll }
67 1.12 skrll
/*
 * Atomically load the word at __ptr and clear it to zero (hppa
 * LDCW).  Returns the value that was in memory; the "memory"
 * clobber stops the compiler from caching lock words across the
 * operation.  __ptr must be HPPA_LDCW_ALIGN-aligned.
 */
static __inline int
__ldcw(volatile unsigned long *__ptr)
{
	int __ret;

	__asm volatile("ldcw 0(%1), %0"
	    : "=r" (__ret)
	    : "r" (__ptr)
	    : "memory");

	return __ret;
}
79 1.10 skrll
/*
 * Issue the hppa SYNC instruction to enforce completion of all
 * earlier memory accesses before any later ones; also a compiler
 * barrier via the "memory" clobber.
 */
static __inline void
__sync(void)
{

	__asm volatile("sync\n"
	    : /* no outputs */
	    : /* no inputs */
	    : "memory");
}
89 1.10 skrll
90 1.9 perry static __inline void
91 1.1 fredette __cpu_simple_lock_init(__cpu_simple_lock_t *alp)
92 1.1 fredette {
93 1.12 skrll __cpu_simple_lock_t ul = __SIMPLELOCK_UNLOCKED;
94 1.10 skrll
95 1.12 skrll *alp = ul;
96 1.10 skrll __sync();
97 1.1 fredette }
98 1.1 fredette
99 1.9 perry static __inline void
100 1.1 fredette __cpu_simple_lock(__cpu_simple_lock_t *alp)
101 1.1 fredette {
102 1.12 skrll volatile unsigned long *__aptr = __SIMPLELOCK_ALIGN(alp);
103 1.1 fredette
104 1.1 fredette /*
105 1.1 fredette * Note, if we detect that the lock is held when
106 1.1 fredette * we do the initial load-clear-word, we spin using
107 1.1 fredette * a non-locked load to save the coherency logic
108 1.1 fredette * some work.
109 1.1 fredette */
110 1.1 fredette
111 1.12 skrll while (__ldcw(__aptr) == __SIMPLELOCK_RAW_LOCKED)
112 1.12 skrll while (*__aptr == __SIMPLELOCK_RAW_LOCKED)
113 1.10 skrll ;
114 1.1 fredette }
115 1.1 fredette
116 1.9 perry static __inline int
117 1.1 fredette __cpu_simple_lock_try(__cpu_simple_lock_t *alp)
118 1.1 fredette {
119 1.12 skrll volatile unsigned long *__aptr = __SIMPLELOCK_ALIGN(alp);
120 1.1 fredette
121 1.12 skrll return (__ldcw(__aptr) != __SIMPLELOCK_RAW_LOCKED);
122 1.1 fredette }
123 1.1 fredette
124 1.9 perry static __inline void
125 1.1 fredette __cpu_simple_unlock(__cpu_simple_lock_t *alp)
126 1.1 fredette {
127 1.12 skrll volatile unsigned long *__aptr = __SIMPLELOCK_ALIGN(alp);
128 1.12 skrll
129 1.10 skrll __sync();
130 1.12 skrll *__aptr = __SIMPLELOCK_RAW_UNLOCKED;
131 1.12 skrll }
132 1.12 skrll
133 1.12 skrll static __inline void
134 1.12 skrll __cpu_simple_lock_set(__cpu_simple_lock_t *alp)
135 1.12 skrll {
136 1.12 skrll volatile unsigned long *__aptr = __SIMPLELOCK_ALIGN(alp);
137 1.12 skrll
138 1.12 skrll *__aptr = __SIMPLELOCK_RAW_LOCKED;
139 1.12 skrll }
140 1.12 skrll
141 1.12 skrll static __inline void
142 1.12 skrll __cpu_simple_lock_clear(__cpu_simple_lock_t *alp)
143 1.12 skrll {
144 1.12 skrll volatile unsigned long *__aptr = __SIMPLELOCK_ALIGN(alp);
145 1.12 skrll
146 1.12 skrll *__aptr = __SIMPLELOCK_RAW_UNLOCKED;
147 1.1 fredette }
148 1.1 fredette
/*
 * Read memory barrier.  hppa SYNC orders all accesses, so the full
 * barrier is used.
 */
static __inline void
mb_read(void)
{

	__sync();
}
154 1.11 ad
/*
 * Write memory barrier.  hppa SYNC orders all accesses, so the full
 * barrier is used.
 */
static __inline void
mb_write(void)
{

	__sync();
}
160 1.11 ad
/*
 * Full memory barrier (reads and writes).
 */
static __inline void
mb_memory(void)
{

	__sync();
}
166 1.11 ad
167 1.1 fredette #endif /* _HPPA_LOCK_H_ */
168