/*	$NetBSD: lock.h,v 1.15 2007/11/13 11:37:06 skrll Exp $	*/

/*-
 * Copyright (c) 1998, 1999, 2000, 2001 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Jason R. Thorpe of the Numerical Aerospace Simulation Facility,
 * NASA Ames Research Center, and Matthew Fredette.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *        This product includes software developed by the NetBSD
 *        Foundation, Inc. and its contributors.
 * 4. Neither the name of The NetBSD Foundation nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
39 1.1 fredette
/*
 * Machine-dependent spin lock operations.
 */
43 1.1 fredette
44 1.1 fredette #ifndef _HPPA_LOCK_H_
45 1.1 fredette #define _HPPA_LOCK_H_
46 1.2 martin
47 1.13 he #include <sys/stdint.h>
48 1.13 he
#define	HPPA_LDCW_ALIGN	16

/*
 * The hppa load-and-clear-word instruction (ldcw) requires its
 * operand to be aligned on a 16-byte boundary, so the actual lock
 * word is the first 16-byte-aligned word inside the (oversized)
 * __cpu_simple_lock_t storage.
 */
#define __SIMPLELOCK_ALIGN(p)	\
    (volatile unsigned long *)(((uintptr_t)(p) + HPPA_LDCW_ALIGN - 1) & \
	~(HPPA_LDCW_ALIGN - 1))

/*
 * Note the inverted sense compared to most ports: ldcw atomically
 * reads the word and clears it to zero, so zero must mean "locked"
 * and non-zero "unlocked".
 */
#define __SIMPLELOCK_RAW_LOCKED		0
#define __SIMPLELOCK_RAW_UNLOCKED	1
57 1.12 skrll
58 1.12 skrll static __inline int
59 1.12 skrll __SIMPLELOCK_LOCKED_P(__cpu_simple_lock_t *__ptr)
60 1.12 skrll {
61 1.12 skrll return *__SIMPLELOCK_ALIGN(__ptr) == __SIMPLELOCK_RAW_LOCKED;
62 1.12 skrll }
63 1.12 skrll
64 1.12 skrll static __inline int
65 1.12 skrll __SIMPLELOCK_UNLOCKED_P(__cpu_simple_lock_t *__ptr)
66 1.12 skrll {
67 1.12 skrll return *__SIMPLELOCK_ALIGN(__ptr) == __SIMPLELOCK_RAW_UNLOCKED;
68 1.12 skrll }
69 1.12 skrll
/*
 * Atomically load the word at __ptr and clear it to zero --
 * hppa's only atomic read-modify-write primitive.  Returns the
 * value the word held before it was cleared, so a return of
 * __SIMPLELOCK_RAW_LOCKED (0) means someone else already holds
 * the lock.
 *
 * NOTE(review): __ptr must be HPPA_LDCW_ALIGN (16-byte) aligned;
 * callers obtain it via __SIMPLELOCK_ALIGN().
 */
static __inline int
__ldcw(volatile unsigned long *__ptr)
{
	int __val;

	__asm volatile("ldcw 0(%1), %0"
	    : "=r" (__val) : "r" (__ptr)
	    : "memory");

	return __val;
}
81 1.10 skrll
/*
 * Full memory barrier using the hppa "sync" instruction; the
 * "memory" clobber also stops the compiler from reordering memory
 * accesses across it.
 */
static __inline void
__sync(void)
{

	__asm volatile("sync\n"
	    : /* no outputs */
	    : /* no inputs */
	    : "memory");
}
91 1.10 skrll
92 1.9 perry static __inline void
93 1.1 fredette __cpu_simple_lock_init(__cpu_simple_lock_t *alp)
94 1.1 fredette {
95 1.15 skrll alp->csl_lock[0] = alp->csl_lock[1] =
96 1.15 skrll alp->csl_lock[2] = alp->csl_lock[3] =
97 1.15 skrll __SIMPLELOCK_RAW_UNLOCKED;
98 1.10 skrll __sync();
99 1.1 fredette }
100 1.1 fredette
101 1.9 perry static __inline void
102 1.1 fredette __cpu_simple_lock(__cpu_simple_lock_t *alp)
103 1.1 fredette {
104 1.12 skrll volatile unsigned long *__aptr = __SIMPLELOCK_ALIGN(alp);
105 1.1 fredette
106 1.1 fredette /*
107 1.1 fredette * Note, if we detect that the lock is held when
108 1.1 fredette * we do the initial load-clear-word, we spin using
109 1.1 fredette * a non-locked load to save the coherency logic
110 1.1 fredette * some work.
111 1.1 fredette */
112 1.1 fredette
113 1.12 skrll while (__ldcw(__aptr) == __SIMPLELOCK_RAW_LOCKED)
114 1.12 skrll while (*__aptr == __SIMPLELOCK_RAW_LOCKED)
115 1.10 skrll ;
116 1.1 fredette }
117 1.1 fredette
118 1.9 perry static __inline int
119 1.1 fredette __cpu_simple_lock_try(__cpu_simple_lock_t *alp)
120 1.1 fredette {
121 1.12 skrll volatile unsigned long *__aptr = __SIMPLELOCK_ALIGN(alp);
122 1.1 fredette
123 1.12 skrll return (__ldcw(__aptr) != __SIMPLELOCK_RAW_LOCKED);
124 1.1 fredette }
125 1.1 fredette
/*
 * Release the lock.  The barrier must come BEFORE the store so
 * that every access made inside the critical section is globally
 * visible before the lock is seen as free -- do not reorder.
 */
static __inline void
__cpu_simple_unlock(__cpu_simple_lock_t *alp)
{
	volatile unsigned long *__aptr = __SIMPLELOCK_ALIGN(alp);

	__sync();
	*__aptr = __SIMPLELOCK_RAW_UNLOCKED;
}
134 1.12 skrll
135 1.12 skrll static __inline void
136 1.12 skrll __cpu_simple_lock_set(__cpu_simple_lock_t *alp)
137 1.12 skrll {
138 1.12 skrll volatile unsigned long *__aptr = __SIMPLELOCK_ALIGN(alp);
139 1.12 skrll
140 1.12 skrll *__aptr = __SIMPLELOCK_RAW_LOCKED;
141 1.12 skrll }
142 1.12 skrll
143 1.12 skrll static __inline void
144 1.12 skrll __cpu_simple_lock_clear(__cpu_simple_lock_t *alp)
145 1.12 skrll {
146 1.12 skrll volatile unsigned long *__aptr = __SIMPLELOCK_ALIGN(alp);
147 1.12 skrll
148 1.12 skrll *__aptr = __SIMPLELOCK_RAW_UNLOCKED;
149 1.1 fredette }
150 1.1 fredette
/*
 * Read memory barrier.  hppa provides only the full "sync"
 * barrier, so this is a full barrier too.
 */
static __inline void
mb_read(void)
{
	__sync();
}
156 1.11 ad
/*
 * Write memory barrier.  hppa provides only the full "sync"
 * barrier, so this is a full barrier too.
 */
static __inline void
mb_write(void)
{
	__sync();
}
162 1.11 ad
/*
 * Full (read/write) memory barrier.
 */
static __inline void
mb_memory(void)
{
	__sync();
}
168 1.11 ad
169 1.1 fredette #endif /* _HPPA_LOCK_H_ */
170