/*	$NetBSD: cpufunc.h,v 1.28.16.2 2006/12/30 20:46:11 yamt Exp $	*/

/*-
 * Copyright (c) 1998 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Charles M. Hannum.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *        This product includes software developed by the NetBSD
 *        Foundation, Inc. and its contributors.
 * 4. Neither the name of The NetBSD Foundation nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _I386_CPUFUNC_H_
#define	_I386_CPUFUNC_H_

/*
 * Functions to provide access to i386-specific instructions.
 */

#include <sys/cdefs.h>
#include <sys/types.h>

#include <machine/segments.h>
#include <machine/specialreg.h>

static __inline void
x86_pause(void)
{
	__asm volatile("pause");
}

/*
 * XXX: it would be better to use the real lfence/sfence/mfence
 * instructions where the CPU supports them.
 *
 * The "memory" clobber keeps the compiler from reordering memory
 * accesses across the fence.
 */
static __inline void
x86_lfence(void)
{

	__asm volatile("lock; addl $0, 0(%%esp)" : : : "memory");
}

static __inline void
x86_sfence(void)
{

	__asm volatile("lock; addl $0, 0(%%esp)" : : : "memory");
}

static __inline void
x86_mfence(void)
{

	__asm volatile("lock; addl $0, 0(%%esp)" : : : "memory");
}

#ifdef _KERNEL

extern unsigned int cpu_feature;

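/*
 * Illustration of the XXX note above the fence functions: a minimal
 * sketch (not part of this header) that issues the real SSE2 "lfence"
 * instruction when the CPU advertises it via CPUID_SSE2 from
 * <machine/specialreg.h>, falling back to the locked add otherwise.
 */
static __inline void
x86_lfence_sse2_sketch(void)
{
	if (cpu_feature & CPUID_SSE2)
		__asm volatile("lfence" : : : "memory");
	else
		__asm volatile("lock; addl $0, 0(%%esp)" : : : "memory");
}
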
static __inline void
invlpg(u_int addr)
{
	__asm volatile("invlpg (%0)" : : "r" (addr) : "memory");
}

static __inline void
lidt(struct region_descriptor *region)
{
	__asm volatile("lidt %0" : : "m" (*region));
}
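
/*
 * Sketch of a lidt() call: point a region descriptor at a (hypothetical)
 * IDT and load it.  The rd_limit/rd_base field names are assumed to
 * match struct region_descriptor in <machine/segments.h>.
 */
static __inline void
load_idt_sketch(struct gate_descriptor *idt, size_t nents)
{
	struct region_descriptor region;

	region.rd_limit = nents * sizeof(*idt) - 1;
	region.rd_base = (unsigned int)idt;
	lidt(&region);
}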

static __inline void
lldt(u_short sel)
{
	__asm volatile("lldt %0" : : "r" (sel));
}

static __inline void
ltr(u_short sel)
{
	__asm volatile("ltr %0" : : "r" (sel));
}

static __inline void
lcr0(u_int val)
{
	__asm volatile("movl %0,%%cr0" : : "r" (val));
}

static __inline u_int
rcr0(void)
{
	u_int val;
	__asm volatile("movl %%cr0,%0" : "=r" (val));
	return val;
}

static __inline u_int
rcr2(void)
{
	u_int val;
	__asm volatile("movl %%cr2,%0" : "=r" (val));
	return val;
}

static __inline void
lcr3(u_int val)
{
	__asm volatile("movl %0,%%cr3" : : "r" (val));
}

static __inline u_int
rcr3(void)
{
	u_int val;
	__asm volatile("movl %%cr3,%0" : "=r" (val));
	return val;
}

static __inline void
lcr4(u_int val)
{
	__asm volatile("movl %0,%%cr4" : : "r" (val));
}

static __inline u_int
rcr4(void)
{
	u_int val;
	__asm volatile("movl %%cr4,%0" : "=r" (val));
	return val;
}

static __inline void
tlbflush(void)
{
	u_int val;
	val = rcr3();
	lcr3(val);
}

static __inline void
tlbflushg(void)
{
	/*
	 * Big hammer: flush all TLB entries, including ones from PTEs
	 * with the G bit set.  This should only be necessary if TLB
	 * shootdown falls far behind.
	 *
	 * Intel Architecture Software Developer's Manual, Volume 3,
	 * System Programming, section 9.10, "Invalidating the
	 * Translation Lookaside Buffers (TLBs)":
	 * "The following operations invalidate all TLB entries, irrespective
	 * of the setting of the G flag:
	 * ...
	 * "(P6 family processors only): Writing to control register CR4 to
	 * modify the PSE, PGE, or PAE flag."
	 *
	 * (The alternatives not quoted above are not options here.)
	 *
	 * If PGE is not in use, we reload CR3 for the benefit of
	 * pre-P6-family processors.
	 */

#if defined(I686_CPU)
	if (cpu_feature & CPUID_PGE) {
		u_int cr4 = rcr4();
		lcr4(cr4 & ~CR4_PGE);
		lcr4(cr4);
	} else
#endif
		tlbflush();
}

#ifdef notyet
void	setidt(int idx, /*XXX*/caddr_t func, int typ, int dpl);
#endif

/* debug registers */
void	dr0(caddr_t, uint32_t, uint32_t, uint32_t);

static __inline u_int
rdr6(void)
{
	u_int val;

	__asm volatile("movl %%dr6,%0" : "=r" (val));
	return val;
}

static __inline void
ldr6(u_int val)
{

	__asm volatile("movl %0,%%dr6" : : "r" (val));
}

/* XXXX ought to be in psl.h with spl() functions */

static __inline void
disable_intr(void)
{
	__asm volatile("cli");
}

static __inline void
enable_intr(void)
{
	__asm volatile("sti");
}

static __inline u_long
read_eflags(void)
{
	u_long ef;

	__asm volatile("pushfl; popl %0" : "=r" (ef));
	return (ef);
}

static __inline void
write_eflags(u_long ef)
{
	__asm volatile("pushl %0; popfl" : : "r" (ef));
}

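/*
 * Sketch (hypothetical helpers, not part of this header): the usual
 * save/disable/restore interrupt pattern built from the primitives
 * above.
 */
static __inline u_long
intr_save_sketch(void)
{
	u_long ef = read_eflags();

	disable_intr();
	return ef;
}

static __inline void
intr_restore_sketch(u_long ef)
{
	write_eflags(ef);	/* restores the IF bit saved earlier */
}
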
static __inline uint64_t
rdmsr(u_int msr)
{
	uint64_t rv;

	__asm volatile("rdmsr" : "=A" (rv) : "c" (msr));
	return (rv);
}

static __inline void
wrmsr(u_int msr, uint64_t newval)
{
	__asm volatile("wrmsr" : : "A" (newval), "c" (msr));
}

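/*
 * Sketch of a read-modify-write on an MSR.  The MSR number and bit
 * are architectural (IA32_APIC_BASE, global-enable bit 11); the
 * helper name is hypothetical.
 */
static __inline void
apicbase_enable_sketch(void)
{
	uint64_t val = rdmsr(0x1b);	/* IA32_APIC_BASE */

	wrmsr(0x1b, val | 0x800);	/* set the global enable bit */
}
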
static __inline void
wbinvd(void)
{
	__asm volatile("wbinvd");
}

static __inline uint64_t
rdtsc(void)
{
	uint64_t rv;

	__asm volatile("rdtsc" : "=A" (rv));
	return (rv);
}

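/*
 * Sketch (hypothetical helper): cycle-counting a short code sequence
 * with rdtsc.  A serializing instruction would normally bracket the
 * reads on out-of-order CPUs; omitted here for brevity.
 */
static __inline uint64_t
tsc_delta_sketch(void)
{
	uint64_t t0 = rdtsc();

	x86_pause();		/* stand-in for the code being measured */
	return rdtsc() - t0;
}
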
static __inline uint64_t
rdpmc(u_int pmc)
{
	uint64_t rv;

	__asm volatile("rdpmc" : "=A" (rv) : "c" (pmc));
	return (rv);
}

/* Break into DDB/KGDB. */
static __inline void
breakpoint(void)
{
	__asm volatile("int $3");
}

#define	read_psl()	read_eflags()
#define	write_psl(x)	write_eflags(x)

/*
 * XXX Maybe these don't belong here...
 */

extern int (*copyout_func)(const void *, void *, size_t);
extern int (*copyin_func)(const void *, void *, size_t);

int	i386_copyout(const void *, void *, size_t);
int	i486_copyout(const void *, void *, size_t);

int	i386_copyin(const void *, void *, size_t);

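/*
 * Sketch of how the indirection above might be used (hypothetical
 * caller): the pointer is set once at boot to the routine matching
 * the CPU family, and all copies go through it.
 */
static __inline int
copyout_via_hook_sketch(const void *kaddr, void *uaddr, size_t len)
{
	return (*copyout_func)(kaddr, uaddr, len);
}
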
#endif /* _KERNEL */

#endif /* !_I386_CPUFUNC_H_ */