/*	$NetBSD: cpufunc.h,v 1.28.16.3 2007/02/26 09:07:01 yamt Exp $	*/

/*-
 * Copyright (c) 1998 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Charles M. Hannum.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *	This product includes software developed by the NetBSD
 *	Foundation, Inc. and its contributors.
 * 4. Neither the name of The NetBSD Foundation nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _I386_CPUFUNC_H_
#define	_I386_CPUFUNC_H_

/*
 * Functions to provide access to i386-specific instructions.
 */

#include <sys/cdefs.h>
#include <sys/types.h>

#include <machine/segments.h>
#include <machine/specialreg.h>

#ifdef _KERNEL
void	x86_pause(void);
#else
static __inline void
x86_pause(void)
{
	__asm volatile("pause");
}
#endif
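
/*
 * Example (illustrative only, not part of this header): x86_pause()
 * is intended for the body of a busy-wait loop, where it saves power
 * and yields execution resources to a hyperthreaded sibling.  The
 * lock variable here is hypothetical:
 *
 *	while (*lockp != 0)
 *		x86_pause();
 */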

/*
 * XXX It would be better to use the real lfence/sfence/mfence
 * instructions where the CPU provides them.
 *
 * The "memory" clobber prevents the compiler from reordering loads
 * and stores across the fence.
 */
static __inline void
x86_lfence(void)
{

	__asm volatile("lock; addl $0, 0(%%esp)" : : : "memory");
}

static __inline void
x86_sfence(void)
{

	__asm volatile("lock; addl $0, 0(%%esp)" : : : "memory");
}

static __inline void
x86_mfence(void)
{

	__asm volatile("lock; addl $0, 0(%%esp)" : : : "memory");
}
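
/*
 * A sketch of what the XXX above suggests (assumes the kernel has
 * already probed cpu_feature, and uses CPUID_SSE2 from
 * <machine/specialreg.h>); this is not what the header currently does:
 *
 *	static __inline void
 *	x86_lfence(void)
 *	{
 *
 *		if (cpu_feature & CPUID_SSE2)
 *			__asm volatile("lfence" : : : "memory");
 *		else
 *			__asm volatile("lock; addl $0, 0(%%esp)"
 *			    : : : "memory");
 *	}
 *
 * A real implementation would patch the instruction at boot (cf.
 * x86_patch() below) rather than branch on every fence.
 */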

#ifdef _KERNEL

void	x86_flush(void);
void	x86_patch(void);

extern unsigned int cpu_feature;

static __inline void
invlpg(u_int addr)
{
	__asm volatile("invlpg (%0)" : : "r" (addr) : "memory");
}
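
/*
 * Example (illustrative, hypothetical pmap code): after modifying a
 * single PTE, invlpg() drops just that translation instead of paying
 * for a full TLB flush:
 *
 *	*pte = npte;		// install the new page-table entry
 *	invlpg(va);		// invalidate the stale mapping of va
 */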

static __inline void
lidt(struct region_descriptor *region)
{
	__asm volatile("lidt %0" : : "m" (*region));
}

static __inline void
lldt(u_short sel)
{
	__asm volatile("lldt %0" : : "r" (sel));
}

static __inline void
ltr(u_short sel)
{
	__asm volatile("ltr %0" : : "r" (sel));
}
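
/*
 * Example (sketch; assumes the rd_limit/rd_base member names of the
 * i386 struct region_descriptor in <machine/segments.h>, and a
 * hypothetical table "idt" of nidt gate descriptors):
 *
 *	struct region_descriptor region;
 *
 *	region.rd_limit = nidt * sizeof(idt[0]) - 1;
 *	region.rd_base = (u_int)idt;
 *	lidt(&region);
 */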

static __inline void
lcr0(u_int val)
{
	__asm volatile("movl %0,%%cr0" : : "r" (val));
}

static __inline u_int
rcr0(void)
{
	u_int val;
	__asm volatile("movl %%cr0,%0" : "=r" (val));
	return val;
}
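
/*
 * Example (sketch; CR0_WP is from <machine/specialreg.h>): control
 * registers are updated with the usual read-modify-write pattern,
 * here turning on supervisor-mode write protection:
 *
 *	lcr0(rcr0() | CR0_WP);
 */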

static __inline u_int
rcr2(void)
{
	u_int val;
	__asm volatile("movl %%cr2,%0" : "=r" (val));
	return val;
}

static __inline void
lcr3(u_int val)
{
	__asm volatile("movl %0,%%cr3" : : "r" (val));
}

static __inline u_int
rcr3(void)
{
	u_int val;
	__asm volatile("movl %%cr3,%0" : "=r" (val));
	return val;
}

static __inline void
lcr4(u_int val)
{
	__asm volatile("movl %0,%%cr4" : : "r" (val));
}

static __inline u_int
rcr4(void)
{
	u_int val;
	__asm volatile("movl %%cr4,%0" : "=r" (val));
	return val;
}

static __inline void
tlbflush(void)
{
	u_int val;
	val = rcr3();
	lcr3(val);
}

static __inline void
tlbflushg(void)
{
	/*
	 * Big hammer: flush all TLB entries, including ones from PTEs
	 * with the G bit set.  This should only be necessary if TLB
	 * shootdown falls far behind.
	 *
	 * Intel Architecture Software Developer's Manual, Volume 3,
	 * System Programming, section 9.10, "Invalidating the
	 * Translation Lookaside Buffers (TLBs)":
	 * "The following operations invalidate all TLB entries,
	 * irrespective of the setting of the G flag:
	 * ...
	 * "(P6 family processors only): Writing to control register CR4 to
	 * modify the PSE, PGE, or PAE flag."
	 *
	 * (The alternatives not quoted above are not an option here.)
	 *
	 * If PGE is not in use, we reload CR3 for the benefit of
	 * pre-P6-family processors.
	 */

#if defined(I686_CPU)
	if (cpu_feature & CPUID_PGE) {
		u_int cr4 = rcr4();
		lcr4(cr4 & ~CR4_PGE);
		lcr4(cr4);
	} else
#endif
		tlbflush();
}

#ifdef notyet
void	setidt(int idx, /*XXX*/caddr_t func, int typ, int dpl);
#endif

/* debug registers */
void	dr0(caddr_t, uint32_t, uint32_t, uint32_t);

static __inline u_int
rdr6(void)
{
	u_int val;

	__asm volatile("movl %%dr6,%0" : "=r" (val));
	return val;
}

static __inline void
ldr6(u_int val)
{

	__asm volatile("movl %0,%%dr6" : : "r" (val));
}
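
/*
 * Example (sketch; DR6 bit layout per the IA-32 manuals): in a debug
 * trap handler, DR6 reports which hardware breakpoint fired, and it
 * must be cleared by software because the CPU never clears it:
 *
 *	u_int dr6 = rdr6();
 *
 *	if (dr6 & 0x0f)		// B0-B3: a breakpoint condition matched
 *		...handle the watchpoint...
 *	ldr6(0);
 */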

/* XXXX ought to be in psl.h with spl() functions */

static __inline void
disable_intr(void)
{
	__asm volatile("cli");
}

static __inline void
enable_intr(void)
{
	__asm volatile("sti");
}

static __inline u_long
read_eflags(void)
{
	u_long ef;

	__asm volatile("pushfl; popl %0" : "=r" (ef));
	return (ef);
}

static __inline void
write_eflags(u_long ef)
{
	__asm volatile("pushl %0; popfl" : : "r" (ef));
}
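
/*
 * Example (illustrative): the usual pattern for a short critical
 * section, restoring the caller's interrupt state (the PSL_I bit)
 * rather than unconditionally re-enabling interrupts:
 *
 *	u_long ef = read_eflags();
 *
 *	disable_intr();
 *	...critical section...
 *	write_eflags(ef);
 */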

static __inline uint64_t
rdmsr(u_int msr)
{
	uint64_t rv;

	__asm volatile("rdmsr" : "=A" (rv) : "c" (msr));
	return (rv);
}

static __inline void
wrmsr(u_int msr, uint64_t newval)
{
	__asm volatile("wrmsr" : : "A" (newval), "c" (msr));
}
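
/*
 * The "A" constraint above binds the 64-bit MSR value to the
 * %edx:%eax register pair, which is what rdmsr/wrmsr expect.
 *
 * Example (sketch; MSR_TSC is from <machine/specialreg.h>):
 *
 *	wrmsr(MSR_TSC, 0);	// reset the time-stamp counter
 */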

/*
 * Some of the undocumented AMD64 MSRs need a 'passcode' to access.
 *
 * See LinuxBIOSv2: src/cpu/amd/model_fxx/model_fxx_init.c
 */

#define	OPTERON_MSR_PASSCODE	0x9c5a203a

static __inline uint64_t
rdmsr_locked(u_int msr, u_int code)
{
	uint64_t rv;
	__asm volatile("rdmsr"
	    : "=A" (rv)
	    : "c" (msr), "D" (code));
	return (rv);
}

static __inline void
wrmsr_locked(u_int msr, u_int code, uint64_t newval)
{
	__asm volatile("wrmsr"
	    :
	    : "A" (newval), "c" (msr), "D" (code));
}
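
/*
 * Example (sketch; the MSR number is hypothetical): the passcode is
 * passed in %edi via the "D" constraint:
 *
 *	uint64_t v;
 *
 *	v = rdmsr_locked(0xc0010000, OPTERON_MSR_PASSCODE);
 *	wrmsr_locked(0xc0010000, OPTERON_MSR_PASSCODE, v | 1);
 */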

static __inline void
wbinvd(void)
{
	__asm volatile("wbinvd");
}

static __inline uint64_t
rdtsc(void)
{
	uint64_t rv;

	__asm volatile("rdtsc" : "=A" (rv));
	return (rv);
}
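
/*
 * Example (illustrative): cycle-counting a code sequence.  Note that
 * rdtsc is not a serializing instruction, so out-of-order execution
 * can skew short measurements:
 *
 *	uint64_t t0, t1;
 *
 *	t0 = rdtsc();
 *	...code under test...
 *	t1 = rdtsc();
 *	...t1 - t0 is the elapsed cycle count...
 */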

static __inline uint64_t
rdpmc(u_int pmc)
{
	uint64_t rv;

	__asm volatile("rdpmc" : "=A" (rv) : "c" (pmc));
	return (rv);
}

/* Break into DDB/KGDB. */
static __inline void
breakpoint(void)
{
	__asm volatile("int $3");
}

#define	read_psl()	read_eflags()
#define	write_psl(x)	write_eflags(x)

/*
 * XXX Maybe these don't belong here...
 */

extern int (*copyout_func)(const void *, void *, size_t);
extern int (*copyin_func)(const void *, void *, size_t);

int	i386_copyout(const void *, void *, size_t);
int	i486_copyout(const void *, void *, size_t);

int	i386_copyin(const void *, void *, size_t);
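
/*
 * Example (sketch; cpu_class and CPUCLASS_486 are assumptions from
 * <machine/cputypes.h>): the hooks are expected to be set at boot to
 * the variant matching the CPU; the 386 lacks the CR0.WP bit, so
 * i386_copyout must check user page protections by hand:
 *
 *	copyout_func = (cpu_class >= CPUCLASS_486) ?
 *	    i486_copyout : i386_copyout;
 */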

#endif /* _KERNEL */

#endif /* !_I386_CPUFUNC_H_ */