/*	$NetBSD: cpufunc.h,v 1.31.18.1 2006/08/27 06:25:30 riz Exp $	*/

/*-
 * Copyright (c) 1998 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Charles M. Hannum.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *        This product includes software developed by the NetBSD
 *        Foundation, Inc. and its contributors.
 * 4. Neither the name of The NetBSD Foundation nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _I386_CPUFUNC_H_
#define	_I386_CPUFUNC_H_

/*
 * Functions to provide access to i386-specific instructions.
 */

#include <sys/cdefs.h>
#include <sys/types.h>

#include <machine/segments.h>
#include <machine/specialreg.h>

static __inline void
x86_pause(void)
{
	__asm volatile("pause");
}

static __inline void
x86_lfence(void)
{

	/*
	 * XXX it's better to use real lfence insn if available.
	 */
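	/*
	 * A hedged sketch of that alternative, not what this revision
	 * does: in the kernel, where cpu_feature is available, the
	 * CPUID SSE2 bit implies LFENCE support, so one could do e.g.
	 *
	 *	if (cpu_feature & CPUID_SSE2)
	 *		__asm volatile("lfence" : : : "memory");
	 *	else
	 *		__asm volatile("lock; addl $0, 0(%%esp)" : : : "memory");
	 */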
	__asm volatile("lock; addl $0, 0(%%esp)" : : : "memory");
}

#ifdef _KERNEL

extern unsigned int cpu_feature;

static __inline void
invlpg(u_int addr)
{
	__asm volatile("invlpg (%0)" : : "r" (addr) : "memory");
}

static __inline void
lidt(struct region_descriptor *region)
{
	__asm volatile("lidt %0" : : "m" (*region));
}

static __inline void
lldt(u_short sel)
{
	__asm volatile("lldt %0" : : "r" (sel));
}

static __inline void
ltr(u_short sel)
{
	__asm volatile("ltr %0" : : "r" (sel));
}

static __inline void
lcr0(u_int val)
{
	__asm volatile("movl %0,%%cr0" : : "r" (val));
}

static __inline u_int
rcr0(void)
{
	u_int val;
	__asm volatile("movl %%cr0,%0" : "=r" (val));
	return val;
}

static __inline u_int
rcr2(void)
{
	u_int val;
	__asm volatile("movl %%cr2,%0" : "=r" (val));
	return val;
}

static __inline void
lcr3(u_int val)
{
	__asm volatile("movl %0,%%cr3" : : "r" (val));
}

static __inline u_int
rcr3(void)
{
	u_int val;
	__asm volatile("movl %%cr3,%0" : "=r" (val));
	return val;
}

static __inline void
lcr4(u_int val)
{
	__asm volatile("movl %0,%%cr4" : : "r" (val));
}

static __inline u_int
rcr4(void)
{
	u_int val;
	__asm volatile("movl %%cr4,%0" : "=r" (val));
	return val;
}

static __inline void
tlbflush(void)
{
	u_int val;
	val = rcr3();
	lcr3(val);
}

static __inline void
tlbflushg(void)
{
	/*
	 * Big hammer: flush all TLB entries, including ones from PTE's
	 * with the G bit set.  This should only be necessary if TLB
	 * shootdown falls far behind.
	 *
	 * Intel Architecture Software Developer's Manual, Volume 3,
	 * System Programming, section 9.10, "Invalidating the
	 * Translation Lookaside Buffers (TLBS)":
	 * "The following operations invalidate all TLB entries, irrespective
	 * of the setting of the G flag:
	 * ...
	 * "(P6 family processors only): Writing to control register CR4 to
	 * modify the PSE, PGE, or PAE flag."
	 *
	 * (the alternatives not quoted above are not an option here.)
	 *
	 * If PGE is not in use, we reload CR3 for the benefit of
	 * pre-P6-family processors.
	 */

#if defined(I686_CPU)
	if (cpu_feature & CPUID_PGE) {
		u_int cr4 = rcr4();
		lcr4(cr4 & ~CR4_PGE);
		lcr4(cr4);
	} else
#endif
		tlbflush();
}

#ifdef notyet
void	setidt(int idx, /*XXX*/caddr_t func, int typ, int dpl);
#endif

/* debug register */
void	dr0(caddr_t, uint32_t, uint32_t, uint32_t);

static __inline u_int
rdr6(void)
{
	u_int val;

	__asm volatile("movl %%dr6,%0" : "=r" (val));
	return val;
}

static __inline void
ldr6(u_int val)
{

	__asm volatile("movl %0,%%dr6" : : "r" (val));
}

/* XXXX ought to be in psl.h with spl() functions */

static __inline void
disable_intr(void)
{
	__asm volatile("cli");
}

static __inline void
enable_intr(void)
{
	__asm volatile("sti");
}

static __inline u_long
read_eflags(void)
{
	u_long ef;

	__asm volatile("pushfl; popl %0" : "=r" (ef));
	return (ef);
}

static __inline void
write_eflags(u_long ef)
{
	__asm volatile("pushl %0; popfl" : : "r" (ef));
}

static __inline uint64_t
rdmsr(u_int msr)
{
	uint64_t rv;

	__asm volatile("rdmsr" : "=A" (rv) : "c" (msr));
	return (rv);
}

static __inline void
wrmsr(u_int msr, uint64_t newval)
{
	__asm volatile("wrmsr" : : "A" (newval), "c" (msr));
}

static __inline void
wbinvd(void)
{
	__asm volatile("wbinvd");
}

static __inline uint64_t
rdtsc(void)
{
	uint64_t rv;

	__asm volatile("rdtsc" : "=A" (rv));
	return (rv);
}

static __inline uint64_t
rdpmc(u_int pmc)
{
	uint64_t rv;

	__asm volatile("rdpmc" : "=A" (rv) : "c" (pmc));
	return (rv);
}

/* Break into DDB/KGDB. */
static __inline void
breakpoint(void)
{
	__asm volatile("int $3");
}

#define	read_psl()	read_eflags()
#define	write_psl(x)	write_eflags(x)

/*
 * XXX Maybe these don't belong here...
 */

extern int (*copyout_func)(const void *, void *, size_t);
extern int (*copyin_func)(const void *, void *, size_t);

int	i386_copyout(const void *, void *, size_t);
int	i486_copyout(const void *, void *, size_t);

int	i386_copyin(const void *, void *, size_t);

#endif /* _KERNEL */

#endif /* !_I386_CPUFUNC_H_ */