/*	$NetBSD: locore.h,v 1.99 2015/06/09 15:01:05 matt Exp $	*/

/*
 * This file should not be included by MI code!!!
 */

/*
 * Copyright 1996 The Board of Trustees of The Leland Stanford
 * Junior University. All Rights Reserved.
 *
 * Permission to use, copy, modify, and distribute this
 * software and its documentation for any purpose and without
 * fee is hereby granted, provided that the above copyright
 * notice appear in all copies.  Stanford University
 * makes no representations about the suitability of this
 * software for any purpose.  It is provided "as is" without
 * express or implied warranty.
 */

/*
 * Jump table for MIPS CPU locore functions that are implemented
 * differently on different generations, or instruction-set
 * architecture (ISA) levels, of the MIPS family.
 *
 * We currently provide support for MIPS I and MIPS III.
 */

#ifndef _MIPS_LOCORE_H
#define _MIPS_LOCORE_H

#if !defined(_LKM) && defined(_KERNEL_OPT)
#include "opt_cputype.h"
#endif

#include <mips/mutex.h>
#include <mips/cpuregs.h>
#include <mips/reg.h>

struct tlbmask;
struct trapframe;

void	trap(uint32_t, uint32_t, vaddr_t, vaddr_t, struct trapframe *);
void	ast(void);

void	mips_fpu_trap(vaddr_t, struct trapframe *);
void	mips_fpu_intr(vaddr_t, struct trapframe *);

vaddr_t	mips_emul_branch(struct trapframe *, vaddr_t, uint32_t, bool);
void	mips_emul_inst(uint32_t, uint32_t, vaddr_t, struct trapframe *);

void	mips_emul_fp(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_branchdelayslot(uint32_t, struct trapframe *, uint32_t);

void	mips_emul_lwc0(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_swc0(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_special(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_special3(uint32_t, struct trapframe *, uint32_t);

void	mips_emul_lwc1(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_swc1(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_ldc1(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_sdc1(uint32_t, struct trapframe *, uint32_t);

void	mips_emul_lb(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_lbu(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_lh(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_lhu(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_lw(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_lwl(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_lwr(uint32_t, struct trapframe *, uint32_t);
#if defined(__mips_n32) || defined(__mips_n64) || defined(__mips_o64)
void	mips_emul_lwu(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_ld(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_ldl(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_ldr(uint32_t, struct trapframe *, uint32_t);
#endif
void	mips_emul_sb(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_sh(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_sw(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_swl(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_swr(uint32_t, struct trapframe *, uint32_t);
#if defined(__mips_n32) || defined(__mips_n64) || defined(__mips_o64)
void	mips_emul_sd(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_sdl(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_sdr(uint32_t, struct trapframe *, uint32_t);
#endif

uint32_t mips_cp0_cause_read(void);
void	mips_cp0_cause_write(uint32_t);
uint32_t mips_cp0_status_read(void);
void	mips_cp0_status_write(uint32_t);

void	softint_process(uint32_t);
void	softint_fast_dispatch(struct lwp *, int);

/*
 * Convert an address to the offset used in a MIPS jump instruction.
 * The offset covers the low 28 bits of the address (allowing a jump to
 * anywhere within the same 256MB segment of address space), but since
 * MIPS instructions are always on a 4 byte boundary the low 2 bits are
 * always zero, so those 28 bits are shifted right by 2, leaving a 26-bit
 * result.  To form the offset, we shift left by 4 to clear the upper
 * four bits and then right by 6.
 */
#define	fixup_addr2offset(x)	((((uint32_t)(uintptr_t)(x)) << 4) >> 6)
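/*
 * Worked example for fixup_addr2offset() with a hypothetical kernel text
 * address of 0x80021234: its low 28 bits are 0x0021234, and shifting those
 * right by 2 gives the 26-bit jump offset 0x000848d, which is what the
 * macro computes: ((0x80021234 << 4) >> 6) == 0x000848d.
 */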
typedef bool (*mips_fixup_callback_t)(int32_t, uint32_t [2], void *);
struct mips_jump_fixup_info {
	uint32_t jfi_stub;
	uint32_t jfi_real;
};

void	fixup_splcalls(void);				/* splstubs.c */
bool	mips_fixup_exceptions(mips_fixup_callback_t, void *);
bool	mips_fixup_zero_relative(int32_t, uint32_t [2], void *);
intptr_t mips_fixup_addr(const uint32_t *);
void	mips_fixup_stubs(uint32_t *, uint32_t *);

/*
 * Define these stubs...
 */
void	mips_cpu_switch_resume(struct lwp *);
void	tlb_set_asid(uint32_t);
void	tlb_invalidate_all(void);
void	tlb_invalidate_globals(void);
void	tlb_invalidate_asids(uint32_t, uint32_t);
void	tlb_invalidate_addr(vaddr_t);
u_int	tlb_record_asids(u_long *, uint32_t);
int	tlb_update(vaddr_t, uint32_t);
void	tlb_enter(size_t, vaddr_t, uint32_t);
void	tlb_read_indexed(size_t, struct tlbmask *);
void	tlb_write_indexed(size_t, const struct tlbmask *);
void	wbflush(void);

#ifdef MIPS1
void	mips1_tlb_invalidate_all(void);

uint32_t	tx3900_cp0_config_read(void);
#endif

#if (MIPS3 + MIPS4 + MIPS32 + MIPS32R2 + MIPS64 + MIPS64R2) > 0
uint32_t	mips3_cp0_compare_read(void);
void		mips3_cp0_compare_write(uint32_t);

uint32_t	mips3_cp0_config_read(void);
void		mips3_cp0_config_write(uint32_t);

#if (MIPS32 + MIPS32R2 + MIPS64 + MIPS64R2) > 0
uint32_t	mipsNN_cp0_config1_read(void);
void		mipsNN_cp0_config1_write(uint32_t);
uint32_t	mipsNN_cp0_config2_read(void);
uint32_t	mipsNN_cp0_config3_read(void);

intptr_t	mipsNN_cp0_watchlo_read(u_int);
void		mipsNN_cp0_watchlo_write(u_int, intptr_t);
uint32_t	mipsNN_cp0_watchhi_read(u_int);
void		mipsNN_cp0_watchhi_write(u_int, uint32_t);

int32_t		mipsNN_cp0_ebase_read(void);
void		mipsNN_cp0_ebase_write(int32_t);

#if (MIPS32R2 + MIPS64R2) > 0
void		mipsNN_cp0_hwrena_write(uint32_t);
void		mipsNN_cp0_userlocal_write(void *);
#endif
#endif

uint32_t	mips3_cp0_count_read(void);
void		mips3_cp0_count_write(uint32_t);

uint32_t	mips3_cp0_wired_read(void);
void		mips3_cp0_wired_write(uint32_t);
void		mips3_cp0_pg_mask_write(uint32_t);

#if defined(__GNUC__) && !defined(__mips_o32)
static inline uint64_t
mips3_ld(const volatile uint64_t *va)
{
	uint64_t rv;
#if defined(__mips_o32)
	uint32_t sr;

	sr = mips_cp0_status_read();
	mips_cp0_status_write(sr & ~MIPS_SR_INT_IE);

	__asm volatile(
		".set push		\n\t"
		".set mips3		\n\t"
		".set noreorder		\n\t"
		".set noat		\n\t"
		"ld	%M0,0(%1)	\n\t"
		"dsll32	%L0,%M0,0	\n\t"
		"dsra32	%M0,%M0,0	\n\t"	/* high word */
		"dsra32	%L0,%L0,0	\n\t"	/* low word */
		".set pop"
	    : "=d"(rv)
	    : "r"(va));

	mips_cp0_status_write(sr);
#elif defined(_LP64)
	rv = *va;
#else
	__asm volatile("ld	%0,0(%1)" : "=d"(rv) : "r"(va));
#endif

	return rv;
}
static inline void
mips3_sd(volatile uint64_t *va, uint64_t v)
{
#if defined(__mips_o32)
	uint32_t sr;

	sr = mips_cp0_status_read();
	mips_cp0_status_write(sr & ~MIPS_SR_INT_IE);

	__asm volatile(
		".set push		\n\t"
		".set mips3		\n\t"
		".set noreorder		\n\t"
		".set noat		\n\t"
		"dsll32	%M0,%M0,0	\n\t"
		"dsll32	%L0,%L0,0	\n\t"
		"dsrl32	%L0,%L0,0	\n\t"
		"or	%0,%L0,%M0	\n\t"
		"sd	%0,0(%2)	\n\t"
		".set pop"
	    : "=d"(v) : "0"(v), "r"(va));

	mips_cp0_status_write(sr);
#elif defined(_LP64)
	*va = v;
#else
	__asm volatile("sd	%0,0(%1)" :: "r"(v), "r"(va));
#endif
}
#else
uint64_t mips3_ld(volatile uint64_t *va);
void	mips3_sd(volatile uint64_t *, uint64_t);
#endif	/* __GNUC__ */
#endif	/* (MIPS3 + MIPS4 + MIPS32 + MIPS32R2 + MIPS64 + MIPS64R2) > 0 */
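
/*
 * Illustrative use of mips3_ld()/mips3_sd() (a sketch, not part of this
 * header; "pa" is a hypothetical physical address and MIPS_PHYS_TO_KSEG1()
 * comes from <mips/cpuregs.h>):
 *
 *	volatile uint64_t *r = (volatile uint64_t *)MIPS_PHYS_TO_KSEG1(pa);
 *	uint64_t v = mips3_ld(r);
 *	mips3_sd(r, v | 1);
 *
 * The o32 code above wraps the 64-bit access with interrupts disabled,
 * since an o32 kernel does not, in general, preserve the upper halves of
 * the 64-bit registers across an interrupt.
 */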

#if (MIPS3 + MIPS4 + MIPS64 + MIPS64R2) > 0
static __inline uint32_t	mips3_lw_a64(uint64_t addr) __unused;
static __inline void	mips3_sw_a64(uint64_t addr, uint32_t val) __unused;

static __inline uint32_t
mips3_lw_a64(uint64_t addr)
{
	uint32_t rv;
#if defined(__mips_o32)
	uint32_t sr;

	sr = mips_cp0_status_read();
	mips_cp0_status_write((sr & ~MIPS_SR_INT_IE) | MIPS3_SR_KX);

	__asm volatile (
		".set push		\n\t"
		".set mips3		\n\t"
		".set noreorder		\n\t"
		".set noat		\n\t"
		"dsll32	%M1,%M1,0	\n\t"
		"dsll32	%L1,%L1,0	\n\t"
		"dsrl32	%L1,%L1,0	\n\t"
		"or	%1,%M1,%L1	\n\t"
		"lw	%0, 0(%1)	\n\t"
		".set pop"
	    : "=r"(rv), "=d"(addr)
	    : "1"(addr)
	);

	mips_cp0_status_write(sr);
#elif defined(__mips_n32)
	uint32_t sr = mips_cp0_status_read();
	mips_cp0_status_write((sr & ~MIPS_SR_INT_IE) | MIPS3_SR_KX);
	__asm volatile("lw	%0, 0(%1)" : "=r"(rv) : "d"(addr));
	mips_cp0_status_write(sr);
#elif defined(_LP64)
	rv = *(const uint32_t *)addr;
#else
#error unknown ABI
#endif
	return (rv);
}

static __inline void
mips3_sw_a64(uint64_t addr, uint32_t val)
{
#if defined(__mips_o32)
	uint32_t sr;

	sr = mips_cp0_status_read();
	mips_cp0_status_write((sr & ~MIPS_SR_INT_IE) | MIPS3_SR_KX);

	__asm volatile (
		".set push		\n\t"
		".set mips3		\n\t"
		".set noreorder		\n\t"
		".set noat		\n\t"
		"dsll32	%M0,%M0,0	\n\t"
		"dsll32	%L0,%L0,0	\n\t"
		"dsrl32	%L0,%L0,0	\n\t"
		"or	%0,%M0,%L0	\n\t"
		"sw	%1, 0(%0)	\n\t"
		".set pop"
	    : "=d"(addr): "r"(val), "0"(addr)
	);

	mips_cp0_status_write(sr);
#elif defined(__mips_n32)
	uint32_t sr = mips_cp0_status_read();
	mips_cp0_status_write((sr & ~MIPS_SR_INT_IE) | MIPS3_SR_KX);
	__asm volatile("sw	%1, 0(%0)" :: "d"(addr), "r"(val));
	mips_cp0_status_write(sr);
#elif defined(_LP64)
	*(uint32_t *)addr = val;
#else
#error unknown ABI
#endif
}
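
/*
 * Illustrative use of mips3_lw_a64()/mips3_sw_a64() (a sketch; the address
 * below is a hypothetical device register at physical 0x1f000500, reached
 * through uncached XKPHYS space, base 0x9000000000000000):
 *
 *	uint64_t a = 0x9000000000000000ULL + 0x1f000500;
 *	uint32_t v = mips3_lw_a64(a);
 *	mips3_sw_a64(a, v | 1);
 */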
#endif	/* (MIPS3 + MIPS4 + MIPS64 + MIPS64R2) > 0 */

#if (MIPS64 + MIPS64R2) > 0 && !defined(__mips_o32)
/* 64-bit address space accessors for the n32 and n64 ABIs */

static __inline uint64_t	mips64_ld_a64(uint64_t addr) __unused;
static __inline void	mips64_sd_a64(uint64_t addr, uint64_t val) __unused;

static __inline uint64_t
mips64_ld_a64(uint64_t addr)
{
	uint64_t rv;
#if defined(__mips_n32)
	__asm volatile("ld	%0, 0(%1)" : "=r"(rv) : "d"(addr));
#elif defined(_LP64)
	rv = *(volatile uint64_t *)addr;
#else
#error unknown ABI
#endif
	return (rv);
}

static __inline void
mips64_sd_a64(uint64_t addr, uint64_t val)
{
#if defined(__mips_n32)
	__asm volatile("sd	%1, 0(%0)" :: "d"(addr), "r"(val));
#elif defined(_LP64)
	*(volatile uint64_t *)addr = val;
#else
#error unknown ABI
#endif
}
#endif	/* (MIPS64 + MIPS64R2) > 0 && !defined(__mips_o32) */

/*
 * A vector with an entry for each mips-ISA-level dependent
 * locore function, and macros which jump through it.
 */
typedef struct {
	void	(*ljv_cpu_switch_resume)(struct lwp *);
	intptr_t ljv_lwp_trampoline;
	void	(*ljv_wbflush)(void);
	void	(*ljv_tlb_set_asid)(uint32_t pid);
	void	(*ljv_tlb_invalidate_asids)(uint32_t, uint32_t);
	void	(*ljv_tlb_invalidate_addr)(vaddr_t);
	void	(*ljv_tlb_invalidate_globals)(void);
	void	(*ljv_tlb_invalidate_all)(void);
	u_int	(*ljv_tlb_record_asids)(u_long *, uint32_t);
	int	(*ljv_tlb_update)(vaddr_t, uint32_t);
	void	(*ljv_tlb_enter)(size_t, vaddr_t, uint32_t);
	void	(*ljv_tlb_read_indexed)(size_t, struct tlbmask *);
	void	(*ljv_tlb_write_indexed)(size_t, const struct tlbmask *);
} mips_locore_jumpvec_t;
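
/*
 * Sketch of the intended use (the details live in the MD/MI locore code,
 * not in this header): mips_vector_init() fills in this vector for the
 * CPU's ISA level, and the stub functions declared above (tlb_update(),
 * tlb_invalidate_all(), ...) are then serviced through it, roughly as in
 *
 *	(*mips_locore_jumpvec.ljv_tlb_invalidate_all)();
 */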

typedef struct {
	u_int	(*lav_atomic_cas_uint)(volatile u_int *, u_int, u_int);
	u_long	(*lav_atomic_cas_ulong)(volatile u_long *, u_long, u_long);
	int	(*lav_ucas_uint)(volatile u_int *, u_int, u_int, u_int *);
	int	(*lav_ucas_ulong)(volatile u_long *, u_long, u_long, u_long *);
	void	(*lav_mutex_enter)(kmutex_t *);
	void	(*lav_mutex_exit)(kmutex_t *);
	void	(*lav_mutex_spin_enter)(kmutex_t *);
	void	(*lav_mutex_spin_exit)(kmutex_t *);
} mips_locore_atomicvec_t;

void	mips_set_wbflush(void (*)(void));
void	mips_wait_idle(void);

void	stacktrace(void);
void	logstacktrace(void);

struct cpu_info;
struct splsw;

struct locoresw {
	void	(*lsw_wbflush)(void);
	void	(*lsw_cpu_idle)(void);
	int	(*lsw_send_ipi)(struct cpu_info *, int);
	void	(*lsw_cpu_offline_md)(void);
	void	(*lsw_cpu_init)(struct cpu_info *);
	void	(*lsw_cpu_run)(struct cpu_info *);
	int	(*lsw_bus_error)(unsigned int);
};

struct mips_vmfreelist {
	paddr_t	fl_start;
	paddr_t	fl_end;
	int	fl_freelist;
};

/*
 * The "active" locore-function vectors.
 */
extern const mips_locore_atomicvec_t mips_llsc_locore_atomicvec;

extern mips_locore_atomicvec_t mips_locore_atomicvec;
extern mips_locore_jumpvec_t mips_locore_jumpvec;
extern struct locoresw mips_locoresw;

struct splsw;
struct mips_vmfreelist;
struct phys_ram_seg;

void	mips64r2_vector_init(const struct splsw *);
void	mips_vector_init(const struct splsw *, bool);
void	mips_init_msgbuf(void);
void	mips_init_lwp0_uarea(void);
void	mips_page_physload(vaddr_t, vaddr_t,
	    const struct phys_ram_seg *, size_t,
	    const struct mips_vmfreelist *, size_t);


/*
 * CPU identification, from PRID register.
 */
#define	MIPS_PRID_REV(x)	(((x) >>  0) & 0x00ff)
#define	MIPS_PRID_IMPL(x)	(((x) >>  8) & 0x00ff)

/* pre-MIPS32/64 */
#define	MIPS_PRID_RSVD(x)	(((x) >> 16) & 0xffff)
#define	MIPS_PRID_REV_MIN(x)	((MIPS_PRID_REV(x) >> 0) & 0x0f)
#define	MIPS_PRID_REV_MAJ(x)	((MIPS_PRID_REV(x) >> 4) & 0x0f)

/* MIPS32/64 */
#define	MIPS_PRID_CID(x)	(((x) >> 16) & 0x00ff)	/* Company ID */
#define	MIPS_PRID_CID_PREHISTORIC	0x00	/* Not MIPS32/64 */
#define	MIPS_PRID_CID_MTI		0x01	/* MIPS Technologies, Inc. */
#define	MIPS_PRID_CID_BROADCOM		0x02	/* Broadcom */
#define	MIPS_PRID_CID_ALCHEMY		0x03	/* Alchemy Semiconductor */
#define	MIPS_PRID_CID_SIBYTE		0x04	/* SiByte */
#define	MIPS_PRID_CID_SANDCRAFT		0x05	/* SandCraft */
#define	MIPS_PRID_CID_PHILIPS		0x06	/* Philips */
#define	MIPS_PRID_CID_TOSHIBA		0x07	/* Toshiba */
#define	MIPS_PRID_CID_MICROSOFT		0x07	/* Microsoft also, sigh */
#define	MIPS_PRID_CID_LSI		0x08	/* LSI */
				/*	0x09	unannounced */
				/*	0x0a	unannounced */
#define	MIPS_PRID_CID_LEXRA		0x0b	/* Lexra */
#define	MIPS_PRID_CID_RMI		0x0c	/* RMI / NetLogic */
#define	MIPS_PRID_CID_CAVIUM		0x0d	/* Cavium */
#define	MIPS_PRID_CID_INGENIC		0xe1
#define	MIPS_PRID_COPTS(x)	(((x) >> 24) & 0x00ff)	/* Company Options */
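
/*
 * Worked example for a hypothetical PRID value of 0x00019350:
 *	MIPS_PRID_CID(0x00019350)  == 0x01	(MIPS Technologies, Inc.)
 *	MIPS_PRID_IMPL(0x00019350) == 0x93
 *	MIPS_PRID_REV(0x00019350)  == 0x50
 * and, with the pre-MIPS32/64 reading of the revision field,
 *	MIPS_PRID_REV_MAJ(0x00019350) == 5, MIPS_PRID_REV_MIN(0x00019350) == 0.
 */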

#ifdef _KERNEL
/*
 * Global variables used to communicate CPU type, and parameters
 * such as cache size, from locore to higher-level code (e.g., pmap).
 */
void	mips_pagecopy(void *dst, void *src);
void	mips_pagezero(void *dst);

#ifdef __HAVE_MIPS_MACHDEP_CACHE_CONFIG
void	mips_machdep_cache_config(void);
#endif

/*
 * trapframe argument passed to trap()
 */

#if 0
#define	TF_AST		0	/* really zero */
#define	TF_V0		_R_V0
#define	TF_V1		_R_V1
#define	TF_A0		_R_A0
#define	TF_A1		_R_A1
#define	TF_A2		_R_A2
#define	TF_A3		_R_A3
#define	TF_T0		_R_T0
#define	TF_T1		_R_T1
#define	TF_T2		_R_T2
#define	TF_T3		_R_T3

#if defined(__mips_n32) || defined(__mips_n64)
#define	TF_A4		_R_A4
#define	TF_A5		_R_A5
#define	TF_A6		_R_A6
#define	TF_A7		_R_A7
#else
#define	TF_T4		_R_T4
#define	TF_T5		_R_T5
#define	TF_T6		_R_T6
#define	TF_T7		_R_T7
#endif /* __mips_n32 || __mips_n64 */

#define	TF_TA0		_R_TA0
#define	TF_TA1		_R_TA1
#define	TF_TA2		_R_TA2
#define	TF_TA3		_R_TA3

#define	TF_T8		_R_T8
#define	TF_T9		_R_T9

#define	TF_RA		_R_RA
#define	TF_SR		_R_SR
#define	TF_MULLO	_R_MULLO
#define	TF_MULHI	_R_MULHI
#define	TF_EPC		_R_PC		/* may be changed by trap() call */

#define	TF_NREGS	(sizeof(struct reg) / sizeof(mips_reg_t))
#endif

struct trapframe {
	struct reg tf_registers;
#define	tf_regs	tf_registers.r_regs
	uint32_t tf_ppl;		/* previous priority level */
	mips_reg_t tf_pad;		/* for 8 byte alignment */
};

CTASSERT(sizeof(struct trapframe) % (4*sizeof(mips_reg_t)) == 0);
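
/*
 * Illustrative access pattern (a sketch; the _R_* register indices come
 * from <mips/regnum.h> and are not declared in this header):
 *
 *	struct trapframe *tf = ...;
 *	mips_reg_t sp = tf->tf_regs[_R_SP];
 *	tf->tf_regs[_R_V0] = 0;
 */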

/*
 * Stack frame for kernel traps.  Four arguments are passed in registers;
 * a trapframe is pointed to by the fifth argument, and a dummy sixth
 * argument is used to avoid alignment problems.
 */

struct kernframe {
#if defined(__mips_o32) || defined(__mips_o64)
	register_t cf_args[4 + 1];
#if defined(__mips_o32)
	register_t cf_pad;		/* (for 8 byte alignment) */
#endif
#endif
#if defined(__mips_n32) || defined(__mips_n64)
	register_t cf_pad[2];		/* for 16 byte alignment */
#endif
	register_t cf_sp;
	register_t cf_ra;
	struct trapframe cf_frame;
};

CTASSERT(sizeof(struct kernframe) % (2*sizeof(mips_reg_t)) == 0);

/*
 * PRocessor IDentity TABle
 */

struct pridtab {
	int	cpu_cid;
	int	cpu_pid;
	int	cpu_rev;	/* -1 == wildcard */
	int	cpu_copts;	/* -1 == wildcard */
	int	cpu_isa;	/* -1 == probed (mips32/mips64) */
	int	cpu_ntlb;	/* -1 == unknown, 0 == probed */
	int	cpu_flags;
	u_int	cpu_cp0flags;	/* presence of some cp0 regs */
	u_int	cpu_cidflags;	/* company-specific flags */
	const char	*cpu_name;
};

/*
 * bitfield defines for cpu_cp0flags
 */
#define	MIPS_CP0FL_USE		__BIT(0)	/* use these flags */
#define	MIPS_CP0FL_ECC		__BIT(1)
#define	MIPS_CP0FL_CACHE_ERR	__BIT(2)
#define	MIPS_CP0FL_EIRR		__BIT(3)
#define	MIPS_CP0FL_EIMR		__BIT(4)
#define	MIPS_CP0FL_EBASE	__BIT(5)
#define	MIPS_CP0FL_CONFIG	__BIT(6)
#define	MIPS_CP0FL_CONFIG1	__BIT(7)
#define	MIPS_CP0FL_CONFIG2	__BIT(8)
#define	MIPS_CP0FL_CONFIG3	__BIT(9)
#define	MIPS_CP0FL_CONFIG4	__BIT(10)
#define	MIPS_CP0FL_CONFIG5	__BIT(11)
#define	MIPS_CP0FL_CONFIG6	__BIT(12)
#define	MIPS_CP0FL_CONFIG7	__BIT(13)
#define	MIPS_CP0FL_USERLOCAL	__BIT(14)
#define	MIPS_CP0FL_HWRENA	__BIT(15)

/*
 * cpu_cidflags defines, by company
 */
/*
 * RMI company-specific cpu_cidflags
 */
#define	MIPS_CIDFL_RMI_TYPE		__BITS(2,0)
# define  CIDFL_RMI_TYPE_XLR		0
# define  CIDFL_RMI_TYPE_XLS		1
# define  CIDFL_RMI_TYPE_XLP		2
#define	MIPS_CIDFL_RMI_THREADS_MASK	__BITS(6,3)
# define  MIPS_CIDFL_RMI_THREADS_SHIFT	3
#define	MIPS_CIDFL_RMI_CORES_MASK	__BITS(10,7)
# define  MIPS_CIDFL_RMI_CORES_SHIFT	7
# define  LOG2_1			0
# define  LOG2_2			1
# define  LOG2_4			2
# define  LOG2_8			3
# define  MIPS_CIDFL_RMI_CPUS(ncores, nthreads)				\
		((LOG2_ ## ncores << MIPS_CIDFL_RMI_CORES_SHIFT)	\
		|(LOG2_ ## nthreads << MIPS_CIDFL_RMI_THREADS_SHIFT))
# define  MIPS_CIDFL_RMI_NTHREADS(cidfl)				\
		(1 << (((cidfl) & MIPS_CIDFL_RMI_THREADS_MASK)		\
			>> MIPS_CIDFL_RMI_THREADS_SHIFT))
# define  MIPS_CIDFL_RMI_NCORES(cidfl)					\
		(1 << (((cidfl) & MIPS_CIDFL_RMI_CORES_MASK)		\
			>> MIPS_CIDFL_RMI_CORES_SHIFT))
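/*
 * Example: MIPS_CIDFL_RMI_CPUS(8,4) describes an 8-core, 4-thread part as
 * (LOG2_8 << 7) | (LOG2_4 << 3) == (3 << 7) | (2 << 3) == 0x190;
 * MIPS_CIDFL_RMI_NCORES() and MIPS_CIDFL_RMI_NTHREADS() recover 8 and 4.
 */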
#define	MIPS_CIDFL_RMI_L2SZ_MASK	__BITS(14,11)
# define  MIPS_CIDFL_RMI_L2SZ_SHIFT	11
# define  RMI_L2SZ_256KB		0
# define  RMI_L2SZ_512KB		1
# define  RMI_L2SZ_1MB			2
# define  RMI_L2SZ_2MB			3
# define  RMI_L2SZ_4MB			4
# define  MIPS_CIDFL_RMI_L2(l2sz)					\
		(RMI_L2SZ_ ## l2sz << MIPS_CIDFL_RMI_L2SZ_SHIFT)
# define  MIPS_CIDFL_RMI_L2SZ(cidfl)					\
		((256*1024) << (((cidfl) & MIPS_CIDFL_RMI_L2SZ_MASK)	\
			>> MIPS_CIDFL_RMI_L2SZ_SHIFT))
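/*
 * Example: MIPS_CIDFL_RMI_L2(1MB) == (RMI_L2SZ_1MB << 11) == (2 << 11);
 * MIPS_CIDFL_RMI_L2SZ() maps that back to (256*1024) << 2 == 1MB.
 */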

#endif /* _KERNEL */
#endif /* _MIPS_LOCORE_H */