/* locore.h,v 1.78.36.1.2.29 2011/04/29 08:26:21 matt Exp */

/*
 * This file should not be included by MI code!!!
 */

/*
 * Copyright 1996 The Board of Trustees of The Leland Stanford
 * Junior University. All Rights Reserved.
 *
 * Permission to use, copy, modify, and distribute this
 * software and its documentation for any purpose and without
 * fee is hereby granted, provided that the above copyright
 * notice appear in all copies.  Stanford University
 * makes no representations about the suitability of this
 * software for any purpose.  It is provided "as is" without
 * express or implied warranty.
 */

/*
 * Jump table for MIPS CPU locore functions that are implemented
 * differently on different generations, or instruction-set-architecture
 * (ISA) levels, of the MIPS family.
 *
 * We currently provide support for MIPS I and MIPS III.
 */

#ifndef _MIPS_LOCORE_H
#define _MIPS_LOCORE_H

#ifndef _LKM
#include "opt_cputype.h"
#endif

#include <sys/kcore.h>

#include <mips/mutex.h>
#include <mips/cpuregs.h>
#include <mips/reg.h>

struct tlbmask;
struct trapframe;

void	trap(uint32_t, uint32_t, vaddr_t, vaddr_t, struct trapframe *);
void	ast(void);

struct lwp *
	mips_cpu_switchto(struct lwp *, struct lwp *, bool);

/*
 * Perform a trapsignal, and if cpu_printfataltraps is true, print the trap info
 * to the console.
 */
extern bool cpu_printfataltraps;
void	cpu_trapsignal(struct lwp *, ksiginfo_t *, struct trapframe *);

void	mips_fpu_trap(vaddr_t, struct trapframe *);
void	mips_fpu_intr(vaddr_t, struct trapframe *);

vaddr_t mips_emul_branch(struct trapframe *, vaddr_t, uint32_t, bool);
void	mips_emul_inst(uint32_t, uint32_t, vaddr_t, struct trapframe *);

void	mips_emul_fp(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_branchdelayslot(uint32_t, struct trapframe *, uint32_t);

void	mips_emul_lwc0(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_swc0(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_special(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_special3(uint32_t, struct trapframe *, uint32_t);

void	mips_emul_lwc1(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_swc1(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_ldc1(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_sdc1(uint32_t, struct trapframe *, uint32_t);

void	mips_emul_lb(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_lbu(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_lh(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_lhu(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_lw(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_lwl(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_lwr(uint32_t, struct trapframe *, uint32_t);
#if defined(__mips_n32) || defined(__mips_n64) || defined(__mips_o64)
void	mips_emul_lwu(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_ld(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_ldl(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_ldr(uint32_t, struct trapframe *, uint32_t);
#endif
void	mips_emul_sb(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_sh(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_sw(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_swl(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_swr(uint32_t, struct trapframe *, uint32_t);
#if defined(__mips_n32) || defined(__mips_n64) || defined(__mips_o64)
void	mips_emul_sd(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_sdl(uint32_t, struct trapframe *, uint32_t);
void	mips_emul_sdr(uint32_t, struct trapframe *, uint32_t);
#endif

uint32_t mips_cp0_cause_read(void);
void	mips_cp0_cause_write(uint32_t);
uint32_t mips_cp0_status_read(void);
void	mips_cp0_status_write(uint32_t);

void	softint_process(uint32_t);
void	softint_fast_dispatch(struct lwp *, int);

/*
 * Convert an address to the offset field of a MIPS jump instruction.  The
 * offset covers the low 28 bits of the address (allowing a jump to anywhere
 * within the same 256MB segment of address space), but since MIPS
 * instructions are always on a 4-byte boundary the low 2 bits are always
 * zero, so those 28 bits are shifted right by 2, leaving a 26-bit result.
 * To build the offset, we shift left by 4 to discard the upper four bits
 * and then right by 6.
 */
#define	fixup_addr2offset(x)	((((uint32_t)(uintptr_t)(x)) << 4) >> 6)
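
/*
 * Illustrative sketch only (not part of this header): how the offset
 * produced by fixup_addr2offset() could be folded into a "j" instruction
 * word.  The helper name and the raw 0x08000000 opcode constant are
 * assumptions made for the example, not existing kernel interfaces; the
 * target must lie in the same 256MB segment as the jump itself.
 */
#if 0
static inline uint32_t
mips_example_make_jump(vaddr_t target)
{
	/* Opcode 000010 ("j") in bits 31..26, 26-bit word offset below it. */
	return (0x08000000 | fixup_addr2offset(target));
}
#endif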
typedef bool (*mips_fixup_callback_t)(int32_t, uint32_t [2]);
struct mips_jump_fixup_info {
	uint32_t jfi_stub;
	uint32_t jfi_real;
};

void	fixup_splcalls(void);				/* splstubs.c */
bool	mips_fixup_exceptions(mips_fixup_callback_t);
bool	mips_fixup_zero_relative(int32_t, uint32_t [2]);
intptr_t
	mips_fixup_addr(const uint32_t *);
void	mips_fixup_stubs(uint32_t *, uint32_t *);

/*
 * Define these stubs...
 */
void	mips_cpu_switch_resume(struct lwp *);
void	tlb_set_asid(uint32_t);
void	tlb_invalidate_all(void);
void	tlb_invalidate_globals(void);
void	tlb_invalidate_asids(uint32_t, uint32_t);
void	tlb_invalidate_addr(vaddr_t);
u_int	tlb_record_asids(u_long *, uint32_t);
int	tlb_update(vaddr_t, uint32_t);
void	tlb_enter(size_t, vaddr_t, uint32_t);
void	tlb_read_indexed(size_t, struct tlbmask *);
void	tlb_write_indexed(size_t, const struct tlbmask *);
void	wbflush(void);

#ifdef MIPS1
void	mips1_tlb_invalidate_all(void);

uint32_t tx3900_cp0_config_read(void);
#endif

#if (MIPS3 + MIPS4 + MIPS32 + MIPS32R2 + MIPS64 + MIPS64R2 + MIPS64_RMIXL + MIPS64R2_RMIXL) > 0
uint32_t mips3_cp0_compare_read(void);
void	mips3_cp0_compare_write(uint32_t);

uint32_t mips3_cp0_config_read(void);
void	mips3_cp0_config_write(uint32_t);

#if (MIPS32 + MIPS32R2 + MIPS64 + MIPS64R2 + MIPS64_RMIXL + MIPS64R2_RMIXL) > 0
uint32_t mipsNN_cp0_config1_read(void);
void	mipsNN_cp0_config1_write(uint32_t);
uint32_t mipsNN_cp0_config2_read(void);
void	mipsNN_cp0_config2_write(uint32_t);
uint32_t mipsNN_cp0_config3_read(void);
void	mipsNN_cp0_config3_write(uint32_t);
uint32_t mipsNN_cp0_config4_read(void);
void	mipsNN_cp0_config4_write(uint32_t);
uint32_t mipsNN_cp0_config5_read(void);
void	mipsNN_cp0_config5_write(uint32_t);
uint32_t mipsNN_cp0_config6_read(void);
void	mipsNN_cp0_config6_write(uint32_t);
uint32_t mipsNN_cp0_config7_read(void);
void	mipsNN_cp0_config7_write(uint32_t);
uint64_t mips64_cp0_config7_read(void);
void	mips64_cp0_config7_write(uint32_t);

uintptr_t mipsNN_cp0_watchlo_read(u_int);
void	mipsNN_cp0_watchlo_write(u_int, uintptr_t);
uint32_t mipsNN_cp0_watchhi_read(u_int);
void	mipsNN_cp0_watchhi_write(u_int, uint32_t);

#if (MIPS32R2 + MIPS64R2 + MIPS64R2_RMIXL) > 0
void	mipsNN_cp0_hwrena_write(uint32_t);
void	mipsNN_cp0_userlocal_write(void *);
#endif
#endif

uint32_t mips3_cp0_count_read(void);
void	mips3_cp0_count_write(uint32_t);

uint32_t mips3_cp0_random_read(void);

uint32_t mips3_cp0_wired_read(void);
void	mips3_cp0_wired_write(uint32_t);
void	mips3_cp0_pg_mask_write(uint32_t);

#if defined(__GNUC__) && !defined(__mips_o32)
static inline uint64_t
mips3_ld(const volatile uint64_t *va)
{
	uint64_t rv;
#if defined(__mips_o32)
	uint32_t sr;

	sr = mips_cp0_status_read();
	mips_cp0_status_write(sr & ~MIPS_SR_INT_IE);

	__asm volatile(
		".set push		\n\t"
		".set mips3		\n\t"
		".set noreorder		\n\t"
		".set noat		\n\t"
		"ld	%M0,0(%1)	\n\t"
		"dsll32	%L0,%M0,0	\n\t"
		"dsra32	%M0,%M0,0	\n\t"		/* high word */
		"dsra32	%L0,%L0,0	\n\t"		/* low word */
		".set pop"
	    : "=d"(rv)
	    : "r"(va));

	mips_cp0_status_write(sr);
#elif defined(_LP64)
	rv = *va;
#else
	__asm volatile("ld	%0,0(%1)" : "=d"(rv) : "r"(va));
#endif

	return rv;
}
static inline void
mips3_sd(volatile uint64_t *va, uint64_t v)
{
#if defined(__mips_o32)
	uint32_t sr;

	sr = mips_cp0_status_read();
	mips_cp0_status_write(sr & ~MIPS_SR_INT_IE);

	__asm volatile(
		".set push		\n\t"
		".set mips3		\n\t"
		".set noreorder		\n\t"
		".set noat		\n\t"
		"dsll32	%M0,%M0,0	\n\t"
		"dsll32	%L0,%L0,0	\n\t"
		"dsrl32	%L0,%L0,0	\n\t"
		"or	%0,%L0,%M0	\n\t"
		"sd	%0,0(%1)	\n\t"
		".set pop"
	    : "=d"(v) : "0"(v), "r"(va));

	mips_cp0_status_write(sr);
#elif defined(_LP64)
	*va = v;
#else
	__asm volatile("sd	%0,0(%1)" :: "r"(v), "r"(va));
#endif
}
#else
uint64_t mips3_ld(volatile uint64_t *va);
void	mips3_sd(volatile uint64_t *, uint64_t);
#endif	/* __GNUC__ && !__mips_o32 */
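
/*
 * Usage sketch (illustrative only, not part of this header): mips3_ld() and
 * mips3_sd() give C code access to a naturally aligned 64-bit doubleword
 * (e.g. a 64-bit device register) as a single 64-bit load or store.  The
 * function name below is made up for the example.
 */
#if 0
static inline void
example_update_64bit_reg(volatile uint64_t *regp, uint64_t set)
{
	/* Read-modify-write of a 64-bit register via mips3_ld()/mips3_sd(). */
	mips3_sd(regp, mips3_ld(regp) | set);
}
#endif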
#endif	/* (MIPS3 + MIPS4 + MIPS32 + MIPS32R2 + MIPS64 + MIPS64R2 + MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 */

#if (MIPS3 + MIPS4 + MIPS64 + MIPS64R2 + MIPS64_RMIXL + MIPS64R2_RMIXL) > 0
static __inline uint32_t	mips3_lw_a64(uint64_t addr) __unused;
static __inline void	mips3_sw_a64(uint64_t addr, uint32_t val) __unused;

static __inline uint32_t
mips3_lw_a64(uint64_t addr)
{
	uint32_t rv;
#if defined(__mips_o32)
	uint32_t sr;

	sr = mips_cp0_status_read();
	mips_cp0_status_write((sr & ~MIPS_SR_INT_IE) | MIPS3_SR_KX);

	__asm volatile (
		".set push		\n\t"
		".set mips3		\n\t"
		".set noreorder		\n\t"
		".set noat		\n\t"
		"dsll32	%M1,%M1,0	\n\t"
		"dsll32	%L1,%L1,0	\n\t"
		"dsrl32	%L1,%L1,0	\n\t"
		"or	%1,%M1,%L1	\n\t"
		"lw	%0, 0(%1)	\n\t"
		".set pop"
	    : "=r"(rv), "=d"(addr)
	    : "1"(addr)
	    );

	mips_cp0_status_write(sr);
#elif defined(__mips_n32)
	uint32_t sr = mips_cp0_status_read();
	mips_cp0_status_write((sr & ~MIPS_SR_INT_IE) | MIPS3_SR_KX);
	__asm volatile("lw	%0, 0(%1)" : "=r"(rv) : "d"(addr));
	mips_cp0_status_write(sr);
#elif defined(_LP64)
	rv = *(const uint32_t *)addr;
#else
#error unknown ABI
#endif
	return (rv);
}

static __inline void
mips3_sw_a64(uint64_t addr, uint32_t val)
{
#if defined(__mips_o32)
	uint32_t sr;

	sr = mips_cp0_status_read();
	mips_cp0_status_write((sr & ~MIPS_SR_INT_IE) | MIPS3_SR_KX);

	__asm volatile (
		".set push		\n\t"
		".set mips3		\n\t"
		".set noreorder		\n\t"
		".set noat		\n\t"
		"dsll32	%M0,%M0,0	\n\t"
		"dsll32	%L0,%L0,0	\n\t"
		"dsrl32	%L0,%L0,0	\n\t"
		"or	%0,%M0,%L0	\n\t"
		"sw	%1, 0(%0)	\n\t"
		".set pop"
	    : "=d"(addr): "r"(val), "0"(addr)
	    );

	mips_cp0_status_write(sr);
#elif defined(__mips_n32)
	uint32_t sr = mips_cp0_status_read();
	mips_cp0_status_write((sr & ~MIPS_SR_INT_IE) | MIPS3_SR_KX);
	__asm volatile("sw	%1, 0(%0)" :: "d"(addr), "r"(val));
	mips_cp0_status_write(sr);
#elif defined(_LP64)
	*(uint32_t *)addr = val;
#else
#error unknown ABI
#endif
}
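
/*
 * Usage sketch (illustrative only, not part of this header): accessing a
 * 32-bit device register through a full 64-bit XKPHYS address from a 32-bit
 * kernel.  The 0x9000000000000000 base is the standard uncached XKPHYS
 * window; the helper name and the idea of a "device register" are made up
 * for the example.  On 32-bit ABIs, mips3_sw_a64() temporarily sets SR_KX
 * and blocks interrupts around the store.
 */
#if 0
static inline void
example_poke_device(uint64_t pa, uint32_t val)
{
	/* Store through the uncached XKPHYS mapping of physical address pa. */
	mips3_sw_a64(0x9000000000000000ULL | pa, val);
}
#endif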
#endif	/* (MIPS3 + MIPS4 + MIPS64 + MIPS64R2 + MIPS64_RMIXL + MIPS64R2_RMIXL) > 0 */

/*
 * A vector with an entry for each MIPS-ISA-level-dependent locore
 * function, and macros which jump through it.
 */
typedef struct {
	void	(*ljv_cpu_switch_resume)(struct lwp *);
	intptr_t ljv_lwp_trampoline;
	intptr_t ljv_setfunc_trampoline;
	void	(*ljv_wbflush)(void);
	void	(*ljv_tlb_set_asid)(uint32_t pid);
	void	(*ljv_tlb_invalidate_asids)(uint32_t, uint32_t);
	void	(*ljv_tlb_invalidate_addr)(vaddr_t);
	void	(*ljv_tlb_invalidate_globals)(void);
	void	(*ljv_tlb_invalidate_all)(void);
	u_int	(*ljv_tlb_record_asids)(u_long *, uint32_t);
	int	(*ljv_tlb_update)(vaddr_t, uint32_t);
	void	(*ljv_tlb_enter)(size_t, vaddr_t, uint32_t);
	void	(*ljv_tlb_read_indexed)(size_t, struct tlbmask *);
	void	(*ljv_tlb_write_indexed)(size_t, const struct tlbmask *);
	lwp_t *	(*ljv_cpu_switchto)(lwp_t *, lwp_t *, bool);
} mips_locore_jumpvec_t;
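
/*
 * Dispatch sketch (illustrative only, not part of this header): the jump
 * vector holds, for each ISA-dependent locore entry point, the
 * implementation selected for the running CPU, so a call such as
 * tlb_invalidate_addr() ends up in the right per-ISA routine.  The wrapper
 * and its explicit vector argument below are hypothetical; they are not how
 * the kernel actually routes these calls.
 */
#if 0
static inline void
example_tlb_invalidate_addr(const mips_locore_jumpvec_t *jv, vaddr_t va)
{
	/* Indirect through the per-ISA function pointer. */
	(*jv->ljv_tlb_invalidate_addr)(va);
}
#endif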

typedef struct {
	u_int	(*lav_atomic_cas_uint)(volatile u_int *, u_int, u_int);
	u_long	(*lav_atomic_cas_ulong)(volatile u_long *, u_long, u_long);
	int	(*lav_ucas_uint)(volatile u_int *, u_int, u_int, u_int *);
	int	(*lav_ucas_ulong)(volatile u_long *, u_long, u_long, u_long *);
	void	(*lav_mutex_enter)(kmutex_t *);
	void	(*lav_mutex_exit)(kmutex_t *);
	void	(*lav_mutex_spin_enter)(kmutex_t *);
	void	(*lav_mutex_spin_exit)(kmutex_t *);
} mips_locore_atomicvec_t;

void	mips_set_wbflush(void (*)(void));
void	mips_wait_idle(void);

void	stacktrace(void);
void	logstacktrace(void);

struct cpu_info;
struct splsw;

struct locoresw {
	void		(*lsw_wbflush)(void);
	void		(*lsw_cpu_idle)(void);
	int		(*lsw_send_ipi)(struct cpu_info *, int);
	void		(*lsw_cpu_offline_md)(void);
	void		(*lsw_cpu_init)(struct cpu_info *);
	void		(*lsw_cpu_run)(struct cpu_info *);
	int		(*lsw_bus_error)(unsigned int);
};

struct mips_vmfreelist {
	paddr_t fl_start;
	paddr_t fl_end;
	int fl_freelist;
};
/*
 * The "active" locore-function vectors, and the pre-built atomic-op
 * vectors (LL/SC and RAS variants) the active one is chosen from.
 */
extern const mips_locore_atomicvec_t mips_llsc_locore_atomicvec;
extern const mips_locore_atomicvec_t mips_ras_locore_atomicvec;

extern mips_locore_atomicvec_t mips_locore_atomicvec;
extern mips_locore_jumpvec_t mips_locore_jumpvec;
extern struct locoresw mips_locoresw;
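
/*
 * Selection sketch (illustrative only, not part of this header): the active
 * atomic-op vector would typically be filled from the LL/SC variant on CPUs
 * that implement ll/sc, and from the restartable-atomic-sequence (RAS)
 * variant otherwise.  The "has_llsc" flag is a made-up stand-in for whatever
 * CPU capability test the port actually performs.
 */
#if 0
static void
example_choose_atomicvec(bool has_llsc)
{
	/* Structure assignment installs the whole set of entry points. */
	mips_locore_atomicvec =
	    has_llsc ? mips_llsc_locore_atomicvec : mips_ras_locore_atomicvec;
}
#endif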

struct splsw;
struct mips_vmfreelist;
struct phys_ram_seg;

void	mips_vector_init(const struct splsw *, bool);
void	mips_init_msgbuf(void);
void	mips_init_lwp0_uarea(void);
void	mips_page_physload(vaddr_t, vaddr_t,
	    const struct phys_ram_seg *, size_t,
	    const struct mips_vmfreelist *, size_t);

paddr_t	kvtophys(vaddr_t);

extern struct phys_ram_seg mem_clusters[];
extern u_int mem_cluster_cnt;

/*
 * Helper routines for kernel coredumps.
 */
vaddr_t	mips_kcore_window_map(paddr_t, vsize_t *);
#ifndef _LP64
bool	mips_kcore_window_vtophys(vaddr_t, paddr_t *);
#endif


/*
 * CPU identification, from PRID register.
 */
#define MIPS_PRID_REV(x)	(((x) >>  0) & 0x00ff)
#define MIPS_PRID_IMPL(x)	(((x) >>  8) & 0x00ff)

/* pre-MIPS32/64 */
#define MIPS_PRID_RSVD(x)	(((x) >> 16) & 0xffff)
#define MIPS_PRID_REV_MIN(x)	((MIPS_PRID_REV(x) >> 0) & 0x0f)
#define MIPS_PRID_REV_MAJ(x)	((MIPS_PRID_REV(x) >> 4) & 0x0f)

/* MIPS32/64 */
#define MIPS_PRID_CID(x)	(((x) >> 16) & 0x00ff)	/* Company ID */
#define     MIPS_PRID_CID_PREHISTORIC	0x00	/* Not MIPS32/64 */
#define     MIPS_PRID_CID_MTI		0x01	/* MIPS Technologies, Inc. */
#define     MIPS_PRID_CID_BROADCOM	0x02	/* Broadcom */
#define     MIPS_PRID_CID_ALCHEMY	0x03	/* Alchemy Semiconductor */
#define     MIPS_PRID_CID_SIBYTE	0x04	/* SiByte */
#define     MIPS_PRID_CID_SANDCRAFT	0x05	/* SandCraft */
#define     MIPS_PRID_CID_PHILIPS	0x06	/* Philips */
#define     MIPS_PRID_CID_TOSHIBA	0x07	/* Toshiba */
#define     MIPS_PRID_CID_LSI		0x08	/* LSI */
				/*	0x09	unannounced */
				/*	0x0a	unannounced */
#define     MIPS_PRID_CID_LEXRA		0x0b	/* Lexra */
#define     MIPS_PRID_CID_RMI		0x0c	/* RMI / NetLogic */
#define MIPS_PRID_COPTS(x)	(((x) >> 24) & 0x00ff)	/* Company Options */
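
/*
 * Decoding sketch (illustrative only, not part of this header): splitting a
 * PRID value read from CP0 into its fields.  The function name is made up,
 * and any sample value passed in is used purely to show the field positions
 * (bits 23..16 company ID, 15..8 implementation, 7..0 revision).
 */
#if 0
static void
example_decode_prid(uint32_t prid)
{
	uint32_t cid  = MIPS_PRID_CID(prid);	/* company ID (MIPS32/64) */
	uint32_t impl = MIPS_PRID_IMPL(prid);	/* processor implementation */
	uint32_t rev  = MIPS_PRID_REV(prid);	/* revision */

	(void)cid; (void)impl; (void)rev;
}
#endif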

#ifdef _KERNEL
/*
 * Global variables used to communicate CPU type, and parameters
 * such as cache size, from locore to higher-level code (e.g., pmap).
 */
void mips_pagecopy(void *dst, void *src);
void mips_pagezero(void *dst);

#ifdef __HAVE_MIPS_MACHDEP_CACHE_CONFIG
void mips_machdep_cache_config(void);
#endif

/*
 * trapframe argument passed to trap()
 */

#if 0
#define TF_AST		0		/* really zero */
#define TF_V0		_R_V0
#define TF_V1		_R_V1
#define TF_A0		_R_A0
#define TF_A1		_R_A1
#define TF_A2		_R_A2
#define TF_A3		_R_A3
#define TF_T0		_R_T0
#define TF_T1		_R_T1
#define TF_T2		_R_T2
#define TF_T3		_R_T3

#if defined(__mips_n32) || defined(__mips_n64)
#define TF_A4		_R_A4
#define TF_A5		_R_A5
#define TF_A6		_R_A6
#define TF_A7		_R_A7
#else
#define TF_T4		_R_T4
#define TF_T5		_R_T5
#define TF_T6		_R_T6
#define TF_T7		_R_T7
#endif /* __mips_n32 || __mips_n64 */

#define TF_TA0		_R_TA0
#define TF_TA1		_R_TA1
#define TF_TA2		_R_TA2
#define TF_TA3		_R_TA3

#define TF_T8		_R_T8
#define TF_T9		_R_T9

#define TF_RA		_R_RA
#define TF_SR		_R_SR
#define TF_MULLO	_R_MULLO
#define TF_MULHI	_R_MULHI
#define TF_EPC		_R_PC		/* may be changed by trap() call */

#define	TF_NREGS	(sizeof(struct reg) / sizeof(mips_reg_t))
#endif

struct trapframe {
	struct reg tf_registers;
#define	tf_regs	tf_registers.r_regs
	uint32_t   tf_ppl;		/* previous priority level */
	mips_reg_t tf_pad;		/* for 8-byte alignment */
};

CTASSERT(sizeof(struct trapframe) % (4*sizeof(mips_reg_t)) == 0);

/*
 * Stack frame for kernel traps.  Four args are passed in registers.
 * A trapframe is pointed to by the fifth arg, and a dummy sixth argument
 * is used to avoid alignment problems.
 */

struct kernframe {
#if defined(__mips_o32) || defined(__mips_o64)
	register_t cf_args[4 + 1];
#if defined(__mips_o32)
	register_t cf_pad;		/* (for 8 byte alignment) */
#endif
#endif
#if defined(__mips_n32) || defined(__mips_n64)
	register_t cf_pad[2];		/* for 16 byte alignment */
#endif
	register_t cf_sp;
	register_t cf_ra;
	struct trapframe cf_frame;
};

CTASSERT(sizeof(struct kernframe) % (2*sizeof(mips_reg_t)) == 0);

/*
 * PRocessor IDentity TABle
 */

struct pridtab {
	int	cpu_cid;
	int	cpu_pid;
	int	cpu_rev;	/* -1 == wildcard */
	int	cpu_copts;	/* -1 == wildcard */
	int	cpu_isa;	/* -1 == probed (mips32/mips64) */
	int	cpu_ntlb;	/* -1 == unknown, 0 == probed */
	int	cpu_flags;
	u_int	cpu_cp0flags;	/* presence of some cp0 regs */
	u_int	cpu_cidflags;	/* company-specific flags */
	const char	*cpu_name;
};

/*
 * bitfield defines for cpu_cp0flags
 */
#define  MIPS_CP0FL_USE		__BIT(0)	/* use these flags */
#define  MIPS_CP0FL_ECC		__BIT(1)
#define  MIPS_CP0FL_CACHE_ERR	__BIT(2)
#define  MIPS_CP0FL_EIRR	__BIT(3)
#define  MIPS_CP0FL_EIMR	__BIT(4)
#define  MIPS_CP0FL_EBASE	__BIT(5)
#define  MIPS_CP0FL_CONFIG	__BIT(6)
#define  MIPS_CP0FL_CONFIG1	__BIT(7)
#define  MIPS_CP0FL_CONFIG2	__BIT(8)
#define  MIPS_CP0FL_CONFIG3	__BIT(9)
#define  MIPS_CP0FL_CONFIG4	__BIT(10)
#define  MIPS_CP0FL_CONFIG5	__BIT(11)
#define  MIPS_CP0FL_CONFIG6	__BIT(12)
#define  MIPS_CP0FL_CONFIG7	__BIT(13)
#define  MIPS_CP0FL_USERLOCAL	__BIT(14)
#define  MIPS_CP0FL_HWRENA	__BIT(15)

/*
 * cpu_cidflags defines, by company
 */
/*
 * RMI company-specific cpu_cidflags
 */
#define MIPS_CIDFL_RMI_TYPE		__BITS(2,0)
# define  CIDFL_RMI_TYPE_XLR		0
# define  CIDFL_RMI_TYPE_XLS		1
# define  CIDFL_RMI_TYPE_XLP		2
#define MIPS_CIDFL_RMI_THREADS_MASK	__BITS(6,3)
#define MIPS_CIDFL_RMI_CORES_MASK	__BITS(10,7)
# define LOG2_1	0
# define LOG2_2	1
# define LOG2_4	2
# define LOG2_8	3
# define MIPS_CIDFL_RMI_CPUS(ncores, nthreads)				\
		(__SHIFTIN(LOG2_ ## ncores, MIPS_CIDFL_RMI_CORES_MASK)	\
		|__SHIFTIN(LOG2_ ## nthreads, MIPS_CIDFL_RMI_THREADS_MASK))
# define MIPS_CIDFL_RMI_NTHREADS(cidfl)					\
		(1 << __SHIFTOUT((cidfl), MIPS_CIDFL_RMI_THREADS_MASK))
# define MIPS_CIDFL_RMI_NCORES(cidfl)					\
		(1 << __SHIFTOUT((cidfl), MIPS_CIDFL_RMI_CORES_MASK))
#define MIPS_CIDFL_RMI_L2SZ_MASK	__BITS(14,11)
# define RMI_L2SZ_256KB	 0
# define RMI_L2SZ_512KB  1
# define RMI_L2SZ_1MB    2
# define RMI_L2SZ_2MB    3
# define RMI_L2SZ_4MB    4
# define MIPS_CIDFL_RMI_L2(l2sz)					\
		__SHIFTIN(RMI_L2SZ_ ## l2sz, MIPS_CIDFL_RMI_L2SZ_MASK)
# define MIPS_CIDFL_RMI_L2SZ(cidfl)					\
		((256*1024) << __SHIFTOUT((cidfl), MIPS_CIDFL_RMI_L2SZ_MASK))
#define MIPS_CIDFL_RMI_L3SZ_MASK	__BITS(18,15)
# define RMI_L3SZ_256KB	 0
# define RMI_L3SZ_512KB  1
# define RMI_L3SZ_1MB    2
# define RMI_L3SZ_2MB    3
# define RMI_L3SZ_4MB    4
# define MIPS_CIDFL_RMI_L3(l3sz)					\
		__SHIFTIN(RMI_L3SZ_ ## l3sz, MIPS_CIDFL_RMI_L3SZ_MASK)
# define MIPS_CIDFL_RMI_L3SZ(cidfl)					\
		((256*1024) << __SHIFTOUT((cidfl), MIPS_CIDFL_RMI_L3SZ_MASK))
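
/*
 * Encoding sketch (illustrative only, not part of this header): composing
 * the RMI cpu_cidflags for a hypothetical 8-core, 4-thread part with 1MB of
 * L2, then decoding the values back out.  The flag combination is invented
 * for the example and does not describe any particular pridtab entry.
 */
#if 0
static void
example_rmi_cidflags(void)
{
	u_int cidflags = MIPS_CIDFL_RMI_CPUS(8, 4) | MIPS_CIDFL_RMI_L2(1MB);

	/* Decodes back to 8 cores, 4 threads/core, 1MB of L2. */
	u_int ncores   = MIPS_CIDFL_RMI_NCORES(cidflags);
	u_int nthreads = MIPS_CIDFL_RMI_NTHREADS(cidflags);
	u_int l2bytes  = MIPS_CIDFL_RMI_L2SZ(cidflags);

	(void)ncores; (void)nthreads; (void)l2bytes;
}
#endif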

#endif	/* _KERNEL */
#endif	/* _MIPS_LOCORE_H */