exception_vector.S revision 1.24
1/* $NetBSD: exception_vector.S,v 1.24 2007/03/15 23:13:59 uwe Exp $ */ 2 3/*- 4 * Copyright (c) 2002 The NetBSD Foundation, Inc. 5 * All rights reserved. 6 * 7 * Redistribution and use in source and binary forms, with or without 8 * modification, are permitted provided that the following conditions 9 * are met: 10 * 1. Redistributions of source code must retain the above copyright 11 * notice, this list of conditions and the following disclaimer. 12 * 2. Redistributions in binary form must reproduce the above copyright 13 * notice, this list of conditions and the following disclaimer in the 14 * documentation and/or other materials provided with the distribution. 15 * 3. All advertising materials mentioning features or use of this software 16 * must display the following acknowledgement: 17 * This product includes software developed by the NetBSD 18 * Foundation, Inc. and its contributors. 19 * 4. Neither the name of The NetBSD Foundation nor the names of its 20 * contributors may be used to endorse or promote products derived 21 * from this software without specific prior written permission. 22 * 23 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS 24 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED 25 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 26 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS 27 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 28 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 29 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 30 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 31 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 32 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 33 * POSSIBILITY OF SUCH DAMAGE. 
 */

#include "opt_cputype.h"
#include "opt_ddb.h"
#include "assym.h"

#include <sh3/param.h>
#include <sh3/locore.h>
#include <sh3/exception.h>
#include <sh3/ubcreg.h>
#include <sh3/mmu_sh3.h>
#include <sh3/mmu_sh4.h>

/*
 * Align vectors more strictly here (where we don't really care) so
 * that .align 5 (i.e. 32B cache line) before data block does the
 * right thing w.r.t. final destinations after vectors are copied.
 */
#define _ALIGN_TEXT	.align 5
#include <sh3/asm.h>

__KERNEL_RCSID(0, "$NetBSD: exception_vector.S,v 1.24 2007/03/15 23:13:59 uwe Exp $")


/*
 * Exception vectors.
 * The following routines are copied to vector addresses.
 *	sh_vector_generic:	VBR + 0x100
 *	sh_vector_tlbmiss:	VBR + 0x400
 *	sh_vector_interrupt:	VBR + 0x600
 */

#define VECTOR_END_MARKER(sym)			\
	.globl	_C_LABEL(sym);			\
	_C_LABEL(sym):


/*
 * LINTSTUB: Var: char sh_vector_generic[1];
 *
 * void sh_vector_generic(void);
 *	Copied to VBR+0x100.  This code should be position independent
 *	and maximum 768 bytes long (== 0x400 - 0x100).
77 */ 78NENTRY(sh_vector_generic) 79 __EXCEPTION_ENTRY 80 __INTR_MASK(r0, r1) 81 /* Identify exception cause */ 82 MOV (EXPEVT, r0) 83 mov.l @r0, r0 84 mov.l r0, @(TF_EXPEVT, r14) /* tf->tf_expevt = EXPEVT */ 85 /* Get curlwp */ 86 mov.l .Lg_curlwp, r1 87 mov.l @r1, r4 /* 1st arg */ 88 /* Get TEA */ 89 MOV (TEA, r1) 90 mov.l @r1, r6 /* 3rd arg */ 91 /* Check TLB exception or not */ 92 mov.l .Lg_TLB_PROT_ST, r1 93 cmp/hi r1, r0 94 bt 1f 95 96 /* tlb_exception(curlwp, tf, TEA); */ 97 __EXCEPTION_UNBLOCK(r0, r1) 98 mov.l .Lg_tlb_exception, r0 99 jsr @r0 100 mov r14, r5 /* 2nd arg */ 101 bra 2f 102 nop 103 104 /* general_exception(curlwp, tf, TEA); */ 1051: mov r4, r8 106#ifdef DDB 107 mov #0, r2 108 MOV (BBRA, r1) 109 mov.w r2, @r1 /* disable UBC */ 110 mov.l r2, @(TF_UBC, r14) /* clear tf->tf_ubc */ 111#endif /* DDB */ 112 __EXCEPTION_UNBLOCK(r0, r1) 113 mov.l .Lg_general_exception, r0 114 jsr @r0 115 mov r14, r5 /* 2nd arg */ 116 117 /* Check for ASTs on exit to user mode. */ 118 mov r8, r4 119 mov.l .Lg_ast, r0 120 jsr @r0 121 mov r14, r5 122#ifdef DDB /* BBRA = tf->tf_ubc */ 123 __EXCEPTION_BLOCK(r0, r1) 124 mov.l @(TF_UBC, r14), r0 125 MOV (BBRA, r1) 126 mov.w r0, @r1 127#endif /* DDB */ 1282: __EXCEPTION_RETURN 129 130 .align 5 131.Lg_curlwp: .long _C_LABEL(curlwp) 132REG_SYMBOL(EXPEVT) 133REG_SYMBOL(BBRA) 134REG_SYMBOL(TEA) 135.Lg_tlb_exception: .long _C_LABEL(tlb_exception) 136.Lg_general_exception: .long _C_LABEL(general_exception) 137.Lg_ast: .long _C_LABEL(ast) 138.Lg_TLB_PROT_ST: .long EXPEVT_TLB_PROT_ST 139 140/* LINTSTUB: Var: char sh_vector_generic_end[1]; */ 141VECTOR_END_MARKER(sh_vector_generic_end) 142 SET_ENTRY_SIZE(sh_vector_generic) 143 144 145#ifdef SH3 146/* 147 * LINTSTUB: Var: char sh3_vector_tlbmiss[1]; 148 * 149 * void sh3_vector_tlbmiss(void); 150 * Copied to VBR+0x400. This code should be position independent 151 * and maximum 512 bytes long (== 0x600 - 0x400). 
152 */ 153NENTRY(sh3_vector_tlbmiss) 154 __EXCEPTION_ENTRY 155 mov #(SH3_TEA & 0xff), r0 156 mov.l @r0, r6 ! 3rd arg: va = TEA 157 158 !! if kernel stack is in P3, handle it here fast 159#if !defined(P1_STACK) 160 mov.l .L3_VPN_MASK, r0 161 and r6, r0 ! vpn 162 tst r0, r0 163 bt 6f ! punt if vpn is 0 164 165 mov.l .L3_CURUPTE, r1 166 mov.l @r1, r1 ! upte = &l->l_md.md_upte[0] 167 mov #UPAGES, r3 ! loop limit 168 mov #1, r2 ! loop count 169 170 !! for each page of u-area 1714: mov.l @r1+, r7 ! upte->addr: u-area VPN 172 cmp/eq r7, r0 ! if (vpn == upte->addr) 173 bt 5f ! goto found; 174 add #4, r1 ! skip, upte->data; point to next md_upte[i] 175 cmp/eq r2, r3 176 bf/s 4b 177 add #1, r2 178 179 !! not a page of u-area, proceed to handler 180 bra 7f ! pull insn at 6f into delay slot 181 mov #(SH3_EXPEVT & 0xff), r0 182 183 !! load entry for this uarea page into tlb 1845: mov.l @r1, r2 ! upte->data: u-area PTE 185 mov #(SH3_PTEL & 0xff), r1 186 mov.l r2, @r1 187 188 mov #(SH3_PTEH & 0xff), r1 189 mov.l @r1, r2 190 mov.l .L3_VPN_MASK, r0 191 and r2, r0 192 mov.l r0, @r1 ! ASID 0 193 194 ldtlb 195 196 bra 99f ! return 197 mov.l r2, @r1 ! restore ASID 198#endif /* !P1_STACK */ 199 200 !! tlb_exception(curlwp, trapframe, tea) 2016: mov #(SH3_EXPEVT & 0xff), r0 2027: mov.l @r0, r0 203 mov.l r0, @(TF_EXPEVT, r14) ! tf->tf_expevt = EXPEVT 204 mov.l .L3_curlwp, r0 205 mov.l @r0, r4 ! 1st arg: curlwp 206 __INTR_MASK(r0, r1) 207 __EXCEPTION_UNBLOCK(r0, r1) 208 mov.l .L3_tlb_exception, r0 209 jsr @r0 210 mov r14, r5 ! 
2nd arg: trap frame 21199: __EXCEPTION_RETURN 212 213 .align 5 214.L3_curlwp: .long _C_LABEL(curlwp) 215.L3_tlb_exception: .long _C_LABEL(tlb_exception) 216.L3_VPN_MASK: .long 0xfffff000 217.L3_CURUPTE: .long _C_LABEL(curupte) 218 219/* LINTSTUB: Var: char sh3_vector_tlbmiss_end[1]; */ 220VECTOR_END_MARKER(sh3_vector_tlbmiss_end) 221 SET_ENTRY_SIZE(sh3_vector_tlbmiss) 222#endif /* SH3 */ 223 224 225#ifdef SH4 226/* 227 * LINTSTUB: Var: char sh4_vector_tlbmiss[1]; 228 * 229 * void sh4_vector_tlbmiss(void); 230 * Copied to VBR+0x400. This code should be position independent 231 * and maximum 512 bytes long (== 0x600 - 0x400). 232 */ 233NENTRY(sh4_vector_tlbmiss) 234 __EXCEPTION_ENTRY 235 mov.l .L4_TEA4, r0 236 mov.l @r0, r6 237 mov.l .L4_EXPEVT4, r0 238 mov.l @r0, r0 239 mov.l r0, @(TF_EXPEVT, r14) /* tf->tf_expevt = EXPEVT */ 240 mov.l .L4_curlwp, r0 241 mov.l @r0, r4 /* 1st arg */ 242 __INTR_MASK(r0, r1) 243 __EXCEPTION_UNBLOCK(r0, r1) 244 mov.l .L4_tlb_exception, r0 245 jsr @r0 246 mov r14, r5 /* 2nd arg */ 247 __EXCEPTION_RETURN 248 249 .align 5 250.L4_tlb_exception: .long _C_LABEL(tlb_exception) 251.L4_curlwp: .long _C_LABEL(curlwp) 252.L4_EXPEVT4: .long SH4_EXPEVT 253.L4_TEA4: .long SH4_TEA 254 255/* LINTSTUB: Var: char sh4_vector_tlbmiss_end[1]; */ 256VECTOR_END_MARKER(sh4_vector_tlbmiss_end) 257 SET_ENTRY_SIZE(sh4_vector_tlbmiss) 258#endif /* SH4 */ 259 260 261/* 262 * LINTSTUB: Var: char sh_vector_interrupt[1]; 263 * 264 * void sh_vector_interrupt(void); 265 * Copied to VBR+0x600. This code should be position independent. 
266 */ 267NENTRY(sh_vector_interrupt) 268 __EXCEPTION_ENTRY 269 xor r0, r0 270 mov.l r0, @(TF_EXPEVT, r14) /* (for debug) */ 271 stc r0_bank, r6 /* ssp */ 272 /* Enable exceptions for P3 access */ 273 __INTR_MASK(r0, r1) 274 __EXCEPTION_UNBLOCK(r0, r1) 275 /* ++uvmexp.intrs */ 276 mov.l .Li_uvmexp_intrs, r0 277 mov.l @r0, r1 278 add #1 r1 279 mov.l r1, @r0 280 /* Dispatch interrupt handler */ 281 mov.l .Li_intc_intr, r0 282 jsr @r0 /* intc_intr(ssr, spc, ssp) */ 283 nop 284 /* Check for ASTs on exit to user mode. */ 285 mov.l .Li_curlwp, r0 286 mov.l @r0, r4 /* 1st arg */ 287 mov.l .Li_ast, r0 288 jsr @r0 289 mov r14, r5 /* 2nd arg */ 290 __EXCEPTION_RETURN 291 292 .align 5 293.Li_curlwp: .long _C_LABEL(curlwp) 294.Li_intc_intr: .long _C_LABEL(intc_intr) 295.Li_ast: .long _C_LABEL(ast) 296.Li_uvmexp_intrs: .long _C_LABEL(uvmexp) + UVMEXP_INTRS 297 298/* LINTSTUB: Var: char sh_vector_interrupt_end[1]; */ 299VECTOR_END_MARKER(sh_vector_interrupt_end) 300 SET_ENTRY_SIZE(sh_vector_interrupt) 301