exception_vector.S revision 1.27
/*	$NetBSD: exception_vector.S,v 1.27 2007/03/17 17:23:31 uwe Exp $	*/

/*-
 * Copyright (c) 2002 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *        This product includes software developed by the NetBSD
 *        Foundation, Inc. and its contributors.
 * 4. Neither the name of The NetBSD Foundation nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include "opt_cputype.h"
#include "opt_ddb.h"
#include "assym.h"

#include <sh3/param.h>
#include <sh3/locore.h>
#include <sh3/exception.h>
#include <sh3/ubcreg.h>
#include <sh3/mmu_sh3.h>
#include <sh3/mmu_sh4.h>

/*
 * Align vectors more strictly here (where we don't really care) so
 * that .align 5 (i.e. 32B cache line) before data block does the
 * right thing w.r.t. final destinations after vectors are copied.
 */
#define _ALIGN_TEXT	.align 5
#include <sh3/asm.h>

__KERNEL_RCSID(0, "$NetBSD: exception_vector.S,v 1.27 2007/03/17 17:23:31 uwe Exp $")


/*
 * Exception vectors.
 * The following routines are copied to vector addresses.
 *	sh_vector_generic:	VBR + 0x100
 *	sh_vector_tlbmiss:	VBR + 0x400
 *	sh_vector_interrupt:	VBR + 0x600
 */

#define VECTOR_END_MARKER(sym)			\
	.globl	_C_LABEL(sym);			\
	_C_LABEL(sym):

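The comment above spells out where each vector ends up relative to VBR. For orientation only, a minimal C sketch of the installation step is shown below; the real copy is done elsewhere in the SH3 machine-dependent startup code, and the function name setup_vbr_copies() here is hypothetical.

/*
 * Illustration only (not part of this file): install the
 * position-independent vector stubs at their fixed offsets from VBR,
 * using the *_end markers emitted by VECTOR_END_MARKER to size each copy.
 */
#include <string.h>

extern char sh_vector_generic[], sh_vector_generic_end[];
extern char sh_vector_interrupt[], sh_vector_interrupt_end[];

static void
setup_vbr_copies(char *vbr)	/* vbr: value programmed into the VBR register */
{
	memcpy(vbr + 0x100, sh_vector_generic,
	    sh_vector_generic_end - sh_vector_generic);
	memcpy(vbr + 0x600, sh_vector_interrupt,
	    sh_vector_interrupt_end - sh_vector_interrupt);
	/*
	 * The TLB miss vector at VBR+0x400 is copied the same way,
	 * picking sh3_vector_tlbmiss or sh4_vector_tlbmiss to match
	 * the CPU at hand.
	 */
}
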
/*
 * LINTSTUB: Var: char sh_vector_generic[1];
 *
 * void sh_vector_generic(void);
 *	Copied to VBR+0x100.  This code should be position independent
 *	and maximum 768 bytes long (== 0x400 - 0x100).
 */
NENTRY(sh_vector_generic)
	__EXCEPTION_ENTRY
	__INTR_MASK(r0, r1)
	/* Identify exception cause */
	MOV	(EXPEVT, r0)
	mov.l	@r0, r0
	mov.l	r0, @(TF_EXPEVT, r14)	/* tf->tf_expevt = EXPEVT */
	/* Get curlwp */
	mov.l	.Lg_curlwp, r1
	mov.l	@r1, r4			/* 1st arg */
	/* Get TEA */
	MOV	(TEA, r1)
	mov.l	@r1, r6			/* 3rd arg */
	/* Check TLB exception or not */
	mov.l	.Lg_TLB_PROT_ST, r1
	cmp/hi	r1, r0
	bt	1f

	/* tlb_exception(curlwp, tf, TEA); */
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	.Lg_tlb_exception, r0
	jsr	@r0
	mov	r14, r5			/* 2nd arg */
	bra	2f
	nop

	/* general_exception(curlwp, tf, TEA); */
1:	mov	r4, r8
#ifdef DDB
	mov	#0, r2
	MOV	(BBRA, r1)
	mov.w	r2, @r1			/* disable UBC */
	mov.l	r2, @(TF_UBC, r14)	/* clear tf->tf_ubc */
#endif /* DDB */
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	.Lg_general_exception, r0
	jsr	@r0
	mov	r14, r5			/* 2nd arg */

	/* Check for ASTs on exit to user mode. */
	mov	r8, r4
	mov.l	.Lg_ast, r0
	jsr	@r0
	mov	r14, r5
#ifdef DDB	/* BBRA = tf->tf_ubc */
	__EXCEPTION_BLOCK(r0, r1)
	mov.l	@(TF_UBC, r14), r0
	MOV	(BBRA, r1)
	mov.w	r0, @r1
#endif /* DDB */
2:	__EXCEPTION_RETURN

	.align	5
.Lg_curlwp:		.long	_C_LABEL(curlwp)
REG_SYMBOL(EXPEVT)
REG_SYMBOL(BBRA)
REG_SYMBOL(TEA)
.Lg_tlb_exception:	.long	_C_LABEL(tlb_exception)
.Lg_general_exception:	.long	_C_LABEL(general_exception)
.Lg_ast:		.long	_C_LABEL(ast)
.Lg_TLB_PROT_ST:	.long	EXPEVT_TLB_PROT_ST

/* LINTSTUB: Var: char sh_vector_generic_end[1]; */
VECTOR_END_MARKER(sh_vector_generic_end)
	SET_ENTRY_SIZE(sh_vector_generic)

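The cmp/hi against EXPEVT_TLB_PROT_ST above is the whole dispatch policy of this vector: TLB-related exception codes go to tlb_exception() and bypass the AST check, everything else goes to general_exception() followed by ast() on the way back to user mode. A rough C rendering of that control flow follows; the helper name generic_dispatch() is hypothetical and the prototypes are reconstructed from the register usage in the asm, so treat this as a sketch only.

/*
 * Hypothetical C rendering of sh_vector_generic's dispatch.
 * tlb_exception(), general_exception() and ast() are the real C
 * handlers this vector jumps to.
 */
#include <sh3/exception.h>	/* EXPEVT_TLB_PROT_ST */

struct lwp;
struct trapframe;

extern void tlb_exception(struct lwp *, struct trapframe *, unsigned long);
extern void general_exception(struct lwp *, struct trapframe *, unsigned long);
extern void ast(struct lwp *, struct trapframe *);

static void
generic_dispatch(struct lwp *l, struct trapframe *tf,
    unsigned long expevt, unsigned long tea)
{
	/* tf->tf_expevt = expevt;  -- stored by the asm before dispatching */
	if (expevt > EXPEVT_TLB_PROT_ST) {	/* cmp/hi r1, r0; bt 1f */
		general_exception(l, tf, tea);
		ast(l, tf);		/* check for ASTs on exit to user mode */
	} else {
		tlb_exception(l, tf, tea);	/* "bra 2f" skips the AST check */
	}
}
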
#ifdef SH3
/*
 * LINTSTUB: Var: char sh3_vector_tlbmiss[1];
 *
 * void sh3_vector_tlbmiss(void);
 *	Copied to VBR+0x400.  This code should be position independent
 *	and maximum 512 bytes long (== 0x600 - 0x400).
 */
NENTRY(sh3_vector_tlbmiss)
	__EXCEPTION_ENTRY
	mov	#(SH3_TEA & 0xff), r0
	mov.l	@r0, r6			! 3rd arg: va = TEA

	!! if kernel stack is in P3, handle it here fast
#if !defined(P1_STACK)
	mov.l	.L3_VPN_MASK, r0
	and	r6, r0			! vpn
	tst	r0, r0
	bt	6f			! punt if vpn is 0

	mov.l	.L3_curupte, r1
	mov.l	@r1, r1			! upte = &l->l_md.md_upte[0]
	mov	#UPAGES, r3		! loop limit

	!! for each page of u-area
4:	mov.l	@r1+, r7		! upte->addr: u-area VPN
	cmp/eq	r7, r0			! if (vpn == upte->addr)
	bt/s	5f			!     goto found;
	dt	r3
	bf/s	4b
	add	#4, r1			! skip upte->data; point to next md_upte[i]

	!! not a page of u-area, proceed to handler
	bra	7f			! pull insn at 6f into delay slot
	mov	#(SH3_EXPEVT & 0xff), r0

	!! load entry for this uarea page into tlb
5:	mov	#(SH3_PTEH & 0xff), r2
	mov.l	@r1, r1			! md_upte[i]->data
	mov.l	@r2, r3			! save ASID
	mov.l	r0, @r2			! SH3_PTEH = { VPN, ASID = 0 }
	mov.l	r1, @(4, r2)		! SH3_PTEL = md_upte[i]->data

	ldtlb

	bra	99f			! return
	mov.l	r3, @r2			! restore ASID
#endif /* !P1_STACK */

	!! tlb_exception(curlwp, trapframe, tea)
6:	mov	#(SH3_EXPEVT & 0xff), r0
7:	mov.l	@r0, r0
	mov.l	r0, @(TF_EXPEVT, r14)	! tf->tf_expevt = EXPEVT
	mov.l	.L3_curlwp, r0
	mov.l	@r0, r4			! 1st arg: curlwp
	__INTR_MASK(r0, r1)
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	.L3_tlb_exception, r0
	jsr	@r0
	mov	r14, r5			! 2nd arg: trap frame
99:	__EXCEPTION_RETURN

	.align	5
.L3_curlwp:		.long	_C_LABEL(curlwp)
.L3_tlb_exception:	.long	_C_LABEL(tlb_exception)
.L3_VPN_MASK:		.long	0xfffff000
.L3_curupte:		.long	_C_LABEL(curupte)

/* LINTSTUB: Var: char sh3_vector_tlbmiss_end[1]; */
VECTOR_END_MARKER(sh3_vector_tlbmiss_end)
	SET_ENTRY_SIZE(sh3_vector_tlbmiss)
#endif /* SH3 */


#ifdef SH4
/*
 * LINTSTUB: Var: char sh4_vector_tlbmiss[1];
 *
 * void sh4_vector_tlbmiss(void);
 *	Copied to VBR+0x400.  This code should be position independent
 *	and maximum 512 bytes long (== 0x600 - 0x400).
 */
NENTRY(sh4_vector_tlbmiss)
	__EXCEPTION_ENTRY
	mov.l	.L4_TEA4, r0
	mov.l	@r0, r6
	mov.l	.L4_EXPEVT4, r0
	mov.l	@r0, r0
	mov.l	r0, @(TF_EXPEVT, r14)	/* tf->tf_expevt = EXPEVT */
	mov.l	.L4_curlwp, r0
	mov.l	@r0, r4			/* 1st arg */
	__INTR_MASK(r0, r1)
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	.L4_tlb_exception, r0
	jsr	@r0
	mov	r14, r5			/* 2nd arg */
	__EXCEPTION_RETURN

	.align	5
.L4_tlb_exception:	.long	_C_LABEL(tlb_exception)
.L4_curlwp:		.long	_C_LABEL(curlwp)
.L4_EXPEVT4:		.long	SH4_EXPEVT
.L4_TEA4:		.long	SH4_TEA

/* LINTSTUB: Var: char sh4_vector_tlbmiss_end[1]; */
VECTOR_END_MARKER(sh4_vector_tlbmiss_end)
	SET_ENTRY_SIZE(sh4_vector_tlbmiss)
#endif /* SH4 */


/*
 * LINTSTUB: Var: char sh_vector_interrupt[1];
 *
 * void sh_vector_interrupt(void);
 *	Copied to VBR+0x600.  This code should be position independent.
 */
NENTRY(sh_vector_interrupt)
	__EXCEPTION_ENTRY
	xor	r0, r0
	mov.l	r0, @(TF_EXPEVT, r14)	/* (for debug) */
	stc	r0_bank, r6		/* ssp */
	/* Enable exceptions for P3 access */
	__INTR_MASK(r0, r1)
	__EXCEPTION_UNBLOCK(r0, r1)
	/* ++uvmexp.intrs */
	mov.l	.Li_uvmexp_intrs, r0
	mov.l	@r0, r1
	add	#1, r1
	mov.l	r1, @r0
	/* Dispatch interrupt handler */
	mov.l	.Li_intc_intr, r0
	jsr	@r0			/* intc_intr(ssr, spc, ssp) */
	nop
	/* Check for ASTs on exit to user mode. */
	mov.l	.Li_curlwp, r0
	mov.l	@r0, r4			/* 1st arg */
	mov.l	.Li_ast, r0
	jsr	@r0
	mov	r14, r5			/* 2nd arg */
	__EXCEPTION_RETURN

	.align	5
.Li_curlwp:		.long	_C_LABEL(curlwp)
.Li_intc_intr:		.long	_C_LABEL(intc_intr)
.Li_ast:		.long	_C_LABEL(ast)
.Li_uvmexp_intrs:	.long	_C_LABEL(uvmexp) + UVMEXP_INTRS

/* LINTSTUB: Var: char sh_vector_interrupt_end[1]; */
VECTOR_END_MARKER(sh_vector_interrupt_end)
	SET_ENTRY_SIZE(sh_vector_interrupt)
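
For completeness, sh_vector_interrupt reduces to the following control flow once it reaches C: count the interrupt, let intc_intr() dispatch to the registered handler, then check for ASTs before returning to user mode. This is an illustration with a hypothetical helper name; intc_intr()'s argument list is taken from the "intc_intr(ssr, spc, ssp)" comment in the asm, struct layouts are left opaque, and the per-frame bookkeeping is therefore shown only as comments.

/*
 * Hypothetical C rendering of the interrupt vector's control flow;
 * intc_intr() and ast() are the real C entry points it calls.
 */
struct lwp;
struct trapframe;

extern void intc_intr(int ssr, int spc, int ssp);
extern void ast(struct lwp *, struct trapframe *);
extern struct lwp *curlwp;

static void
interrupt_dispatch(struct trapframe *tf, int ssr, int spc, int ssp)
{
	/* tf->tf_expevt = 0;  -- cleared by the asm (for debug) */
	/* ++uvmexp.intrs;     -- interrupt counter bumped by the asm */
	intc_intr(ssr, spc, ssp);	/* run the registered interrupt handler */
	ast(curlwp, tf);		/* check for ASTs on exit to user mode */
}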