exception_vector.S revision 1.28
1/* $NetBSD: exception_vector.S,v 1.28 2007/03/18 20:18:36 uwe Exp $ */ 2 3/*- 4 * Copyright (c) 2002 The NetBSD Foundation, Inc. 5 * All rights reserved. 6 * 7 * Redistribution and use in source and binary forms, with or without 8 * modification, are permitted provided that the following conditions 9 * are met: 10 * 1. Redistributions of source code must retain the above copyright 11 * notice, this list of conditions and the following disclaimer. 12 * 2. Redistributions in binary form must reproduce the above copyright 13 * notice, this list of conditions and the following disclaimer in the 14 * documentation and/or other materials provided with the distribution. 15 * 3. All advertising materials mentioning features or use of this software 16 * must display the following acknowledgement: 17 * This product includes software developed by the NetBSD 18 * Foundation, Inc. and its contributors. 19 * 4. Neither the name of The NetBSD Foundation nor the names of its 20 * contributors may be used to endorse or promote products derived 21 * from this software without specific prior written permission. 22 * 23 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS 24 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED 25 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR 26 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS 27 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR 28 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF 29 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS 30 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN 31 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) 32 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 33 * POSSIBILITY OF SUCH DAMAGE. 
 */

#include "opt_cputype.h"
#include "opt_ddb.h"
#include "assym.h"

#include <sh3/param.h>
#include <sh3/locore.h>
#include <sh3/exception.h>
#include <sh3/ubcreg.h>
#include <sh3/mmu_sh3.h>
#include <sh3/mmu_sh4.h>

/*
 * Align vectors more strictly here (where we don't really care) so
 * that .align 5 (i.e. 32B cache line) before data block does the
 * right thing w.r.t. final destinations after vectors are copied.
 */
#define _ALIGN_TEXT	.align 5
#include <sh3/asm.h>

__KERNEL_RCSID(0, "$NetBSD: exception_vector.S,v 1.28 2007/03/18 20:18:36 uwe Exp $")


/*
 * Exception vectors.
 * The following routines are copied to vector addresses.
 *	sh_vector_generic:	VBR + 0x100
 *	sh_vector_tlbmiss:	VBR + 0x400
 *	sh_vector_interrupt:	VBR + 0x600
 */

#define VECTOR_END_MARKER(sym)			\
	.globl	_C_LABEL(sym);			\
	_C_LABEL(sym):


/*
 * LINTSTUB: Var: char sh_vector_generic[1];
 *
 * void sh_vector_generic(void);
 *	Copied to VBR+0x100.  This code should be position independent
 *	and maximum 768 bytes long (== 0x400 - 0x100).
 */
NENTRY(sh_vector_generic)
	__EXCEPTION_ENTRY
	__INTR_MASK(r0, r1)
	/* Identify exception cause */
	MOV	(EXPEVT, r0)
	mov.l	@r0, r0
	mov.l	r0, @(TF_EXPEVT, r14)	/* tf->tf_expevt = EXPEVT */
	/* Get curlwp */
	mov.l	.Lg_curlwp, r1
	mov.l	@r1, r4			/* 1st arg */
	/* Get TEA */
	MOV	(TEA, r1)
	mov.l	@r1, r6			/* 3rd arg */
	/* Check TLB exception or not */
	mov.l	.Lg_TLB_PROT_ST, r1
	cmp/hi	r1, r0
	bt	1f			/* EXPEVT > TLB_PROT_ST: not a TLB exception */

	/* tlb_exception(curlwp, tf, TEA); */
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	.Lg_tlb_exception, r0
	jsr	@r0
	 mov	r14, r5			/* 2nd arg (delay slot) */
	bra	2f
	 nop

	/* general_exception(curlwp, tf, TEA); */
1:	mov	r4, r8			/* preserve curlwp across the calls below */
#ifdef DDB
	mov	#0, r2
	MOV	(BBRA, r1)
	mov.w	r2, @r1			/* disable UBC */
	mov.l	r2, @(TF_UBC, r14)	/* clear tf->tf_ubc */
#endif /* DDB */
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	.Lg_general_exception, r0
	jsr	@r0
	 mov	r14, r5			/* 2nd arg (delay slot) */

	/* Check for ASTs on exit to user mode. */
	mov	r8, r4			/* 1st arg: curlwp saved in r8 above */
	mov.l	.Lg_ast, r0
	jsr	@r0
	 mov	r14, r5			/* 2nd arg (delay slot) */
#ifdef DDB	/* BBRA = tf->tf_ubc */
	__EXCEPTION_BLOCK(r0, r1)
	mov.l	@(TF_UBC, r14), r0
	MOV	(BBRA, r1)
	mov.w	r0, @r1
#endif /* DDB */
2:	__EXCEPTION_RETURN

	/* Literal pool (PC-relative constants for the mov.l loads above). */
	.align	5
.Lg_curlwp:		.long	_C_LABEL(curlwp)
REG_SYMBOL(EXPEVT)
REG_SYMBOL(BBRA)
REG_SYMBOL(TEA)
.Lg_tlb_exception:	.long	_C_LABEL(tlb_exception)
.Lg_general_exception:	.long	_C_LABEL(general_exception)
.Lg_ast:		.long	_C_LABEL(ast)
.Lg_TLB_PROT_ST:	.long	EXPEVT_TLB_PROT_ST

/* LINTSTUB: Var: char sh_vector_generic_end[1]; */
VECTOR_END_MARKER(sh_vector_generic_end)
	SET_ENTRY_SIZE(sh_vector_generic)


#ifdef SH3
/*
 * LINTSTUB: Var: char sh3_vector_tlbmiss[1];
 *
 * void sh3_vector_tlbmiss(void);
 *	Copied to VBR+0x400.  This code should be position independent
 *	and maximum 512 bytes long (== 0x600 - 0x400).
 */
NENTRY(sh3_vector_tlbmiss)
	__EXCEPTION_ENTRY
	mov	#(SH3_TEA & 0xff), r0
	mov.l	@r0, r6			! 3rd arg: va = TEA

	!! if kernel stack is in P3, handle it here fast
#if !defined(P1_STACK)
	cmp/pz	r6
	bt	6f			! user space address, proceed to handler

	mov.l	.L3_VPN_MASK, r0
	and	r6, r0			! vpn

	mov.l	.L3_curupte, r1
	mov.l	@r1, r1			! upte = &l->l_md.md_upte[0]
	mov	#UPAGES, r3		! loop limit

	!! for each page of u-area
4:	mov.l	@r1+, r7		! upte->addr: u-area VPN
	cmp/eq	r7, r0			! if (vpn == upte->addr)
	bt/s	5f			!     goto found;
	 dt	r3			! (delay slot) decrement & test loop counter
	bf/s	4b
	 add	#4, r1			! skip upte->data; point to next md_upte[i]

	!! not a page of u-area, proceed to handler
	bra	7f			! pull insn at 6f into delay slot
	 mov	#(SH3_EXPEVT & 0xff), r0

	!! load entry for this uarea page into tlb
5:	mov	#(SH3_PTEH & 0xff), r2
	mov.l	@r1, r1			! md_upte[i]->data
	mov.l	@r2, r3			! save ASID
	mov.l	r0, @r2			! SH3_PTEH = { VPN, ASID = 0 }
	mov.l	r1, @(4, r2)		! SH3_PTEL = md_upte[i]->data

	ldtlb

	bra	99f			! return
	 mov.l	r3, @r2			! (delay slot) restore ASID
#endif /* !P1_STACK */

	!! tlb_exception(curlwp, trapframe, tea)
6:	mov	#(SH3_EXPEVT & 0xff), r0
7:	mov.l	@r0, r0
	mov.l	r0, @(TF_EXPEVT, r14)	! tf->tf_expevt = EXPEVT
	mov.l	.L3_curlwp, r0
	mov.l	@r0, r4			! 1st arg: curlwp
	__INTR_MASK(r0, r1)
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	.L3_tlb_exception, r0
	jsr	@r0
	 mov	r14, r5			! 2nd arg: trap frame (delay slot)
99:	__EXCEPTION_RETURN

	! Literal pool (PC-relative constants for the mov.l loads above).
	.align	5
.L3_curlwp:		.long	_C_LABEL(curlwp)
.L3_tlb_exception:	.long	_C_LABEL(tlb_exception)
.L3_VPN_MASK:		.long	0xfffff000
.L3_curupte:		.long	_C_LABEL(curupte)

/* LINTSTUB: Var: char sh3_vector_tlbmiss_end[1]; */
VECTOR_END_MARKER(sh3_vector_tlbmiss_end)
	SET_ENTRY_SIZE(sh3_vector_tlbmiss)
#endif /* SH3 */


#ifdef SH4
/*
 * LINTSTUB: Var: char sh4_vector_tlbmiss[1];
 *
 * void sh4_vector_tlbmiss(void);
 *	Copied to VBR+0x400.  This code should be position independent
 *	and maximum 512 bytes long (== 0x600 - 0x400).
 */
NENTRY(sh4_vector_tlbmiss)
	__EXCEPTION_ENTRY
	mov.l	.L4_TEA4, r0
	mov.l	@r0, r6			/* 3rd arg: va = TEA */
	mov.l	.L4_EXPEVT4, r0
	mov.l	@r0, r0
	mov.l	r0, @(TF_EXPEVT, r14)	/* tf->tf_expevt = EXPEVT */
	mov.l	.L4_curlwp, r0
	mov.l	@r0, r4			/* 1st arg */
	__INTR_MASK(r0, r1)
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	.L4_tlb_exception, r0
	jsr	@r0
	 mov	r14, r5			/* 2nd arg (delay slot) */
	__EXCEPTION_RETURN

	/* Literal pool (PC-relative constants for the mov.l loads above). */
	.align	5
.L4_tlb_exception:	.long	_C_LABEL(tlb_exception)
.L4_curlwp:		.long	_C_LABEL(curlwp)
.L4_EXPEVT4:		.long	SH4_EXPEVT
.L4_TEA4:		.long	SH4_TEA

/* LINTSTUB: Var: char sh4_vector_tlbmiss_end[1]; */
VECTOR_END_MARKER(sh4_vector_tlbmiss_end)
	SET_ENTRY_SIZE(sh4_vector_tlbmiss)
#endif /* SH4 */


/*
 * LINTSTUB: Var: char sh_vector_interrupt[1];
 *
 * void sh_vector_interrupt(void);
 *	Copied to VBR+0x600.  This code should be position independent.
261 */ 262NENTRY(sh_vector_interrupt) 263 __EXCEPTION_ENTRY 264 xor r0, r0 265 mov.l r0, @(TF_EXPEVT, r14) /* (for debug) */ 266 stc r0_bank, r6 /* ssp */ 267 /* Enable exceptions for P3 access */ 268 __INTR_MASK(r0, r1) 269 __EXCEPTION_UNBLOCK(r0, r1) 270 /* ++uvmexp.intrs */ 271 mov.l .Li_uvmexp_intrs, r0 272 mov.l @r0, r1 273 add #1 r1 274 mov.l r1, @r0 275 /* Dispatch interrupt handler */ 276 mov.l .Li_intc_intr, r0 277 jsr @r0 /* intc_intr(ssr, spc, ssp) */ 278 nop 279 /* Check for ASTs on exit to user mode. */ 280 mov.l .Li_curlwp, r0 281 mov.l @r0, r4 /* 1st arg */ 282 mov.l .Li_ast, r0 283 jsr @r0 284 mov r14, r5 /* 2nd arg */ 285 __EXCEPTION_RETURN 286 287 .align 5 288.Li_curlwp: .long _C_LABEL(curlwp) 289.Li_intc_intr: .long _C_LABEL(intc_intr) 290.Li_ast: .long _C_LABEL(ast) 291.Li_uvmexp_intrs: .long _C_LABEL(uvmexp) + UVMEXP_INTRS 292 293/* LINTSTUB: Var: char sh_vector_interrupt_end[1]; */ 294VECTOR_END_MARKER(sh_vector_interrupt_end) 295 SET_ENTRY_SIZE(sh_vector_interrupt) 296