/* exception_vector.S revision 1.23 */
/*	$NetBSD: exception_vector.S,v 1.23 2007/03/15 01:54:18 uwe Exp $	*/

/*-
 * Copyright (c) 2002 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *        This product includes software developed by the NetBSD
 *        Foundation, Inc. and its contributors.
 * 4. Neither the name of The NetBSD Foundation nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include "opt_cputype.h"
#include "opt_ddb.h"
#include "assym.h"

#include <sh3/param.h>
#include <sh3/locore.h>
#include <sh3/exception.h>
#include <sh3/ubcreg.h>
#include <sh3/mmu_sh3.h>
#include <sh3/mmu_sh4.h>

/*
 * Align vectors more strictly here (where we don't really care) so
 * that .align 5 (i.e. 32B cache line) before data block does the
 * right thing w.r.t. final destinations after vectors are copied.
 */
#define _ALIGN_TEXT	.align 5
#include <sh3/asm.h>

__KERNEL_RCSID(0, "$NetBSD: exception_vector.S,v 1.23 2007/03/15 01:54:18 uwe Exp $")


/*
 * Exception vectors.
 * The following routines are copied to vector addresses.
 *	sh_vector_generic:	VBR + 0x100
 *	sh_vector_tlbmiss:	VBR + 0x400
 *	sh_vector_interrupt:	VBR + 0x600
 */

/* Emit a global label marking the end of a vector, so the copy code
 * can compute the vector's size (end - start). */
#define VECTOR_END_MARKER(sym)			\
	.globl	_C_LABEL(sym);			\
	_C_LABEL(sym):


/*
 * LINTSTUB: Var: char sh_vector_generic[1];
 *
 * void sh_vector_generic(void);
 *	Copied to VBR+0x100.  This code should be position independent
 *	and maximum 768 bytes long (== 0x400 - 0x100).
 */
NENTRY(sh_vector_generic)
	__EXCEPTION_ENTRY
	__INTR_MASK(r0, r1)
	/* Identify exception cause */
	MOV	(EXPEVT, r0)
	mov.l	@r0, r0
	mov.l	r0, @(TF_EXPEVT, r14)	/* tf->tf_expevt = EXPEVT */
	/* Get curlwp */
	mov.l	.Lg_curlwp, r1
	mov.l	@r1, r4			/* 1st arg */
	/* Get TEA */
	MOV	(TEA, r1)
	mov.l	@r1, r6			/* 3rd arg */
	/* Check TLB exception or not: TLB event codes sort below
	 * EXPEVT_TLB_PROT_ST, so "higher" means non-TLB. */
	mov.l	.Lg_TLB_PROT_ST, r1
	cmp/hi	r1, r0
	bt	1f

	/* tlb_exception(curlwp, tf, TEA); */
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	.Lg_tlb_exception, r0
	jsr	@r0
	 mov	r14, r5			/* 2nd arg (delay slot) */
	bra	2f
	 nop				/* (delay slot) */

	/* general_exception(curlwp, tf, TEA); */
1:	mov	r4, r8			/* save curlwp for the ast() call below */
#ifdef DDB
	mov	#0, r2
	MOV	(BBRA, r1)
	mov.w	r2, @r1			/* disable UBC */
	mov.l	r2, @(TF_UBC, r14)	/* clear tf->tf_ubc */
#endif /* DDB */
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	.Lg_general_exception, r0
	jsr	@r0
	 mov	r14, r5			/* 2nd arg (delay slot) */

	/* Check for ASTs on exit to user mode. */
	mov	r8, r4			/* 1st arg: saved curlwp */
	mov.l	.Lg_ast, r0
	jsr	@r0
	 mov	r14, r5			/* 2nd arg (delay slot) */
#ifdef DDB	/* BBRA = tf->tf_ubc */
	__EXCEPTION_BLOCK(r0, r1)
	mov.l	@(TF_UBC, r14), r0
	MOV	(BBRA, r1)
	mov.w	r0, @r1			/* re-enable UBC from saved state */
#endif /* DDB */
2:	__EXCEPTION_RETURN

	/* Literal pool; 32B-aligned so it lands cleanly after the copy. */
	.align	5
.Lg_curlwp:		.long	_C_LABEL(curlwp)
REG_SYMBOL(EXPEVT)
REG_SYMBOL(BBRA)
REG_SYMBOL(TEA)
.Lg_tlb_exception:	.long	_C_LABEL(tlb_exception)
.Lg_general_exception:	.long	_C_LABEL(general_exception)
.Lg_ast:		.long	_C_LABEL(ast)
.Lg_TLB_PROT_ST:	.long	EXPEVT_TLB_PROT_ST

/* LINTSTUB: Var: char sh_vector_generic_end[1]; */
VECTOR_END_MARKER(sh_vector_generic_end)
	SET_ENTRY_SIZE(sh_vector_generic)


#ifdef SH3
/*
 * LINTSTUB: Var: char sh3_vector_tlbmiss[1];
 *
 * void sh3_vector_tlbmiss(void);
 *	Copied to VBR+0x400.  This code should be position independent
 *	and maximum 512 bytes long (== 0x600 - 0x400).
 */
NENTRY(sh3_vector_tlbmiss)
	__EXCEPTION_ENTRY
	mov	#(SH3_TEA & 0xff), r0
	mov.l	@r0, r6			/* 3rd arg: va = TEA */
#if !defined(P1_STACK)
	/*
	 * Kernel stack is in P3: a miss on the u-area itself must be
	 * handled here without recursing.  Scan curupte[] (md_upte
	 * {addr, data} pairs) for the faulting VPN and load the TLB
	 * entry by hand if found.
	 */
	mov.l	.L3_VPN_MASK, r0
	and	r6, r0			/* r0 = faulting VPN */
	tst	r0, r0			/* check VPN == 0 */
	bt	6f
	mov.l	.L3_CURUPTE, r1
	mov.l	@r1, r1			/* r1 = curupte */
	mov	#UPAGES, r3		/* r3 = number of entries */
	mov	#1, r2			/* r2 = loop counter */
4:	mov.l	@r1+, r7
	cmp/eq	r7, r0			/* md_upte.addr: u-area VPN */
	bt	5f
	add	#4, r1			/* skip md_upte.data */
	cmp/eq	r2, r3
	bf/s	4b
	 add	#1, r2			/* (delay slot) */
	bra	7f			/* pull insn at 6f into delay slot */
	 mov	#(SH3_EXPEVT & 0xff), r0
5:	mov.l	@r1, r2			/* md_upte.data: u-area PTE */
	mov	#(SH3_PTEL & 0xff), r1
	mov.l	r2, @r1			/* PTEL = PTE */
	mov	#(SH3_PTEH & 0xff), r1
	mov.l	@r1, r2			/* r2 = old PTEH (keeps ASID) */
	mov.l	.L3_VPN_MASK, r0
	and	r2, r0
	mov.l	r0, @r1			/* ASID 0 */
	ldtlb				/* load PTEH/PTEL into the TLB */
	bra	3f
	 mov.l	r2, @r1			/* restore ASID (delay slot) */
#endif /* !P1_STACK */
6:	mov	#(SH3_EXPEVT & 0xff), r0
7:	mov.l	@r0, r0
	mov.l	r0, @(TF_EXPEVT, r14)	/* tf->tf_expevt = EXPEVT */
	mov.l	.L3_curlwp, r0
	mov.l	@r0, r4			/* 1st arg */
	__INTR_MASK(r0, r1)
	__EXCEPTION_UNBLOCK(r0, r1)
	/* tlb_exception(curlwp, tf, TEA); */
	mov.l	.L3_tlb_exception, r0
	jsr	@r0
	 mov	r14, r5			/* 2nd arg (delay slot) */
3:	__EXCEPTION_RETURN

	/* Literal pool */
	.align	5
.L3_curlwp:		.long	_C_LABEL(curlwp)
.L3_tlb_exception:	.long	_C_LABEL(tlb_exception)
.L3_VPN_MASK:		.long	0xfffff000
.L3_CURUPTE:		.long	_C_LABEL(curupte)

/* LINTSTUB: Var: char sh3_vector_tlbmiss_end[1]; */
VECTOR_END_MARKER(sh3_vector_tlbmiss_end)
	SET_ENTRY_SIZE(sh3_vector_tlbmiss)
#endif /* SH3 */


#ifdef SH4
/*
 * LINTSTUB: Var: char sh4_vector_tlbmiss[1];
 *
 * void sh4_vector_tlbmiss(void);
 *	Copied to VBR+0x400.  This code should be position independent
 *	and maximum 512 bytes long (== 0x600 - 0x400).
219 */ 220NENTRY(sh4_vector_tlbmiss) 221 __EXCEPTION_ENTRY 222 mov.l .L4_TEA4, r0 223 mov.l @r0, r6 224 mov.l .L4_EXPEVT4, r0 225 mov.l @r0, r0 226 mov.l r0, @(TF_EXPEVT, r14) /* tf->tf_expevt = EXPEVT */ 227 mov.l .L4_curlwp, r0 228 mov.l @r0, r4 /* 1st arg */ 229 __INTR_MASK(r0, r1) 230 __EXCEPTION_UNBLOCK(r0, r1) 231 mov.l .L4_tlb_exception, r0 232 jsr @r0 233 mov r14, r5 /* 2nd arg */ 234 __EXCEPTION_RETURN 235 236 .align 5 237.L4_tlb_exception: .long _C_LABEL(tlb_exception) 238.L4_curlwp: .long _C_LABEL(curlwp) 239.L4_EXPEVT4: .long SH4_EXPEVT 240.L4_TEA4: .long SH4_TEA 241 242/* LINTSTUB: Var: char sh4_vector_tlbmiss_end[1]; */ 243VECTOR_END_MARKER(sh4_vector_tlbmiss_end) 244 SET_ENTRY_SIZE(sh4_vector_tlbmiss) 245#endif /* SH4 */ 246 247 248/* 249 * LINTSTUB: Var: char sh_vector_interrupt[1]; 250 * 251 * void sh_vector_interrupt(void); 252 * Copied to VBR+0x600. This code should be position independent. 253 */ 254NENTRY(sh_vector_interrupt) 255 __EXCEPTION_ENTRY 256 xor r0, r0 257 mov.l r0, @(TF_EXPEVT, r14) /* (for debug) */ 258 stc r0_bank, r6 /* ssp */ 259 /* Enable exceptions for P3 access */ 260 __INTR_MASK(r0, r1) 261 __EXCEPTION_UNBLOCK(r0, r1) 262 /* ++uvmexp.intrs */ 263 mov.l .Li_uvmexp_intrs, r0 264 mov.l @r0, r1 265 add #1 r1 266 mov.l r1, @r0 267 /* Dispatch interrupt handler */ 268 mov.l .Li_intc_intr, r0 269 jsr @r0 /* intc_intr(ssr, spc, ssp) */ 270 nop 271 /* Check for ASTs on exit to user mode. */ 272 mov.l .Li_curlwp, r0 273 mov.l @r0, r4 /* 1st arg */ 274 mov.l .Li_ast, r0 275 jsr @r0 276 mov r14, r5 /* 2nd arg */ 277 __EXCEPTION_RETURN 278 279 .align 5 280.Li_curlwp: .long _C_LABEL(curlwp) 281.Li_intc_intr: .long _C_LABEL(intc_intr) 282.Li_ast: .long _C_LABEL(ast) 283.Li_uvmexp_intrs: .long _C_LABEL(uvmexp) + UVMEXP_INTRS 284 285/* LINTSTUB: Var: char sh_vector_interrupt_end[1]; */ 286VECTOR_END_MARKER(sh_vector_interrupt_end) 287 SET_ENTRY_SIZE(sh_vector_interrupt) 288