/*	$NetBSD: trap_subr.S,v 1.15 2022/07/05 20:15:40 andvar Exp $	*/
/*-
 * Copyright (c) 2010, 2011 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Raytheon BBN Technologies Corp and Defense Advanced Research Projects
 * Agency and which was developed by Matt Thomas of 3am Software Foundry.
 *
 * This material is based upon work supported by the Defense Advanced Research
 * Projects Agency and Space and Naval Warfare Systems Center, Pacific, under
 * Contract No. N66001-09-C-2073.
 * Approved for Public Release, Distribution Unlimited
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

RCSID("$NetBSD: trap_subr.S,v 1.15 2022/07/05 20:15:40 andvar Exp $")

#ifdef _KERNEL_OPT
#include "opt_altivec.h"
#include "opt_ddb.h"
#include "opt_mpc85xx.h"
#include "opt_multiprocessor.h"
#endif

	.globl	_C_LABEL(sctrapexit), _C_LABEL(trapexit), _C_LABEL(intrcall)

/*
 * We have a problem with critical (MSR[CE]), machine check (MSR[ME]),
 * and debug (MSR[DE]) interrupts/exceptions in that they could happen
 * in-between the mtsprg1 %r2 and mfsprg1 %r2.  If that happens, %r2
 * will be lost.  Even if we moved to a different sprg, subsequent
 * exceptions would use SPRG1 and its value would be lost.  The only
 * way to be safe for CE/ME/DE faults is to save and restore SPRG1.
 *
 * Since CE/ME/DE faults may happen anytime, we need r1 to always
 * contain a valid kernel stack pointer.  Therefore we use r2 as
 * our temporary register.
 *
 * To prevent %r2 being overwritten, each "level" (normal, critical,
 * mchk) uses a unique sprg to save %r2 (sprg1, sprg4, sprg5).
 *
 * Since we can't control how many nested exceptions we might get,
 * we don't use a dedicated save area.  Instead we have an upwards
 * growing "stack" of them; the pointer to which is kept in sprg3.
 *
 * To allocate from the stack, one fetches sprg3, adds the amount
 * needed, saves sprg3, and then refers to the save using a
 * displacement of -amount.
 */
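/*
 * Illustration (matching FRAME_EXC_PROLOGUE/FRAME_EXC_ENTER below): after
 * parking %r2 in its level's sprg, a handler that wants r24-r31 free does
 * roughly
 *
 *	mfsprg3	%r2			   current top of the save "stack"
 *	addi	%r2,%r2,4*(32-24)	   push a 32-byte slice
 *	mtsprg3	%r2
 *	stmw	%r24,-4*(32-24)(%r2)	   fill the slice just allocated
 *
 * and later pops it with the mirror image (addi of -4*(32-24), lmw from
 * 0(%r2), mtsprg3).  A nested exception simply pushes another slice above.
 */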
/*
 * XXXclang
 * At the moment, clang cannot correctly assemble m[ft]sprgN
 * (N >= 4); use m[ft]spr for SPR_SPRGN instead.
 */
#define	FRAME_EXC_PROLOGUE(start, sprg, srr) \
	mtspr	sprg,%r2;		/* save r2 */ \
	mfsprg3	%r2;			/* get save_area pointer */ \
	addi	%r2,%r2,4*(32-start); \
					/* allocate save area */ \
	mtsprg3	%r2;			/* save updated pointer */ \
	stmw	%r##start,-4*(32-start)(%r2); \
					/* free r24-r31 for use */ \
	mfspr	%r26,sprg;		/* get saved r2 */ \
	mfcr	%r27;			/* get Condition Register */ \
	mfxer	%r28;			/* get XER */ \
	mfspr	%r30, SPR_##srr##0;	/* get SRR0 */ \
	mfspr	%r31, SPR_##srr##1	/* get SRR1 */

#define	PROLOGUE_GET_DEAR	mfspr	%r24, SPR_DEAR
#define	PROLOGUE_GET_ESR	mfspr	%r25, SPR_ESR
#define	PROLOGUE_GET_SRRS	mfsrr0	%r24; \
				mfsrr1	%r25
#define	PROLOGUE_GET_SPRG1	mfsprg1	%r29
#define	PROLOGUE_GET_DBSR	mfspr	%r25, SPR_DBSR
#define	SAVE_ESR		stw	%r25, FRAME_ESR(%r1)
#define	SAVE_DEAR		stw	%r24, FRAME_DEAR(%r1)
#define	SAVE_DEAR_ESR		SAVE_ESR; SAVE_DEAR
#define	SAVE_SRRS		SAVE_DEAR_ESR
#define	SAVE_SPRG1		stw	%r29, FRAME_SPRG1(%r1)
#define	SAVE_DBSR		stw	%r25, FRAME_DBSR(%r1)
#define	SAVE_NOTHING		/* nothing */
#define	RESTORE_SPRG1(r)	lwz	r, FRAME_SPRG1(%r1); \
				mtsprg1	r
#define	RESTORE_SRR0(r)		lwz	r, FRAME_DEAR(%r1); \
				mtsrr0	r
#define	RESTORE_SRR1(r)		lwz	r, FRAME_ESR(%r1); \
				mtsrr1	r

#define	FRAME_PROLOGUE \
	FRAME_EXC_PROLOGUE(26, SPR_SPRG1, SRR)

#define	FRAME_PROLOGUE_DEAR_ESR \
	FRAME_EXC_PROLOGUE(24, SPR_SPRG1, SRR); \
	PROLOGUE_GET_ESR; \
	PROLOGUE_GET_DEAR

#define	FRAME_PROLOGUE_ESR \
	FRAME_EXC_PROLOGUE(25, SPR_SPRG1, SRR); \
	PROLOGUE_GET_ESR

#define	FRAME_TLBPROLOGUE \
	FRAME_EXC_PROLOGUE(20, SPR_SPRG1, SRR); \
	PROLOGUE_GET_ESR; \
	PROLOGUE_GET_DEAR

#define	FRAME_INTR_PROLOGUE \
	FRAME_EXC_PROLOGUE(26, SPR_SPRG1, SRR)

/*
 * These need to save the normal SRR0/SRR1 as well as their own
 * CSRR0/CSRR1 (or MCSRR0/MCSRR1) in case normal exceptions happened
 * during their execution.
 */
#define	FRAME_CRIT_PROLOGUE \
	FRAME_EXC_PROLOGUE(24, SPR_SPRG4, CSRR); \
	PROLOGUE_GET_SPRG1; \
	PROLOGUE_GET_SRRS

#define	FRAME_MCHK_PROLOGUE \
	FRAME_EXC_PROLOGUE(24, SPR_SPRG5, MCSRR); \
	PROLOGUE_GET_SPRG1; \
	PROLOGUE_GET_SRRS

#define	FRAME_DEBUG_PROLOGUE \
	FRAME_EXC_PROLOGUE(24, SPR_SPRG4, CSRR); \
	PROLOGUE_GET_SPRG1; \
	PROLOGUE_GET_SRRS
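/*
 * For reference, the prologue variants pair up as follows:
 *
 *	normal (FRAME_*PROLOGUE)	r2 in SPRG1	SRR0/SRR1
 *	critical/debug (CRIT/DEBUG)	r2 in SPRG4	CSRR0/CSRR1
 *	machine check (MCHK)		r2 in SPRG5	MCSRR0/MCSRR1
 *
 * The critical/debug/machine-check variants additionally capture SPRG1 and
 * the normal SRR0/SRR1 so the state of an interrupted normal-level
 * exception is not lost.
 */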
/*
 * DDB expects to fetch the LR from the previous frame.  But it also
 * expects to be pointing at the instruction after the branch link.  Since
 * we didn't branch, we need to advance it by 4 to fake out DDB.  But there's
 * a problem.  If the routine is in either its first or last two instructions
 * (before or after it has adjusted its stack pointer), we could possibly
 * overwrite the stored return address.  So that stored return address needs
 * to be saved and restored.
 */
#if defined(DDB)
#define	FRAME_SAVE_SRR0_FOR_DDB \
	lwz	%r29, FRAMELEN+CFRAME_LR(%r1);	/* fetch old return address */\
	stw	%r29, FRAME_CFRAME_LR(%r1);	/* save it */ \
	addi	%r30, %r30, 4;		/* point to the next insn */ \
	stw	%r30, FRAMELEN+CFRAME_LR(%r1)	/* appease ddb stacktrace */
#define	FRAME_RESTORE_RETURN_ADDRESS \
	lwz	%r3, FRAME_CFRAME_LR(%r1);	/* fetch old return address */ \
	stw	%r3, FRAMELEN+CFRAME_LR(%r1)	/* restore it */
#else
#define	FRAME_SAVE_SRR0_FOR_DDB
#define	FRAME_RESTORE_RETURN_ADDRESS
#endif
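/*
 * Net effect: ddb treats the word in the caller's LR slot as a return
 * address, i.e. the instruction after a bl, so parking SRR0+4 there makes
 * the backtrace attribute the frame to the faulting instruction.  The word
 * that really lived in that slot is stashed in the trapframe and put back
 * by FRAME_RESTORE_RETURN_ADDRESS on the way out.
 */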
#ifdef PPC_HAVE_SPE
#define	FRAME_SAVE_SPEFSCR \
	mfspefscr %r0;			/* get spefscr */ \
	stw	%r0, FRAME_SPEFSCR(%r1)	/* save into trapframe */
#define	FRAME_RESTORE_SPEFSCR \
	lwz	%r0, FRAME_SPEFSCR(%r1); /* fetch from trapframe */ \
	mtspefscr %r0			/* restore spefscr */
#else
#define	FRAME_SAVE_SPEFSCR
#define	FRAME_RESTORE_SPEFSCR
#endif
/*
 * Before the first memory reference, we must have our state inside registers
 * since the first memory access might cause an exception which would cause
 * SRR0/SRR1 and DEAR/ESR to become unrecoverable.  CR and XER also need to be
 * saved early since they will be modified by instruction flow.  The saved
 * stack pointer is also critical but saving LR and CTR can be deferred until
 * we are actually filling a trapframe.
 */
#define	FRAME_EXC_ENTER(exc, tf, start, save_prologue) \
	mtcr	%r31;			/* user mode exception? */ \
	mr	%r31, %r1;		/* save SP (SRR1 is safe in CR) */ \
	bf	MSR_PR, 1f;		/* nope, sp is good */ \
	mfsprg2	%r2;			/* get curlwp */ \
	lwz	%r2, L_PCB(%r2);	/* get uarea of curlwp */ \
	addi	%r1, %r2, USPACE-CALLFRAMELEN; \
					/* start stack at top of it */ \
1: \
	stwu	%r31, -FRAMELEN(%r1);	/* get space for trapframe */ \
	stw	%r0, FRAME_R0(%r1);	/* save r0 */ \
	stw	%r31, FRAME_R1(%r1);	/* save (saved) r1 */ \
	stw	%r26, FRAME_R2(%r1);	/* save (saved) r2 */ \
	save_prologue;			/* save SPRG1/ESR/DEAR */ \
	/* At this point, r26, r29, and r31 have been saved so we */ \
	/* can use them for LR, CTR, and SRR1. */ \
	mflr	%r26;			/* get Link Register */ \
	mfctr	%r29;			/* get CTR */ \
	mfcr	%r31;			/* get SRR1 */ \
	stmw	%r26, FRAME_LR(%r1);	/* save LR CR XER CTR SRR0/1 */ \
	FRAME_SAVE_SRR0_FOR_DDB; \
	mr	%r0, %r31;		/* save SRR1 for a bit */ \
	mfsprg3	%r2;			/* get save_area pointer */ \
	addi	%r2,%r2,-4*(32-start);	/* find our save area */ \
	lmw	%r##start,0(%r2);	/* get start-r31 */ \
	mtsprg3	%r2;			/* save updated pointer */ \
	stmw	%r3, FRAME_R3(%r1);	/* save r3-r31 */ \
	/* Now everything has been saved */ \
	mr	%r31, %r0;		/* move SRR1 back to r31 */ \
	mfsprg2	%r13;			/* put curlwp in r13 */ \
	FRAME_SAVE_SPEFSCR; \
	li	%r7, exc;		/* load EXC_* */ \
	stw	%r7, FRAME_EXC(%r1);	/* save into trapframe */ \
	addi	tf, %r1, FRAME_TF	/* get address of trap frame */

#define	FRAME_EXC_EXIT(rfi, srr) \
	FRAME_RESTORE_RETURN_ADDRESS;	/* restore return address */ \
	lmw	%r26, FRAME_LR(%r1);	/* get LR CR XER CTR SRR0/1 */ \
	oris	%r31,%r31,PSL_CE@h; \
	mtspr	SPR_##srr##1, %r31;	/* restore SRR1 */ \
	mtspr	SPR_##srr##0, %r30;	/* restore SRR0 */ \
	FRAME_RESTORE_SPEFSCR; \
	mtctr	%r29;			/* restore CTR */ \
	mtxer	%r28;			/* restore XER */ \
	mtcr	%r27;			/* restore CR */ \
	mtlr	%r26;			/* restore LR */ \
	lmw	%r2, FRAME_R2(%r1);	/* restore r2-r31 */ \
	lwz	%r0, FRAME_R0(%r1);	/* restore r0 */ \
	lwz	%r1, FRAME_R1(%r1);	/* restore r1 */ \
	rfi				/* return from interrupt */

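/*
 * Note that FRAME_EXC_EXIT unconditionally ORs PSL_CE into the MSR image it
 * hands to SRR1/CSRR1/MCSRR1, so critical interrupts are re-enabled on every
 * return path regardless of what the trapped context had.
 */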
#define	FRAME_ENTER(exc, tf) \
	FRAME_EXC_ENTER(exc, tf, 26, SAVE_NOTHING)

#define	FRAME_ENTER_ESR(exc, tf) \
	FRAME_EXC_ENTER(exc, tf, 25, SAVE_ESR)

#define	FRAME_ENTER_DEAR_ESR(exc, tf) \
	FRAME_EXC_ENTER(exc, tf, 24, SAVE_DEAR_ESR)

#define	FRAME_EXIT	FRAME_EXC_EXIT(rfi, SRR)

#define	FRAME_TLBENTER(exc) \
	FRAME_EXC_ENTER(exc, %r4, 20, SAVE_DEAR_ESR)
#define	FRAME_TLBEXIT	FRAME_EXC_EXIT(rfi, SRR)

#define	FRAME_MCHK_ENTER(exc) \
	FRAME_EXC_ENTER(exc, %r3, 26, SAVE_SPRG1; SAVE_SRRS)
#define	FRAME_MCHK_EXIT \
	RESTORE_SRR0(%r28); \
	RESTORE_SRR1(%r27); \
	RESTORE_SPRG1(%r26); \
	FRAME_EXC_EXIT(rfmci, MCSRR)

#define	FRAME_DEBUG_ENTER(exc) \
	FRAME_EXC_ENTER(exc, %r4, 26, SAVE_SPRG1; SAVE_SRRS)
#define	FRAME_DEBUG_EXIT \
	RESTORE_SPRG1(%r26); FRAME_EXC_EXIT(rfci, CSRR)

#define	FRAME_INTR_SP \
	bf	MSR_PR, 1f;		/* nope, sp is good */ \
	mfsprg2	%r2;			/* get curlwp */ \
	lwz	%r2, L_PCB(%r2);	/* get uarea of curlwp */ \
	addi	%r1, %r2, USPACE-CALLFRAMELEN; \
					/* start stack at top of it */ \
1:

#define	FRAME_INTR_SP_NEW(sym) \
	lis	%r2,(sym)@ha; \
	addi	%r1,%r2,(sym)@l

#define	FRAME_INTR_XENTER(exc, start, get_intr_sp, save_prologue) \
	mtcr	%r31;			/* user mode exception? */ \
	mr	%r31, %r1;		/* save SP (SRR1 is safe in CR) */ \
	get_intr_sp;			/* get kernel stack pointer */ \
	stwu	%r31, -FRAMELEN(%r1);	/* get space for trapframe */ \
	stw	%r0, FRAME_R0(%r1);	/* save r0 */ \
	stw	%r31, FRAME_R1(%r1);	/* save (saved) r1 */ \
	stw	%r26, FRAME_R2(%r1);	/* save (saved) r2 */ \
	save_prologue;			/* save SPRG1 (maybe) */ \
	mflr	%r26;			/* get LR */ \
	mfctr	%r29;			/* get CTR */ \
	mfcr	%r31;			/* get SRR1 */ \
	stmw	%r26, FRAME_LR(%r1);	/* save LR CR XER CTR SRR0/1 */ \
	FRAME_SAVE_SRR0_FOR_DDB; \
	stw	%r3, FRAME_R3(%r1);	/* save r3 */ \
	stw	%r4, FRAME_R4(%r1);	/* save r4 */ \
	stw	%r5, FRAME_R5(%r1);	/* save r5 */ \
	stw	%r6, FRAME_R6(%r1);	/* save r6 */ \
	stw	%r7, FRAME_R7(%r1);	/* save r7 */ \
	stw	%r8, FRAME_R8(%r1);	/* save r8 */ \
	stw	%r9, FRAME_R9(%r1);	/* save r9 */ \
	stw	%r10, FRAME_R10(%r1);	/* save r10 */ \
	stw	%r11, FRAME_R11(%r1);	/* save r11 */ \
	stw	%r12, FRAME_R12(%r1);	/* save r12 */ \
	stw	%r13, FRAME_R13(%r1);	/* save r13 */ \
	mfsprg3	%r2;			/* get save_area pointer */ \
	addi	%r2,%r2,-4*(32-start);	/* find our save area */ \
	lmw	%r##start,0(%r2);	/* get start-r31 */ \
	mtsprg3	%r2;			/* save updated pointer */ \
	mfsprg2	%r13;			/* put curlwp into r13 */ \
	li	%r7, exc;		/* load EXC_* */ \
	stw	%r7, FRAME_EXC(%r1);	/* save into trapframe */ \
	addi	%r3, %r1, FRAME_TF	/* only argument is trapframe */

#define	FRAME_INTR_XEXIT(rfi, srr) \
	FRAME_RESTORE_RETURN_ADDRESS;	/* restore return address */ \
	lwz	%r8, FRAME_LR(%r1);	/* get LR */ \
	lwz	%r9, FRAME_CR(%r1);	/* get CR */ \
	lwz	%r10, FRAME_XER(%r1);	/* get XER */ \
	lwz	%r11, FRAME_CTR(%r1);	/* get CTR */ \
	lwz	%r12, FRAME_SRR0(%r1);	/* get SRR0 */ \
	lwz	%r13, FRAME_SRR1(%r1);	/* get SRR1 */ \
	mtspr	SPR_##srr##1, %r13;	/* restore SRR1 */ \
	mtspr	SPR_##srr##0, %r12;	/* restore SRR0 */ \
	mtctr	%r11;			/* restore CTR */ \
	mtxer	%r10;			/* restore XER */ \
	mtcr	%r9;			/* restore CR */ \
	mtlr	%r8;			/* restore LR */ \
	lwz	%r13, FRAME_R13(%r1);	/* restore r13 */ \
	lwz	%r12, FRAME_R12(%r1);	/* restore r12 */ \
	lwz	%r11, FRAME_R11(%r1);	/* restore r11 */ \
	lwz	%r10, FRAME_R10(%r1);	/* restore r10 */ \
	lwz	%r9, FRAME_R9(%r1);	/* restore r9 */ \
	lwz	%r8, FRAME_R8(%r1);	/* restore r8 */ \
	lwz	%r7, FRAME_R7(%r1);	/* restore r7 */ \
	lwz	%r6, FRAME_R6(%r1);	/* restore r6 */ \
	lwz	%r5, FRAME_R5(%r1);	/* restore r5 */ \
	lwz	%r4, FRAME_R4(%r1);	/* restore r4 */ \
	lwz	%r3, FRAME_R3(%r1);	/* restore r3 */ \
	lwz	%r2, FRAME_R2(%r1);	/* restore r2 */ \
	lwz	%r0, FRAME_R0(%r1);	/* restore r0 */ \
	lwz	%r1, FRAME_R1(%r1);	/* restore r1 */ \
	rfi				/* return from interrupt */

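/*
 * The interrupt variants only spill the volatile registers (r0-r13 plus
 * LR/CR/XER/CTR/SRR0/SRR1); the C interrupt handlers preserve r14-r31
 * themselves.  If an AST forces the interrupt frame to be turned into a
 * full trapframe, intrcall below saves r14-r31 with stmw before reusing
 * the trap path.
 */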
#define	FRAME_INTR_ENTER(exc) \
	FRAME_INTR_XENTER(exc, 26, FRAME_INTR_SP, SAVE_NOTHING)
#define	FRAME_INTR_EXIT \
	FRAME_INTR_XEXIT(rfi, SRR)
#define	FRAME_CRIT_ENTER(exc) \
	FRAME_INTR_XENTER(exc, 24, FRAME_INTR_SP, SAVE_SPRG1)
#define	FRAME_WDOG_ENTER(exc, sym) \
	FRAME_INTR_XENTER(exc, 24, FRAME_INTR_SP_NEW(sym), SAVE_SPRG1)
#define	FRAME_CRIT_EXIT \
	RESTORE_SRR0(%r4); \
	RESTORE_SRR1(%r5); \
	RESTORE_SPRG1(%r6); \
	FRAME_INTR_XEXIT(rfci, CSRR)

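/*
 * FRAME_TLBMISSLOCK/FRAME_TLBMISSUNLOCK (below) take and release the global
 * pmap_tlb_miss_lock around the software TLB refill, mimicking the
 * ci_mtx_count/ci_mtx_oldspl bookkeeping of a spin mutex but open-coded so
 * it can run with just the TLB prologue scratch registers (r20-r23).
 */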
#if defined(MULTIPROCESSOR)
#define	FRAME_TLBMISSLOCK \
	GET_CPUINFO(%r23); \
	ldint	%r22, CI_MTX_COUNT(%r23); \
	subi	%r22, %r22, 1; \
	stint	%r22, CI_MTX_COUNT(%r23); \
	isync; \
	cmpwi	%r22, 0; \
	bne	1f; \
	ldint	%r22, CI_CPL(%r23); \
	stint	%r22, CI_MTX_OLDSPL(%r23); \
1:	lis	%r23, _C_LABEL(pmap_tlb_miss_lock)@h; \
	ori	%r23, %r23, _C_LABEL(pmap_tlb_miss_lock)@l; \
	li	%r20, MTX_LOCK; \
2:	lwarx	%r22, %r20, %r23; \
	cmpwi	%r22, __SIMPLELOCK_UNLOCKED; \
	beq+	4f; \
3:	lwzx	%r22, %r20, %r23; \
	cmpwi	%r22, __SIMPLELOCK_UNLOCKED; \
	beq+	2b; \
	b	3b; \
4:	li	%r21, __SIMPLELOCK_LOCKED; \
	stwcx.	%r21, %r20, %r23; \
	bne-	2b; \
	isync; \
	msync;
#define	FRAME_TLBMISSUNLOCK \
	sync; \
	lis	%r23, _C_LABEL(pmap_tlb_miss_lock)@h; \
	ori	%r23, %r23, _C_LABEL(pmap_tlb_miss_lock)@l; \
	li	%r22, __SIMPLELOCK_UNLOCKED; \
	stw	%r22, MTX_LOCK(%r23); \
	isync; \
	msync; \
	GET_CPUINFO(%r23); \
	ldint	%r22, CI_MTX_COUNT(%r23); \
	addi	%r22, %r22, 1; \
	stint	%r22, CI_MTX_COUNT(%r23); \
	isync;
#else /* !MULTIPROCESSOR */
#define	FRAME_TLBMISSLOCK
#define	FRAME_TLBMISSUNLOCK
#endif /* MULTIPROCESSOR */

	.text
	.p2align 4
_C_LABEL(critical_input_vector):
	/* MSR[ME] is unchanged, all others cleared */
	FRAME_CRIT_PROLOGUE		/* save SP r26-31 CR LR XER */
	FRAME_CRIT_ENTER(EXC_CII)
	bl	_C_LABEL(intr_critintr)	/* critintr(tf) */
	FRAME_CRIT_EXIT

	.p2align 4
_C_LABEL(machine_check_vector):
	/* all MSR bits are cleared */
	FRAME_MCHK_PROLOGUE		/* save SP r25-31 CR LR XER */
	FRAME_MCHK_ENTER(EXC_MCHK)
	/*
	 * MCAR/MCSR don't need to be saved early since MSR[ME] is cleared
	 * on entry.
	 */
	mfspr	%r7, SPR_MCAR
	mfspr	%r6, SPR_MCSR
	stw	%r6, FRAME_MCSR(%r1)
	stw	%r7, FRAME_MCAR(%r1)
	li	%r3, T_MACHINE_CHECK
	bl	_C_LABEL(trap)		/* trap(T_MACHINE_CHECK, tf) */
	FRAME_MCHK_EXIT

	.p2align 4
_C_LABEL(data_storage_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_PROLOGUE_DEAR_ESR		/* save r2 DEAR ESR r24-31 CR XER SRR */
	FRAME_ENTER_DEAR_ESR(EXC_DSI, %r4)
	li	%r3, T_DSI
	/* FRAME_ENTER leaves SRR1 in %r31 */
trapenter:
trapagain:
	wrtee	%r31			/* restore MSR[EE] */

	bl	_C_LABEL(trap)		/* trap(trapcode, tf) */
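/*
 * Common return path: once trap() returns, interrupts are disabled and,
 * for returns to user mode, any pending AST is folded in by rewriting the
 * frame as an EXC_AST/T_AST trap and looping back through trapagain.
 */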
_C_LABEL(trapexit):
	wrteei	0			/* disable interrupts */
#	andis.	%r0, %r31, PSL_CE@h
#	tweqi	%r0, 0
	andi.	%r4, %r31, PSL_PR	/* let's look at PSL_PR */
	beq	trapdone		/* if clear, skip to exit */
	lwz	%r4, L_MD_ASTPENDING(%r13) /* get ast pending */
	cmplwi	%r4, 0			/* is there an ast pending */
	beq+	trapdone		/* nope, proceed to exit */
	li	%r6, EXC_AST		/* yes. */
	stw	%r6, FRAME_EXC(%r1)	/* pretend this is an AST */
	addi	%r4, %r1, FRAME_TF	/* get address of trap frame */
	li	%r3, T_AST
	b	trapagain		/* and deal with it */
trapdone:
	FRAME_EXIT

	.p2align 4
_C_LABEL(instruction_storage_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_PROLOGUE_ESR		/* save ESR r2 r25-31 CR XER SRR0/1 */
	FRAME_ENTER_ESR(EXC_ISI, %r4)
	li	%r3, T_ISI
	b	trapenter

	.p2align 4
_ENTRY(external_input_vector)
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_INTR_PROLOGUE		/* save SP r25-31 CR LR XER */
	FRAME_INTR_ENTER(EXC_EXI)

	bl	_C_LABEL(intr_extintr)
_C_LABEL(intrcall):
	GET_CPUINFO(%r6)		/* get curcpu() */
	lwz	%r5, FRAME_SRR1(%r1)	/* get saved SRR1 */
#	andis.	%r0, %r5, PSL_CE@h
#	tweqi	%r0, 0
	andi.	%r4, %r5, PSL_PR	/* let's look at PSL_PR */
	beq	intrexit		/* if clear, skip to exit */
	lwz	%r4, L_MD_ASTPENDING(%r13) /* get ast pending */
	cmplwi	%r4, 0			/* is there an ast pending */
	beq+	intrexit		/* nope, proceed to exit */
	stmw	%r14, FRAME_R14(%r1)	/* save rest of registers */
	FRAME_SAVE_SPEFSCR
	mr	%r31, %r5		/* needed for trapagain */
	li	%r4, EXC_AST		/* */
	stw	%r4, FRAME_EXC(%r1)	/* pretend this is an AST */
	addi	%r4, %r1, FRAME_TF	/* get address of trap frame */
	li	%r3, T_AST
	b	trapagain		/* and deal with it */
intrexit:
	FRAME_INTR_EXIT

	.p2align 4
_C_LABEL(alignment_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_PROLOGUE_DEAR_ESR		/* save SP r25-31 CR LR XER */
	FRAME_ENTER_DEAR_ESR(EXC_ALI, %r4)
	li	%r3, T_ALIGNMENT
	b	trapenter

	.p2align 4
_C_LABEL(program_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_PROLOGUE_ESR		/* save SP r25-31 CR LR XER */
	FRAME_ENTER_ESR(EXC_PGM, %r4)
	li	%r3, T_PROGRAM
	b	trapenter

#ifdef SPR_IVOR7
	.p2align 4
_C_LABEL(fp_unavailable_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_PROLOGUE_ESR		/* save SP r25-31 CR LR XER */
	FRAME_ENTER_ESR(EXC_FPU, %r4)
	li	%r3, T_FP_UNAVAILABLE
	b	trapenter
#endif

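/*
 * System calls are dispatched indirectly: the handler address is fetched
 * from the proc at P_MD_SYSCALL and called through LR with the trapframe
 * as its argument, presumably so the same vector can serve different
 * emulations.
 */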
	.p2align 4
_C_LABEL(system_call_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_PROLOGUE			/* save SP r26-31 CR LR XER */
	FRAME_ENTER(EXC_SC, %r3)

	wrteei	1			/* enable interrupts */
	lwz	%r7, L_PROC(%r13)	/* get proc for lwp */
	lwz	%r8, P_MD_SYSCALL(%r7)	/* get syscall */
	mtlr	%r8			/* need to call indirect */
	blrl				/* syscall(tf) */
_C_LABEL(sctrapexit):
	wrteei	0			/* disable interrupts */
	lwz	%r4, L_MD_ASTPENDING(%r13) /* get ast pending */
	cmplwi	%r4, 0			/* is there an ast pending */
	beq+	trapdone		/* nope, proceed to exit */
	li	%r0, EXC_AST		/* yes. */
	stw	%r0, FRAME_EXC(%r1)	/* pretend this is an AST */
	addi	%r4, %r1, FRAME_TF	/* get address of trap frame */
	li	%r3, T_AST
	b	trapenter		/* and deal with it */

#ifdef SPR_IVOR9
	.p2align 4
_C_LABEL(ap_unavailable_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_PROLOGUE			/* save SP r25-31 CR LR XER */
	FRAME_ENTER(EXC_PGM, %r4)
	li	%r3, T_AP_UNAVAILABLE
	b	trapenter
#endif

	.p2align 4
_C_LABEL(decrementer_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_INTR_PROLOGUE		/* save SP r25-31 CR LR XER */
	FRAME_INTR_ENTER(EXC_DECR)

	bl	_C_LABEL(intr_decrintr)
	b	intrexit

	.p2align 4
_C_LABEL(fixed_interval_timer_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_PROLOGUE			/* save SP r25-31 CR LR XER */
	FRAME_INTR_ENTER(EXC_FIT)

	bl	_C_LABEL(intr_fitintr)
	b	intrexit

#ifdef E500_WDOG_STACK
	.data
	.lcomm	wdogstk,4096
#endif
	.text
	.p2align 4
_C_LABEL(watchdog_timer_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_CRIT_PROLOGUE		/* save SP r25-31 CR LR XER */
#ifdef E500_WDOG_STACK
	FRAME_WDOG_ENTER(EXC_WDOG, wdogstk+4096-CALLFRAMELEN)
#else
	FRAME_CRIT_ENTER(EXC_WDOG);
#endif

	bl	_C_LABEL(intr_wdogintr)
	FRAME_CRIT_EXIT

	.p2align 4
_C_LABEL(data_tlb_error_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_TLBPROLOGUE
	FRAME_TLBMISSLOCK
	/*
	 * Registers at this point:
	 *
	 *	r2 = cpu_info
	 *	r20 = scratch
	 *	r21 = scratch
	 *	r22 = scratch
	 *	r23 = scratch
	 *	r24 = DEAR
	 *	r25 = ESR
	 *	r26 = saved r2
	 *	r27 = CR
	 *	r28 = XER
	 *	r29 = scratch
	 *	r30 = SRR0
	 *	r31 = SRR1
	 *
	 * Except for r29, these values must be retained.  However we must
	 * be cognizant of nesting.  There are two cases here, both related.
	 *
	 * We get a critical input or machine check exception and the kernel
	 * stack doesn't have a TLB entry so we take an exception.  The other
	 * nesting path is that some page used by the exception handler causes
	 * a data TLB error.
	 *
	 * The second case (more probable) is that the PTE loading will fail
	 * so we will have to do a hard trap to resolve it.  But in doing so
	 * we need to save a trapframe which could result in another DTLB
	 * fault.
	 *
	 * In all cases, the save area stack shall protect us.
	 */
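	/*
	 * In other words: every nested entry pushes its own register slice
	 * (r20-r31 here) onto the SPRG3 save-area stack before touching
	 * anything else, so even a DTLB miss taken while building this
	 * trapframe, or while handling a critical or machine check
	 * exception, finds its caller's registers already parked.
	 */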
	/*
	 * Attempt to update the TLB from the page table.
	 */
	mflr	%r29			/* save LR */
	mr	%r23, %r24		/* address of exception */
	rlwinm	%r22, %r31,		/* index into ci_pmap_segtab */\
	    MSR_DS+PTR_SCALESHIFT+1, \
	    31-PTR_SCALESHIFT, \
	    31-PTR_SCALESHIFT		/* move PSL_DS[27] to bit 29 */
	bl	pte_load
	FRAME_TLBMISSUNLOCK
	mtlr	%r29			/* restore LR */
	/*
	 * If we returned, the PTE load failed, so let trap deal with it;
	 * the contents of r24-r31 (except r29) have been kept intact.
	 */
	FRAME_TLBENTER(EXC_DSI)
	li	%r3, T_DATA_TLB_ERROR
	b	trapenter

	.p2align 4
_C_LABEL(instruction_tlb_error_vector):
	/* MSR[CE], MSR[ME], MSR[DE] are unchanged, all others cleared */
	FRAME_TLBPROLOGUE
	FRAME_TLBMISSLOCK
	/*
	 * Attempt to update the TLB from the page table.
	 */
	mflr	%r29			/* save LR */
	mr	%r23, %r30		/* PC of exception */
	rlwinm	%r22, %r31,		/* index into ci_pmap_segtab */\
	    MSR_IS+PTR_SCALESHIFT+1, \
	    31-PTR_SCALESHIFT, \
	    31-PTR_SCALESHIFT		/* move PSL_IS[26] to bit 29 */
	bl	pte_load
	FRAME_TLBMISSUNLOCK
	mtlr	%r29			/* restore LR */
	/*
	 * If we returned, the PTE load failed, so let trap deal with it;
	 * the contents of r24-r31 (except r29) have been kept intact.
	 */
	FRAME_TLBENTER(EXC_ISI)
	li	%r3, T_INSTRUCTION_TLB_ERROR
	b	trapenter

	.p2align 4
_C_LABEL(debug_vector):
	FRAME_CRIT_PROLOGUE		/* save SP r25-31 CR LR XER */
	FRAME_CRIT_ENTER(EXC_DEBUG)
	mfspr	%r6, SPR_DBSR
	stw	%r6, FRAME_ESR(%r1)
	li	%r3, T_DEBUG
	bl	_C_LABEL(trap)
	FRAME_CRIT_EXIT

	.p2align 4
_C_LABEL(spv_unavailable_vector):
	FRAME_PROLOGUE_ESR		/* save SP r25-31 CR LR XER */
	FRAME_ENTER_ESR(EXC_VEC, %r4)
	li	%r3, T_SPE_UNAVAILABLE
	b	trapenter

	.p2align 4
_C_LABEL(fpdata_vector):
	FRAME_PROLOGUE_ESR		/* save SP r25-31 CR LR XER */
	FRAME_ENTER_ESR(EXC_FPA, %r4)
	li	%r3, T_EMBEDDED_FP_DATA
	b	trapenter

	.p2align 4
_C_LABEL(fpround_vector):
	FRAME_PROLOGUE_ESR		/* save SP r25-31 CR LR XER */
	FRAME_ENTER_ESR(EXC_FPA, %r4)
	li	%r3, T_EMBEDDED_FP_ROUND
	b	trapenter

	.p2align 4
_C_LABEL(perfmon_vector):
	FRAME_PROLOGUE_ESR		/* save SP r25-31 CR LR XER */
	FRAME_ENTER_ESR(EXC_PERF, %r4)
	li	%r3, T_EMBEDDED_PERF_MONITOR
	b	trapenter

	.p2align 4
pte_load:
	/*
	 * r2 = scratch
	 * r20 = scratch
	 * r21 = scratch
	 * r22 = index into ci_pmap_{kern,user}_segtab
	 * r23 = faulting address
	 * The rest are for reference and aren't modifiable.  If the load
	 * fails, they will be used by FRAME_TLBENTER to create the trapframe.
	 * r24 = DEAR
	 * r25 = ESR
	 * r26 = saved r2
	 * r27 = CR
	 * r28 = XER
	 * r29 = LR
	 * r30 = SRR0
	 * r31 = SRR1
	 */
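	/*
	 * Rough shape of the walk below, in C-like pseudo-code (illustrative
	 * only; the index arithmetic is the rlwinm bit extracts that follow):
	 *
	 *	stp = ci_pmap_kern_segtab or ci_pmap_user_segtab (per r22);
	 *	if (stp == NULL) return;		// fall back to trap()
	 *	ptp = stp[top ten addr bits];		// page table page
	 *	if (ptp == NULL) return;
	 *	pte = ptp[next ten addr bits];		// the PTE itself
	 *	if (pte == 0) return;
	 *	mask off PTE_xX/PTE_xW while PTE_UNSYNCED/PTE_UNMODIFIED are
	 *	still set, then build MAS1-MAS3 and tlbwe.
	 */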
	cmplwi	%cr2, %r22, 0		/* remember address space */
	GET_CPUINFO(%r2)
	addi	%r22, %r22, CI_PMAP_SEGTAB /* index into segtab(s) */
	lwzx	%r20, %r22, %r2		/* load kern/user L1 PT addr */
	cmplwi	%r20, 0			/* is segtab null? */
	beqlr	%cr0			/* yes, return to fallback to trap */

	rlwinm	%r22, %r23, NSEGPG_SCALESHIFT + PTR_SCALESHIFT, \
	    31-(NSEGPG_SCALESHIFT + PTR_SCALESHIFT - 1), \
	    31-PTR_SCALESHIFT		/* extract addr bits [0:9] to [20:29] */
	lwzx	%r20, %r22, %r20	/* load address of page table page */
	cmplwi	%r20, 0			/* is page null? */
	beqlr	%cr0			/* yes, return to fallback to trap */

	rlwinm	%r22, %r23, \
	    NSEGPG_SCALESHIFT + NPTEPG_SCALESHIFT + PTE_SCALESHIFT, \
	    31-(NPTEPG_SCALESHIFT + PTE_SCALESHIFT - 1), \
	    31-PTE_SCALESHIFT		/* extract addr bits [10:19] to [20:29] */
	lwzx	%r20, %r22, %r20	/* load PTE from page table page */
	cmplwi	%r20, 0			/* is there a valid PTE? */
	beqlr	%cr0			/* no, return to fallback to trap */

#if (PTE_UNSYNCED << 1) != PTE_xX
#error PTE_UNSYNCED definition error
#endif
#if (PTE_UNMODIFIED << 1) != PTE_xW
#error PTE_UNMODIFIED definition error
#endif
	andi.	%r22, %r20, (PTE_UNSYNCED|PTE_UNMODIFIED)
					/* Does the PTE need to be changed? */
	rotlwi	%r22, %r22, 1		/* if so, clear the right PTE bits */
	andc	%r20, %r20, %r22	/* pte &= ~((pte & (PTE_UNSYNCED|PTE_UNMODIFIED)) << 1) */

	/*
	 * r24-r31 = (no touch)
	 * r23 = scratch (was fault addr)
	 * r22 = scratch
	 * r21 = scratch
	 * r20 = pte
	 * cr2 = AS 0=eq/!0=ne
	 */

	/*
	 * This is all E500 specific.  We should have a patchable branch
	 * to support other BookE (440) implementations.
	 */
e500_pte_load:
	bne+	%cr2, 1f		/* user access? MAS1 is ok. */
	mfspr	%r22, SPR_MAS1		/* get MAS1 */
	lis	%r21, MAS1_TID@h	/* get TID mask */
	andc	%r22, %r22, %r21	/* clear TID */
	mtspr	SPR_MAS1, %r22		/* save MAS1 */
1:
	andi.	%r21, %r20, PTE_WIMGE_MASK /* extract WIMGE from PTE */
	cmplwi	%r21, PTE_M		/* if just PTE_M is set, */
	beq+	%cr0, 2f		/* skip munging mas2 */
	mfspr	%r22, SPR_MAS2		/* get MAS2 (updated by error) */
	clrrwi	%r22, %r22, PTE_RWX_SHIFT /* clear WIMGE bits */
	or	%r22, %r22, %r21	/* combine with MAS2 contents */
	mtspr	SPR_MAS2, %r22		/* put back into MAS2 */
2:
	/*
	 * r23 = fault addr
	 * r22 = scratch
	 * r21 = scratch
	 * r20 = pte
	 */

	/*
	 * In MAS3, the protection bits are in the low 6 bits:
	 *	UX SX UW SW UR SR
	 * The User bits are 1 bit left of their Supervisor counterparts.
	 * Rotate the PTE protection bits left until they wrap around to become
	 * the least significant bits, where the Supervisor protection bits
	 * are located.  Increase the rotate amount by 1 to place them where
	 * the User protection bits are located.  We get that 1 by extracting
	 * the MAS1[TS] (set for User access) and moving it to bit 31 (LSB).
	 */
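	/*
	 * Worked example: a user page with read+write PTE permissions ends
	 * up, after the rotrwi below, with MAS3_SW|MAS3_SR set.  For a user
	 * access MAS1[TS] is 1, so the rotlw shifts those up into
	 * MAS3_UW|MAS3_UR; the copy saved in r22 keeps MAS3_SW|MAS3_SR set
	 * as well, so the kernel can still reach the user page.  For a
	 * kernel access TS is 0 and the bits stay in the supervisor
	 * positions.
	 */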
	mfspr	%r21, SPR_MAS1		/* get MAS1 which has TS bit */
	extrwi	%r21, %r21, 1, 31-MAS1_TS_SHIFT
					/* extract MAS1_TS to LSB */
	clrrwi	%r23, %r20, PAGE_SHIFT	/* clear non-RPN bits from PTE */
	andi.	%r20, %r20, PTE_RWX_MASK /* isolate protection bits */
	rotrwi	%r20, %r20, PTE_RWX_SHIFT
	andi.	%r22, %r20, (MAS3_SW|MAS3_SR) /* user pages need to be R/W by kernel */
	rotlw	%r20, %r20, %r21	/* rotate protection to correct loc */
	or	%r20, %r20, %r22	/* combine system protection bits */
	or	%r23, %r23, %r20	/* combine RPN and protection bits */
	mtspr	SPR_MAS3, %r23		/* put into MAS3 */
	isync				/* because ECORE500RM tells us to */
	tlbwe				/* write the TLB entry */
	/*
	 * Increment a counter to show how many tlb misses we've handled here.
	 */
	lmw	%r30, CI_EV_TLBMISS_SOFT(%r2)
	addic	%r31, %r31, 1
	addze	%r30, %r30
	stmw	%r30, CI_EV_TLBMISS_SOFT(%r2)

	FRAME_TLBMISSUNLOCK

	/*
	 * Cleanup and leave.  We know any higher priority exception will
	 * save and restore SPRG1 and %r2 thereby preserving their values.
	 *
	 * r24 = DEAR (don't care)
	 * r25 = ESR (don't care)
	 * r26 = saved r2
	 * r27 = CR
	 * r28 = XER
	 * r29 = LR
	 * r30 = MSW of counter
	 * r31 = LSW of counter
	 */
	mtlr	%r29			/* restore Link Register */
	mtxer	%r28			/* restore XER */
	mtcr	%r27			/* restore Condition Register */
	mtsprg1	%r26			/* save saved r2 across load multiple */
	mfsprg3	%r2			/* get end of save area */
	addi	%r2,%r2,-4*(32-20)	/* adjust save area down */
	lmw	%r20,0(%r2)		/* restore r20-r31 */
	mtsprg3	%r2			/* save new end of save area */
	mfsprg1	%r2			/* restore r2 */
	rfi

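/*
 * exception_init below points IVPR at the image's upper half and each IVORn
 * at the matching handler's lower half, which assumes all of the vectors
 * above live in the same 64KB region as critical_input_vector.  It also
 * stores its argument (%r3) into powerpc_intrsw, but only on the primary
 * CPU (PIR == 0).
 */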
	.p2align 4
	.globl	_C_LABEL(exception_init)
_C_LABEL(exception_init):
	lis	%r6,_C_LABEL(critical_input_vector)@h
	mtspr	SPR_IVPR, %r6

	ori	%r5,%r6,_C_LABEL(critical_input_vector)@l
	mtspr	SPR_IVOR0, %r5

	ori	%r5,%r6,_C_LABEL(machine_check_vector)@l
	mtspr	SPR_IVOR1, %r5

	ori	%r5,%r6,_C_LABEL(data_storage_vector)@l
	mtspr	SPR_IVOR2, %r5

	ori	%r5,%r6,_C_LABEL(instruction_storage_vector)@l
	mtspr	SPR_IVOR3, %r5

	ori	%r5,%r6,_C_LABEL(external_input_vector)@l
	mtspr	SPR_IVOR4, %r5

	ori	%r5,%r6,_C_LABEL(alignment_vector)@l
	mtspr	SPR_IVOR5, %r5

	ori	%r5,%r6,_C_LABEL(program_vector)@l
	mtspr	SPR_IVOR6, %r5

#ifdef SPR_IVOR7
	ori	%r5,%r6,_C_LABEL(fp_unavailable_vector)@l
	mtspr	SPR_IVOR7, %r5
#endif

	ori	%r5,%r6,_C_LABEL(system_call_vector)@l
	mtspr	SPR_IVOR8, %r5

#ifdef SPR_IVOR9
	ori	%r5,%r6,_C_LABEL(ap_unavailable_vector)@l
	mtspr	SPR_IVOR9, %r5
#endif

	ori	%r5,%r6,_C_LABEL(decrementer_vector)@l
	mtspr	SPR_IVOR10, %r5

	ori	%r5,%r6,_C_LABEL(fixed_interval_timer_vector)@l
	mtspr	SPR_IVOR11, %r5

	ori	%r5,%r6,_C_LABEL(watchdog_timer_vector)@l
	mtspr	SPR_IVOR12, %r5

	ori	%r5,%r6,_C_LABEL(data_tlb_error_vector)@l
	mtspr	SPR_IVOR13, %r5

	ori	%r5,%r6,_C_LABEL(instruction_tlb_error_vector)@l
	mtspr	SPR_IVOR14, %r5

	ori	%r5,%r6,_C_LABEL(debug_vector)@l
	mtspr	SPR_IVOR15, %r5

	ori	%r5,%r6,_C_LABEL(spv_unavailable_vector)@l
	mtspr	SPR_IVOR32, %r5

	ori	%r5,%r6,_C_LABEL(fpdata_vector)@l
	mtspr	SPR_IVOR33, %r5

	ori	%r5,%r6,_C_LABEL(fpround_vector)@l
	mtspr	SPR_IVOR34, %r5

	ori	%r5,%r6,_C_LABEL(perfmon_vector)@l
	mtspr	SPR_IVOR35, %r5

	mfspr	%r5, SPR_PIR		/* get Processor ID register */
	cmplwi	%r5,0
	bnelr				/* return if non-0 (non-primary) */

	lis	%r5,_C_LABEL(powerpc_intrsw)@ha
	stw	%r3,_C_LABEL(powerpc_intrsw)@l(%r5)

	blr

#ifdef notyet
	.data
	.lcomm	ddbstk,4096
	.text

_ENTRY(cpu_Debugger)
	mflr	%r0
	stw	%r0, CFRAME_LR(%r1)

	mfmsr	%r3
	wrteei	0
	mr	%r4,%r1
	lis	%r10,ddbstk@ha
	addi	%r10,%r10,ddbstk@l
	sub	%r5,%r1,%r10
	cmplwi	%r5,4096
	blt	%cr0, 1f
	addi	%r1,%r10,4096-CALLFRAMELEN
1:
	stwu	%r4,-FRAMELEN(%r1)
	stw	%r4,FRAME_R1(%r1)
	stmw	%r13,FRAME_R13(%r1)
	mr	%r26,%r0
	mfcr	%r27
	mfxer	%r28
	mfctr	%r29
	mr	%r30,%r0
	mr	%r31,%r3
	stmw	%r26,FRAME_LR(%r1)
	mr	%r31,%r1
	mr	%r1,%r10
	addi	%r4,%r1,FRAME_TF
	li	%r3,EXC_PGM
	stw	%r3,FRAME_EXC(%r1)
	li	%r3,T_PROGRAM
	bl	_C_LABEL(trap)
	lmw	%r26,FRAME_LR(%r1)
	mtlr	%r26
	mtcr	%r27
	mtxer	%r28
	mtctr	%r29
	mr	%r0,%r31
	lmw	%r13,FRAME_R13(%r1)
	lwz	%r1,FRAME_R1(%r1)
	wrtee	%r0
	blr
#endif /* notyet */