/*	$NetBSD: locore.h,v 1.20 2011/01/18 00:26:57 joerg Exp $	*/

/*-
 * Copyright (c) 2002 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

29 #ifdef _LOCORE
30
31 #ifdef __STDC__
32 #if defined(SH3) && defined(SH4)
33 #define MOV(x, r) mov.l .L_ ## x, r; mov.l @r, r
34 #define REG_SYMBOL(x) .L_ ## x: .long _C_LABEL(__sh_ ## x)
35 #define FUNC_SYMBOL(x) .L_ ## x: .long _C_LABEL(__sh_ ## x)
36 #elif defined(SH3)
37 #define MOV(x, r) mov.l .L_ ## x, r
38 #define REG_SYMBOL(x) .L_ ## x: .long SH3_ ## x
39 #define FUNC_SYMBOL(x) .L_ ## x: .long _C_LABEL(sh3_ ## x)
40 #elif defined(SH4)
41 #define MOV(x, r) mov.l .L_ ## x, r
42 #define REG_SYMBOL(x) .L_ ## x: .long SH4_ ## x
43 #define FUNC_SYMBOL(x) .L_ ## x: .long _C_LABEL(sh4_ ## x)
44 #endif /* SH3 && SH4 */
45 #else /* !__STDC__ */
46 #if defined(SH3) && defined(SH4)
47 #define MOV(x, r) mov.l .L_/**/x, r; mov.l @r, r
48 #define REG_SYMBOL(x) .L_/**/x: .long _C_LABEL(__sh_/**/x)
49 #define FUNC_SYMBOL(x) .L_/**/x: .long _C_LABEL(__sh_/**/x)
50 #elif defined(SH3)
51 #define MOV(x, r) mov.l .L_/**/x, r
52 #define REG_SYMBOL(x) .L_/**/x: .long SH3_/**/x
53 #define FUNC_SYMBOL(x) .L_/**/x: .long _C_LABEL(sh3_/**/x)
54 #elif defined(SH4)
55 #define MOV(x, r) mov.l .L_/**/x, r
56 #define REG_SYMBOL(x) .L_/**/x: .long SH4_/**/x
57 #define FUNC_SYMBOL(x) .L_/**/x: .long _C_LABEL(sh4_/**/x)
58 #endif /* SH3 && SH4 */
59 #endif /* __STDC__ */
60
/*
 * BANK1 r6 contains current trapframe pointer.
 * BANK1 r7 contains bottom address of lwp's kernel stack.
 */
/*
 * __EXCEPTION_ENTRY:
 *	+ setup stack pointer
 *	+ save all registers to trapframe.
 *	+ setup kernel stack.
 *	+ change bank from 1 to 0
 *	+ set BANK0 (r4, r5, r6) = (ssr, spc, ssp)
 *
 * Runs with SR.RB=1 (register bank 1), so r0-r7 here are the BANK1
 * copies; the interrupted context's r0-r7 are saved via stc from
 * r0_bank..r7_bank.  The trapframe is built downward from BANK1 r6.
 */
#define	__EXCEPTION_ENTRY						;\
	/* Check kernel/user mode. */					;\
	mov	#0x40,	r3						;\
	swap.b	r3,	r3						;\
	stc	ssr,	r2						;\
	swap.w	r3,	r3	/* r3 = 0x40000000 (SR.MD mask) */	;\
	mov	r2,	r0	/* r2 = r0 = SSR */			;\
	and	r3,	r0						;\
	tst	r0,	r0	/* if (SSR.MD == 0) T = 1 */		;\
	mov	r14,	r1						;\
	mov	r6,	r14	/* frame pointer */			;\
	bf/s	1f		/* T==0 ...Exception from kernel mode */;\
	 mov	r15,	r0	/* (delay slot) r0 = old sp */		;\
	/* Exception from user mode */					;\
	mov	r7,	r15	/* change to kernel stack */		;\
1:									;\
	/* Save registers */						;\
	mov.l	r1,	@-r14	/* tf_r14 */				;\
	mov.l	r0,	@-r14	/* tf_r15 */				;\
	stc.l	r0_bank, @-r14	/* tf_r0 */				;\
	stc.l	r1_bank, @-r14	/* tf_r1 */				;\
	stc.l	r2_bank, @-r14	/* tf_r2 */				;\
	stc.l	r3_bank, @-r14	/* tf_r3 */				;\
	stc.l	r4_bank, @-r14	/* tf_r4 */				;\
	stc.l	r5_bank, @-r14	/* tf_r5 */				;\
	stc.l	r6_bank, @-r14	/* tf_r6 */				;\
	stc.l	r7_bank, @-r14	/* tf_r7 */				;\
	mov.l	r8,	@-r14	/* tf_r8 */				;\
	mov.l	r9,	@-r14	/* tf_r9 */				;\
	mov.l	r10,	@-r14	/* tf_r10 */				;\
	mov.l	r11,	@-r14	/* tf_r11 */				;\
	mov.l	r12,	@-r14	/* tf_r12 */				;\
	mov.l	r13,	@-r14	/* tf_r13 */				;\
	sts.l	pr,	@-r14	/* tf_pr */				;\
	sts.l	mach,	@-r14	/* tf_mach*/				;\
	sts.l	macl,	@-r14	/* tf_macl*/				;\
	stc.l	gbr,	@-r14	/* tf_gbr */				;\
	mov.l	r2,	@-r14	/* tf_ssr */				;\
	stc.l	spc,	@-r14	/* tf_spc */				;\
	add	#-8,	r14	/* skip tf_ubc, tf_expevt */		;\
	mov	r14,	r6	/* store frame pointer */		;\
	/* Change register bank to 0 */					;\
	shlr	r3		/* r3 = 0x20000000 (SR.RB mask) */	;\
	stc	sr,	r1	/* r1 = SR */				;\
	not	r3,	r3						;\
	and	r1,	r3						;\
	ldc	r3,	sr	/* SR.RB = 0 */				;\
	/* Set up arguments. r4 = ssr, r5 = spc */			;\
	stc	r2_bank, r4						;\
	stc	spc,	r5
123
/*
 * __EXCEPTION_RETURN:
 *	+ block exceptions
 *	+ restore all registers from stack.
 *	+ rte.
 *
 * Inverse of __EXCEPTION_ENTRY: pops the trapframe (walking upward
 * with post-increment loads, mirroring the pre-decrement stores) and
 * restores BANK1 r6 (the saved frame pointer) to the frame above.
 */
#define	__EXCEPTION_RETURN						;\
	mov	#0x10,	r0						;\
	swap.b	r0,	r0						;\
	swap.w	r0,	r0	/* r0 = 0x10000000 (SR.BL mask) */	;\
	stc	sr,	r1						;\
	or	r0,	r1						;\
	ldc	r1,	sr	/* SR.BL = 1 */				;\
	stc	r6_bank, r0						;\
	mov	r0,	r14						;\
	add	#TF_SIZE, r0						;\
	ldc	r0,	r6_bank	/* roll up frame pointer */		;\
	add	#8,	r14	/* skip tf_expevt, tf_ubc */		;\
	ldc.l	@r14+,	spc	/* tf_spc */				;\
	ldc.l	@r14+,	ssr	/* tf_ssr */				;\
	ldc.l	@r14+,	gbr	/* tf_gbr */				;\
	lds.l	@r14+,	macl	/* tf_macl*/				;\
	lds.l	@r14+,	mach	/* tf_mach*/				;\
	lds.l	@r14+,	pr	/* tf_pr */				;\
	mov.l	@r14+,	r13	/* tf_r13 */				;\
	mov.l	@r14+,	r12	/* tf_r12 */				;\
	mov.l	@r14+,	r11	/* tf_r11 */				;\
	mov.l	@r14+,	r10	/* tf_r10 */				;\
	mov.l	@r14+,	r9	/* tf_r9 */				;\
	mov.l	@r14+,	r8	/* tf_r8 */				;\
	mov.l	@r14+,	r7	/* tf_r7 */				;\
	mov.l	@r14+,	r6	/* tf_r6 */				;\
	mov.l	@r14+,	r5	/* tf_r5 */				;\
	mov.l	@r14+,	r4	/* tf_r4 */				;\
	mov.l	@r14+,	r3	/* tf_r3 */				;\
	mov.l	@r14+,	r2	/* tf_r2 */				;\
	mov.l	@r14+,	r1	/* tf_r1 */				;\
	mov.l	@r14+,	r0	/* tf_r0 */				;\
	mov.l	@r14+,	r15	/* tf_r15 (comma was missing) */	;\
	mov.l	@r14+,	r14	/* tf_r14 */				;\
	rte								;\
	 nop			/* (delay slot) */
166
167
168 /*
169 * Macros to disable and enable exceptions (including interrupts).
170 * This modifies SR.BL
171 */
172
173 #define __EXCEPTION_BLOCK(Rn, Rm) ;\
174 mov #0x10, Rn ;\
175 swap.b Rn, Rn ;\
176 swap.w Rn, Rn /* Rn = 0x10000000 */ ;\
177 stc sr, Rm ;\
178 or Rm, Rn ;\
179 ldc Rn, sr /* block exceptions */
180
181 #define __EXCEPTION_UNBLOCK(Rn, Rm) ;\
182 mov #0xef, Rn /* ~0x10 */ ;\
183 swap.b Rn, Rn ;\
184 swap.w Rn, Rn /* Rn = ~0x10000000 */ ;\
185 stc sr, Rm ;\
186 and Rn, Rm ;\
187 ldc Rm, sr /* unblock exceptions */
188
189 /*
190 * Macros to disable and enable interrupts.
191 * This modifies SR.I[0-3]
192 */
193 #define __INTR_MASK(Rn, Rm) ;\
194 mov #0x78, Rn ;\
195 shll Rn /* Rn = 0x000000f0 */ ;\
196 stc sr, Rm ;\
197 or Rn, Rm ;\
198 ldc Rm, sr /* mask all interrupts */
199
200 #define __INTR_UNMASK(Rn, Rm) ;\
201 mov #0x78, Rn ;\
202 shll Rn /* Rn = 0x000000f0 */ ;\
203 not Rn, Rn ;\
204 stc sr, Rm ;\
205 and Rn, Rm ;\
206 ldc Rm, sr /* unmask all interrupts */
207
208 #else /* !_LOCORE */
209
210 void sh3_switch_setup(struct lwp *);
211 void sh4_switch_setup(struct lwp *);
212 void sh3_switch_resume(struct lwp *);
213 void sh4_switch_resume(struct lwp *);
214 extern void (*__sh_switch_resume)(struct lwp *);
215
216 #endif /* !_LOCORE */
217