/* exception_vector.S revision 1.25 */
1/*	$NetBSD: exception_vector.S,v 1.25 2007/03/16 04:22:22 uwe Exp $	*/
2
3/*-
4 * Copyright (c) 2002 The NetBSD Foundation, Inc.
5 * All rights reserved.
6 *
7 * Redistribution and use in source and binary forms, with or without
8 * modification, are permitted provided that the following conditions
9 * are met:
10 * 1. Redistributions of source code must retain the above copyright
11 *    notice, this list of conditions and the following disclaimer.
12 * 2. Redistributions in binary form must reproduce the above copyright
13 *    notice, this list of conditions and the following disclaimer in the
14 *    documentation and/or other materials provided with the distribution.
15 * 3. All advertising materials mentioning features or use of this software
16 *    must display the following acknowledgement:
17 *        This product includes software developed by the NetBSD
18 *        Foundation, Inc. and its contributors.
19 * 4. Neither the name of The NetBSD Foundation nor the names of its
20 *    contributors may be used to endorse or promote products derived
21 *    from this software without specific prior written permission.
22 *
23 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
24 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
25 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
26 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
27 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
28 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
29 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
30 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
31 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
32 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
33 * POSSIBILITY OF SUCH DAMAGE.
34 */
35
36#include "opt_cputype.h"
37#include "opt_ddb.h"
38#include "assym.h"
39
40#include <sh3/param.h>
41#include <sh3/locore.h>
42#include <sh3/exception.h>
43#include <sh3/ubcreg.h>
44#include <sh3/mmu_sh3.h>
45#include <sh3/mmu_sh4.h>
46
47/*
48 * Align vectors more strictly here (where we don't really care) so
49 * that .align 5 (i.e. 32B cache line) before data block does the
50 * right thing w.r.t. final destinations after vectors are copied.
51 */
52#define _ALIGN_TEXT	.align 5
53#include <sh3/asm.h>
54
55__KERNEL_RCSID(0, "$NetBSD: exception_vector.S,v 1.25 2007/03/16 04:22:22 uwe Exp $")
56
57
58/*
59 * Exception vectors.
60 * The following routines are copied to vector addresses.
61 *	sh_vector_generic:	VBR + 0x100
62 *	sh_vector_tlbmiss:	VBR + 0x400
63 *	sh_vector_interrupt:	VBR + 0x600
64 */
65
66#define VECTOR_END_MARKER(sym)			\
67		.globl	_C_LABEL(sym);		\
68	_C_LABEL(sym):
69
70
71/*
72 * LINTSTUB: Var: char sh_vector_generic[1];
73 *
74 * void sh_vector_generic(void);
75 *	Copied to VBR+0x100.  This code should be position independent
 *	and maximum 768 bytes long (== 0x400 - 0x100).
77 */
/*
 * Common handler for all general (non-TLB-miss, non-interrupt)
 * exceptions.  On entry __EXCEPTION_ENTRY has saved state into a
 * trapframe; r14 points at it (see the TF_* stores below).
 */
NENTRY(sh_vector_generic)
	__EXCEPTION_ENTRY
	__INTR_MASK(r0, r1)
	/* Identify exception cause */
	MOV	(EXPEVT, r0)
	mov.l	@r0, r0
	mov.l	r0, @(TF_EXPEVT, r14)	/* tf->tf_expevt = EXPEVT */
	/* Get curlwp */
	mov.l	.Lg_curlwp, r1
	mov.l	@r1, r4			/* 1st arg */
	/* Get TEA */
	MOV	(TEA, r1)
	mov.l	@r1, r6			/* 3rd arg */
	/* Check TLB exception or not */
	mov.l	.Lg_TLB_PROT_ST, r1
	cmp/hi	r1, r0			/* T = (EXPEVT > EXPEVT_TLB_PROT_ST), */
	bt	1f			/*  i.e. not a TLB-related exception */

	/* tlb_exception(curlwp, tf, TEA); */
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	.Lg_tlb_exception, r0
	jsr	@r0
	 mov	r14, r5			/* 2nd arg (jsr delay slot) */
	bra	2f			/* TLB path skips the AST check */
	 nop

	/* general_exception(curlwp, tf, TEA); */
1:	mov	r4, r8			/* r8 = curlwp; callee-saved across calls */
#ifdef DDB
	mov	#0, r2
	MOV	(BBRA, r1)
	mov.w	r2, @r1			/* disable UBC */
	mov.l	r2, @(TF_UBC, r14)	/* clear tf->tf_ubc */
#endif /* DDB */
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	.Lg_general_exception, r0
	jsr	@r0
	 mov	r14, r5			/* 2nd arg (jsr delay slot) */

	/* Check for ASTs on exit to user mode. */
	mov	r8, r4			/* 1st arg: curlwp saved above */
	mov.l	.Lg_ast, r0
	jsr	@r0
	 mov	r14, r5			/* 2nd arg (jsr delay slot) */
#ifdef DDB	/* BBRA = tf->tf_ubc */
	__EXCEPTION_BLOCK(r0, r1)
	mov.l	@(TF_UBC, r14), r0	/* re-arm UBC from the trapframe */
	MOV	(BBRA, r1)
	mov.w	r0, @r1
#endif /* DDB */
2:	__EXCEPTION_RETURN

	/*
	 * Literal pool.  .align 5 (32-byte cache line) so the pool keeps
	 * its alignment at the final destination after the vector is
	 * copied to VBR+0x100 (see the _ALIGN_TEXT note at file top).
	 */
	.align	5
.Lg_curlwp:		.long	_C_LABEL(curlwp)
REG_SYMBOL(EXPEVT)
REG_SYMBOL(BBRA)
REG_SYMBOL(TEA)
.Lg_tlb_exception:	.long	_C_LABEL(tlb_exception)
.Lg_general_exception:	.long	_C_LABEL(general_exception)
.Lg_ast:		.long	_C_LABEL(ast)
.Lg_TLB_PROT_ST:	.long	EXPEVT_TLB_PROT_ST

/* LINTSTUB: Var: char sh_vector_generic_end[1]; */
VECTOR_END_MARKER(sh_vector_generic_end)
	SET_ENTRY_SIZE(sh_vector_generic)
143
144
145#ifdef SH3
146/*
147 * LINTSTUB: Var: char sh3_vector_tlbmiss[1];
148 *
149 * void sh3_vector_tlbmiss(void);
150 *	Copied to VBR+0x400.  This code should be position independent
151 *	and maximum 512 bytes long (== 0x600 - 0x400).
152 */
/*
 * SH3 TLB miss handler.  Fast-path refill for u-area pages (when the
 * kernel stack lives in P3), otherwise falls through to the generic
 * C handler tlb_exception().
 */
NENTRY(sh3_vector_tlbmiss)
	__EXCEPTION_ENTRY
	/*
	 * SH3 MMU registers live at 0xffffffXX with bit 7 of the low
	 * byte set, so the sign-extended 8-bit immediate regenerates
	 * the full P4 address — saves a literal-pool load.
	 */
	mov	#(SH3_TEA & 0xff), r0
	mov.l	@r0, r6		! 3rd arg: va = TEA

	!! if kernel stack is in P3, handle it here fast
#if !defined(P1_STACK)
	mov.l	.L3_VPN_MASK, r0
	and	r6, r0		! vpn
	tst	r0, r0
	bt	6f		! punt if vpn is 0

	mov.l	.L3_CURUPTE, r1
	mov.l	@r1, r1		! upte = &l->l_md.md_upte[0]
	mov	#UPAGES, r3	! loop limit

	!! for each page of u-area
4:	mov.l	@r1+, r7	! upte->addr: u-area VPN
	cmp/eq	r7, r0		! if (vpn == upte->addr)
	bt/s	5f		!     goto found;
	 dt	r3		! (delay slot) --r3; T=1 when it reaches 0
	bf/s	4b		! loop while r3 != 0
	 add	#4, r1		! skip upte->data; point to next md_upte[i]

	!! not a page of u-area, proceed to handler
	bra	7f		! pull insn at 6f into delay slot
	 mov	#(SH3_EXPEVT & 0xff), r0

	!! load entry for this uarea page into tlb
5:	mov.l	@r1, r2		! upte->data: u-area PTE
	mov	#(SH3_PTEL & 0xff), r1
	mov.l	r2, @r1		! PTEL = the PTE to load

	mov	#(SH3_PTEH & 0xff), r1
	mov.l	@r1, r2		! r2 = old PTEH (faulting VPN | ASID)
	mov.l	.L3_VPN_MASK, r0
	and	r2, r0		! keep VPN, clear ASID field
	mov.l	r0, @r1		! ASID 0

	ldtlb			! load PTEH/PTEL into the TLB

	bra	99f		! return
	 mov.l	r2, @r1		!  restore ASID (delay slot)
#endif /* !P1_STACK */

	!! tlb_exception(curlwp, trapframe, tea)
6:	mov	#(SH3_EXPEVT & 0xff), r0
7:	mov.l	@r0, r0		! (6f's branch enters here, r0 set in its slot)
	mov.l	r0, @(TF_EXPEVT, r14)	! tf->tf_expevt = EXPEVT
	mov.l	.L3_curlwp, r0
	mov.l	@r0, r4			! 1st arg: curlwp
	__INTR_MASK(r0, r1)
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	.L3_tlb_exception, r0
	jsr	@r0
	 mov	r14, r5			! 2nd arg: trap frame
99:	__EXCEPTION_RETURN

	! literal pool; .align 5 keeps it cache-line aligned after copy
	.align	5
.L3_curlwp:		.long	_C_LABEL(curlwp)
.L3_tlb_exception:	.long	_C_LABEL(tlb_exception)
.L3_VPN_MASK:		.long	0xfffff000
.L3_CURUPTE:		.long	_C_LABEL(curupte)

/* LINTSTUB: Var: char sh3_vector_tlbmiss_end[1]; */
VECTOR_END_MARKER(sh3_vector_tlbmiss_end)
	SET_ENTRY_SIZE(sh3_vector_tlbmiss)
220#endif /* SH3 */
221
222
223#ifdef SH4
224/*
225 * LINTSTUB: Var: char sh4_vector_tlbmiss[1];
226 *
227 * void sh4_vector_tlbmiss(void);
228 *	Copied to VBR+0x400.  This code should be position independent
229 *	and maximum 512 bytes long (== 0x600 - 0x400).
230 */
/*
 * SH4 TLB miss handler.  No u-area fast path here (unlike the SH3
 * variant): all misses go straight to tlb_exception().  SH4 MMU
 * registers are not reachable via a sign-extended 8-bit immediate,
 * hence the literal-pool loads of their addresses.
 */
NENTRY(sh4_vector_tlbmiss)
	__EXCEPTION_ENTRY
	mov.l	.L4_TEA4, r0
	mov.l	@r0, r6			/* 3rd arg: va = TEA */
	mov.l	.L4_EXPEVT4, r0
	mov.l	@r0, r0
	mov.l	r0, @(TF_EXPEVT, r14)	/* tf->tf_expevt = EXPEVT */
	mov.l	.L4_curlwp, r0
	mov.l	@r0, r4			/* 1st arg */
	__INTR_MASK(r0, r1)
	__EXCEPTION_UNBLOCK(r0, r1)
	/* tlb_exception(curlwp, tf, TEA); */
	mov.l	.L4_tlb_exception, r0
	jsr	@r0
	 mov	r14, r5			/* 2nd arg (jsr delay slot) */
	__EXCEPTION_RETURN

	/* literal pool; .align 5 keeps it cache-line aligned after copy */
	.align	5
.L4_tlb_exception:	.long	_C_LABEL(tlb_exception)
.L4_curlwp:		.long	_C_LABEL(curlwp)
.L4_EXPEVT4:		.long	SH4_EXPEVT
.L4_TEA4:		.long	SH4_TEA

/* LINTSTUB: Var: char sh4_vector_tlbmiss_end[1]; */
VECTOR_END_MARKER(sh4_vector_tlbmiss_end)
	SET_ENTRY_SIZE(sh4_vector_tlbmiss)
256#endif /* SH4 */
257
258
259/*
260 * LINTSTUB: Var: char sh_vector_interrupt[1];
261 *
262 * void sh_vector_interrupt(void);
263 *	Copied to VBR+0x600.  This code should be position independent.
264 */
/*
 * Hardware interrupt entry (copied to VBR+0x600).  Builds the
 * trapframe (r14 = tf), bumps uvmexp.intrs, dispatches to the C
 * interrupt dispatcher, then runs the AST check before returning.
 */
NENTRY(sh_vector_interrupt)
	__EXCEPTION_ENTRY
	xor	r0, r0
	mov.l	r0, @(TF_EXPEVT, r14)	/* (for debug) */
	stc	r0_bank, r6		/* ssp: 3rd arg to intc_intr */
	/* Enable exceptions for P3 access */
	__INTR_MASK(r0, r1)
	__EXCEPTION_UNBLOCK(r0, r1)
	/* ++uvmexp.intrs */
	mov.l	.Li_uvmexp_intrs, r0
	mov.l	@r0, r1
	add	#1, r1			/* was "add #1 r1": missing comma */
	mov.l	r1, @r0
	/* Dispatch interrupt handler */
	mov.l	.Li_intc_intr, r0
	jsr	@r0		/* intc_intr(ssr, spc, ssp) */
	 nop
	/* Check for ASTs on exit to user mode. */
	mov.l	.Li_curlwp, r0
	mov.l	@r0, r4		/* 1st arg */
	mov.l	.Li_ast, r0
	jsr	@r0
	 mov	r14, r5		/* 2nd arg (jsr delay slot) */
	__EXCEPTION_RETURN

	/* literal pool; .align 5 keeps it cache-line aligned after copy */
	.align	5
.Li_curlwp:		.long	_C_LABEL(curlwp)
.Li_intc_intr:		.long	_C_LABEL(intc_intr)
.Li_ast:		.long	_C_LABEL(ast)
.Li_uvmexp_intrs:	.long	_C_LABEL(uvmexp) + UVMEXP_INTRS

/* LINTSTUB: Var: char sh_vector_interrupt_end[1]; */
VECTOR_END_MARKER(sh_vector_interrupt_end)
	SET_ENTRY_SIZE(sh_vector_interrupt)
299