/*	$NetBSD: exception_vector.S,v 1.18 2006/02/08 02:19:57 uwe Exp $	*/

/*-
 * Copyright (c) 2002 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *        This product includes software developed by the NetBSD
 *        Foundation, Inc. and its contributors.
 * 4. Neither the name of The NetBSD Foundation nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include "opt_cputype.h"
#include "opt_ddb.h"
#include "assym.h"

#include <sh3/param.h>
#include <sh3/asm.h>
#include <sh3/locore.h>
#include <sh3/exception.h>
#include <sh3/ubcreg.h>
#include <sh3/mmu_sh3.h>
#include <sh3/mmu_sh4.h>

__KERNEL_RCSID(0, "$NetBSD: exception_vector.S,v 1.18 2006/02/08 02:19:57 uwe Exp $")


/*
 * Exception vectors. The following routines are copied to vector addresses.
 *	sh_vector_generic:	VBR + 0x100
 *	sh_vector_tlbmiss:	VBR + 0x400
 *	sh_vector_interrupt:	VBR + 0x600
 */

#define VECTOR_END_MARKER(sym)			\
		.globl	_C_LABEL(sym);		\
	_C_LABEL(sym):

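/*
 * These routines are not executed in place.  Machine-dependent startup
 * code copies each of them to its slot behind VBR, using the *_end
 * marker emitted by VECTOR_END_MARKER to size the copy.  A minimal
 * sketch of that installation step, assuming a hypothetical "vbr"
 * pointer to the vector base page (the real copy is done by the
 * machine-dependent startup code, not in this file):
 *
 *	extern char sh_vector_generic[], sh_vector_generic_end[];
 *
 *	memcpy(vbr + 0x100, sh_vector_generic,
 *	    sh_vector_generic_end - sh_vector_generic);
 *
 * and likewise for the TLB miss vector at vbr + 0x400 and the interrupt
 * vector at vbr + 0x600.  This is why each routine must be position
 * independent and must fit in the gap before the next vector slot.
 */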

/*
 * LINTSTUB: Var: char sh_vector_generic[1];
 *
 * void sh_vector_generic(void) __attribute__((__noreturn__))
 *	Copied to VBR+0x100.  This code should be position independent
 *	and no more than 768 bytes long (== 0x400 - 0x100).
 */
NENTRY(sh_vector_generic)
	__EXCEPTION_ENTRY
	__INTR_MASK(r0, r1)
	/* Identify exception cause */
	MOV	(EXPEVT, r0)
	mov.l	@r0,	r0
	mov.l	r0,	@(TF_EXPEVT, r14) /* trapframe->tf_expevt = EXPEVT */
	/* Get curlwp */
	mov.l	_L.curlwp, r1
	mov.l	@r1,	r4	/* 1st arg */
	/* Get TEA */
	MOV	(TEA,	r1)
	mov.l	@r1,	r6	/* 3rd arg */
	/* Check TLB exception or not */
	mov.l	_L.TLB_PROT_ST, r1
	cmp/hi	r1,	r0
	bt	1f

	/* tlb_exception(curlwp, trapframe, trunc_page(TEA)); */
	mov.l	_L.VPN_MASK, r1
	and	r1,	r6	/* va = trunc_page(va) */
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	_L.tlb, r0
	jsr	@r0
	 mov	r14,	r5	/* 2nd arg */
	bra	2f
	 nop

	/* general_exception(curlwp, trapframe, TEA); */
1:	mov	r4,	r8
#ifdef DDB
	mov	#0,	r2
	MOV	(BBRA, r1)
	mov.w	r2,	@r1	/* disable UBC */
	mov.l	r2,	@(TF_UBC, r14)	/* clear trapframe->tf_ubc */
#endif /* DDB */
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	_L.general, r0
	jsr	@r0
	 mov	r14,	r5	/* 2nd arg */

	/* Check for ASTs on exit to user mode. */
	mov	r8,	r4
	mov.l	_L.ast,	r0
	jsr	@r0
	 mov	r14,	r5
#ifdef DDB	/* BBRA = trapframe->tf_ubc */
	__EXCEPTION_BLOCK(r0, r1)
	mov.l	@(TF_UBC, r14), r0
	MOV	(BBRA, r1)
	mov.w	r0,	@r1
#endif /* DDB */
2:	__EXCEPTION_RETURN
	/* NOTREACHED */
	.align	2
_L.curlwp:	.long	_C_LABEL(curlwp)
REG_SYMBOL(EXPEVT)
REG_SYMBOL(BBRA)
REG_SYMBOL(TEA)
_L.tlb:		.long	_C_LABEL(tlb_exception)
_L.general:	.long	_C_LABEL(general_exception)
_L.ast:		.long	_C_LABEL(ast)
_L.TLB_PROT_ST:	.long	0xc0
_L.VPN_MASK:	.long	0xfffff000

/* LINTSTUB: Var: char sh_vector_generic_end[1]; */
VECTOR_END_MARKER(sh_vector_generic_end)
	SET_ENTRY_SIZE(sh_vector_generic)
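
/*
 * In C terms, sh_vector_generic dispatches roughly as follows (a hedged
 * sketch of what the assembly above does, not the actual trap code; the
 * handler names come from the _L.* pointers above):
 *
 *	tf->tf_expevt = EXPEVT;
 *	if (EXPEVT <= 0xc0) {			// up to _L.TLB_PROT_ST
 *		tlb_exception(curlwp, tf, TEA & 0xfffff000);
 *	} else {
 *		general_exception(curlwp, tf, TEA);
 *		ast(curlwp, tf);		// ASTs only on this path
 *	}
 *
 * followed by __EXCEPTION_RETURN.  EXPEVT codes at or below 0xc0
 * (_L.TLB_PROT_ST, the TLB protection-violation-on-store code) are
 * treated as TLB faults and go to tlb_exception() with the fault
 * address truncated to a page boundary; everything else goes to
 * general_exception(), and only that path checks for ASTs (and, under
 * DDB, saves/restores the UBC break register) before returning.
 */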


#ifdef SH3
/*
 * LINTSTUB: Var: char sh3_vector_tlbmiss[1];
 *
 * void sh3_vector_tlbmiss(void) __attribute__((__noreturn__))
 *	Copied to VBR+0x400.  This code should be position independent
 *	and no more than 512 bytes long (== 0x600 - 0x400).
 */
NENTRY(sh3_vector_tlbmiss)
	__EXCEPTION_ENTRY
	mov	#(SH3_TEA & 0xff), r0
	mov.l	@r0,	r6	/* 3rd arg: va = TEA */
#if !defined(P1_STACK)
	/* Load kernel stack */
	mov.l	__L.VPN_MASK, r0
	and	r6,	r0
	tst	r0,	r0	/* check VPN == 0 */
	bt	6f
	mov.l	_L.CURUPTE, r1
	mov.l	@r1,	r1
	mov	#UPAGES,r3
	mov	#1,	r2
4:	mov.l	@r1+,	r7
	cmp/eq	r7,	r0	/* md_upte.addr: u-area VPN */
	bt	5f
	add	#4,	r1	/* skip md_upte.data */
	cmp/eq	r2,	r3
	bf/s	4b
	 add	#1,	r2
	bra	7f		/* pull insn at 6f into delay slot */
	 mov	#(SH3_EXPEVT & 0xff), r0
5:	mov.l	@r1,	r2	/* md_upte.data: u-area PTE */
	mov	#(SH3_PTEL & 0xff), r1
	mov.l	r2,	@r1
	mov	#(SH3_PTEH & 0xff), r1
	mov.l	@r1,	r2
	mov.l	__L.VPN_MASK, r0
	and	r2,	r0
	mov.l	r0,	@r1	/* ASID 0 */
	ldtlb
	bra	3f
	 mov.l	r2,	@r1	/* restore ASID */
#endif /* !P1_STACK */
6:	mov	#(SH3_EXPEVT & 0xff), r0
7:	mov.l	@r0,	r0
	mov.l	r0,	@(TF_EXPEVT, r14) /* trapframe->tf_expevt = EXPEVT */
	mov.l	2f,	r0
	mov.l	@r0,	r4	/* 1st arg */
	__INTR_MASK(r0, r1)
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	1f,	r0
	jsr	@r0
	 mov	r14,	r5	/* 2nd arg */
3:	__EXCEPTION_RETURN
	.align	2
2:		.long	_C_LABEL(curlwp)
1:		.long	_C_LABEL(tlb_exception)
__L.VPN_MASK:	.long	0xfffff000
_L.CURUPTE:	.long	_C_LABEL(curupte)

/* LINTSTUB: Var: char sh3_vector_tlbmiss_end[1]; */
VECTOR_END_MARKER(sh3_vector_tlbmiss_end)
	SET_ENTRY_SIZE(sh3_vector_tlbmiss)
#endif /* SH3 */
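
/*
 * A hedged C sketch of the sh3 TLB miss path above.  The !P1_STACK
 * block is the interesting part: it refills TLB entries for the kernel
 * stack (u-area) pages without calling into C, using the "curupte"
 * table of UPAGES { addr, data } pairs noted in the md_upte comments:
 *
 *	vpn = TEA & 0xfffff000;
 *	if (vpn != 0) {
 *		for (i = 0; i < UPAGES; i++) {
 *			if (curupte[i].addr == vpn) {
 *				PTEL = curupte[i].data;	// u-area PTE
 *				pteh = PTEH;		// VPN + ASID of fault
 *				PTEH = pteh & 0xfffff000;	// ASID 0
 *				ldtlb();
 *				PTEH = pteh;		// restore ASID
 *				return;			// no C call needed
 *			}
 *		}
 *	}
 *	tf->tf_expevt = EXPEVT;
 *	tlb_exception(curlwp, tf, TEA);	// everything else
 *
 * The field and array names mirror the comments above; the actual MD
 * definitions may differ.
 */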


#ifdef SH4
/*
 * LINTSTUB: Var: char sh4_vector_tlbmiss[1];
 *
 * void sh4_vector_tlbmiss(void) __attribute__((__noreturn__))
 *	Copied to VBR+0x400.  This code should be position independent
 *	and no more than 512 bytes long (== 0x600 - 0x400).
 */
NENTRY(sh4_vector_tlbmiss)
	__EXCEPTION_ENTRY
	mov.l	_L.TEA4, r0
	mov.l	@r0,	r6
	mov.l	___L.VPN_MASK, r1
	and	r1,	r6	/* va = trunc_page(va) */
	mov.l	_L.EXPEVT4, r0
	mov.l	@r0,	r0
	mov.l	r0,	@(TF_EXPEVT, r14) /* trapframe->tf_expevt = EXPEVT */
	mov.l	2f,	r0
	mov.l	@r0,	r4	/* 1st arg */
	__INTR_MASK(r0, r1)
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	1f,	r0
	jsr	@r0
	 mov	r14,	r5	/* 2nd arg */
	__EXCEPTION_RETURN
	.align	2
1:		.long	_C_LABEL(tlb_exception)
2:		.long	_C_LABEL(curlwp)
_L.EXPEVT4:	.long	SH4_EXPEVT
_L.TEA4:	.long	SH4_TEA
___L.VPN_MASK:	.long	0xfffff000

/* LINTSTUB: Var: char sh4_vector_tlbmiss_end[1]; */
VECTOR_END_MARKER(sh4_vector_tlbmiss_end)
	SET_ENTRY_SIZE(sh4_vector_tlbmiss)
#endif /* SH4 */
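
/*
 * The sh4 variant above has no equivalent of the sh3 u-area fast path;
 * in C terms it reduces to (a sketch, mirroring the code above):
 *
 *	tf->tf_expevt = EXPEVT;
 *	tlb_exception(curlwp, tf, TEA & 0xfffff000);
 */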


/*
 * LINTSTUB: Var: char sh_vector_interrupt[1];
 *
 * void sh_vector_interrupt(void) __attribute__((__noreturn__))
 *	Copied to VBR+0x600.  This code should be position independent.
 */
NENTRY(sh_vector_interrupt)
	__EXCEPTION_ENTRY
	xor	r0,	r0
	mov.l	r0,	@(TF_EXPEVT, r14) /* (for debug) */
	stc	r0_bank,r6	/* ssp */
	/* Enable exception for P3 access */
	__INTR_MASK(r0, r1)
	__EXCEPTION_UNBLOCK(r0, r1)
	/* uvmexp.intrs++ */
	mov.l	__L.uvmexp.intrs, r0
	mov.l	@r0,	r1
	add	#1,	r1
	mov.l	r1,	@r0
	/* Dispatch interrupt handler */
	mov.l	__L.intc_intr, r0
	jsr	@r0		/* intc_intr(ssr, spc, ssp) */
	 nop
	/* Check for ASTs on exit to user mode. */
	mov.l	1f,	r0
	mov.l	@r0,	r4	/* 1st arg */
	mov.l	__L.ast, r0
	jsr	@r0
	 mov	r14,	r5	/* 2nd arg */
	__EXCEPTION_RETURN
	.align	2
1:			.long	_C_LABEL(curlwp)
__L.intc_intr:		.long	_C_LABEL(intc_intr)
__L.ast:		.long	_C_LABEL(ast)
__L.uvmexp.intrs:	.long	_C_LABEL(uvmexp) + UVMEXP_INTRS

/* LINTSTUB: Var: char sh_vector_interrupt_end[1]; */
VECTOR_END_MARKER(sh_vector_interrupt_end)
	SET_ENTRY_SIZE(sh_vector_interrupt)
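
/*
 * A hedged C sketch of the interrupt path above (ssp is read from
 * r0_bank; ssr and spc are already in the first two argument registers
 * by the time of the call, per the intc_intr(ssr, spc, ssp) comment):
 *
 *	tf->tf_expevt = 0;		// for debug only
 *	uvmexp.intrs++;
 *	intc_intr(ssr, spc, ssp);	// dispatch the interrupt
 *	ast(curlwp, tf);		// check for ASTs on exit to user
 *
 * followed by __EXCEPTION_RETURN.
 */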