/*	$NetBSD: exception_vector.S,v 1.23 2007/03/15 01:54:18 uwe Exp $	*/

/*-
 * Copyright (c) 2002 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *        This product includes software developed by the NetBSD
 *        Foundation, Inc. and its contributors.
 * 4. Neither the name of The NetBSD Foundation nor the names of its
 *    contributors may be used to endorse or promote products derived
 *    from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include "opt_cputype.h"
#include "opt_ddb.h"
#include "assym.h"

#include <sh3/param.h>
#include <sh3/locore.h>
#include <sh3/exception.h>
#include <sh3/ubcreg.h>
#include <sh3/mmu_sh3.h>
#include <sh3/mmu_sh4.h>

/*
 * Align vectors more strictly here (where we don't really care) so
 * that .align 5 (i.e. 32B cache line) before data block does the
 * right thing w.r.t. final destinations after vectors are copied.
 * NB: must be defined before <sh3/asm.h> is pulled in, since that
 * header expands _ALIGN_TEXT in its entry macros.
 */
#define _ALIGN_TEXT	.align 5
#include <sh3/asm.h>

__KERNEL_RCSID(0, "$NetBSD: exception_vector.S,v 1.23 2007/03/15 01:54:18 uwe Exp $")

/*
 * Exception vectors.
 * The following routines are copied to vector addresses.
 *	sh_vector_generic:	VBR + 0x100
 *	sh_vector_tlbmiss:	VBR + 0x400
 *	sh_vector_interrupt:	VBR + 0x600
 */

/*
 * Emit a global end-of-vector label so C code can compute each
 * vector's size (end - start) when copying it to its VBR slot.
 */
#define VECTOR_END_MARKER(sym)			\
		.globl	_C_LABEL(sym);		\
	_C_LABEL(sym):

/*
 * LINTSTUB: Var: char sh_vector_generic[1];
 *
 * void sh_vector_generic(void);
 *	Copied to VBR+0x100.  This code should be position independent
 *	and maximum 768 bytes long (== 0x400 - 0x100).
 *
 *	Dispatches a general exception to tlb_exception() (for TLB
 *	protection-violation/initial-write events) or to
 *	general_exception(), then checks for ASTs on return to user.
 *	r14 = trapframe pointer (set up by __EXCEPTION_ENTRY);
 *	r8 = curlwp, preserved across the C calls.
 */
NENTRY(sh_vector_generic)
	__EXCEPTION_ENTRY
	__INTR_MASK(r0, r1)
	/* Identify exception cause */
	MOV	(EXPEVT, r0)
	mov.l	@r0, r0
	mov.l	r0, @(TF_EXPEVT, r14)	/* tf->tf_expevt = EXPEVT */
	/* Get curlwp */
	mov.l	.Lg_curlwp, r1
	mov.l	@r1, r4			/* 1st arg */
	/* Get TEA */
	MOV	(TEA, r1)
	mov.l	@r1, r6			/* 3rd arg */
	/* Check TLB exception or not */
	mov.l	.Lg_TLB_PROT_ST, r1
	cmp/hi	r1, r0
	bt	1f

	/* tlb_exception(curlwp, tf, TEA); */
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	.Lg_tlb_exception, r0
	jsr	@r0
	 mov	r14, r5			/* 2nd arg */
	bra	2f
	 nop

	/* general_exception(curlwp, tf, TEA); */
1:	mov	r4, r8			/* stash curlwp in callee-saved r8 */
#ifdef DDB
	mov	#0, r2
	MOV	(BBRA, r1)
	mov.w	r2, @r1			/* disable UBC */
	mov.l	r2, @(TF_UBC, r14)	/* clear tf->tf_ubc */
#endif /* DDB */
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	.Lg_general_exception, r0
	jsr	@r0
	 mov	r14, r5			/* 2nd arg */

	/* Check for ASTs on exit to user mode. */
	mov	r8, r4			/* 1st arg: curlwp saved above */
	mov.l	.Lg_ast, r0
	jsr	@r0
	 mov	r14, r5
#ifdef DDB	/* BBRA = tf->tf_ubc */
	__EXCEPTION_BLOCK(r0, r1)
	mov.l	@(TF_UBC, r14), r0
	MOV	(BBRA, r1)
	mov.w	r0, @r1
#endif /* DDB */
2:	__EXCEPTION_RETURN

	.align	5
.Lg_curlwp:		.long	_C_LABEL(curlwp)
REG_SYMBOL(EXPEVT)
REG_SYMBOL(BBRA)
REG_SYMBOL(TEA)
.Lg_tlb_exception:	.long	_C_LABEL(tlb_exception)
.Lg_general_exception:	.long	_C_LABEL(general_exception)
.Lg_ast:		.long	_C_LABEL(ast)
.Lg_TLB_PROT_ST:	.long	EXPEVT_TLB_PROT_ST

/* LINTSTUB: Var: char sh_vector_generic_end[1]; */
VECTOR_END_MARKER(sh_vector_generic_end)
	SET_ENTRY_SIZE(sh_vector_generic)

#ifdef SH3
/*
 * LINTSTUB: Var: char sh3_vector_tlbmiss[1];
 *
 * void sh3_vector_tlbmiss(void);
 *	Copied to VBR+0x400.  This code should be position independent
 *	and maximum 512 bytes long (== 0x600 - 0x400).
 *
 *	SH3 TLB-miss fast path: when the faulting VPN matches one of
 *	curlwp's u-area pages (curupte[]), load the PTE directly with
 *	ldtlb (temporarily forcing ASID 0); otherwise fall through to
 *	the generic tlb_exception(curlwp, tf, TEA) handler.
 *	NB: SH3 control registers are addressed via sign-extended
 *	8-bit immediates (mov #(SH3_xxx & 0xff), r0), which only works
 *	because they live in the 0xffffffxx range.
 */
NENTRY(sh3_vector_tlbmiss)
	__EXCEPTION_ENTRY
	mov	#(SH3_TEA & 0xff), r0
	mov.l	@r0, r6		/* 3rd arg: va = TEA */
#if !defined(P1_STACK)
	/* Load kernel stack */
	mov.l	.L3_VPN_MASK, r0
	and	r6, r0
	tst	r0, r0		/* check VPN == 0 */
	bt	6f
	mov.l	.L3_CURUPTE, r1
	mov.l	@r1, r1
	mov	#UPAGES, r3
	mov	#1, r2
4:	mov.l	@r1+, r7
	cmp/eq	r7, r0		/* md_upte.addr: u-area VPN */
	bt	5f
	add	#4, r1		/* skip md_upte.data */
	cmp/eq	r2, r3
	bf/s	4b
	 add	#1, r2
	bra	7f		/* pull insn at 6f into delay slot */
	 mov	#(SH3_EXPEVT & 0xff), r0
5:	mov.l	@r1, r2		/* md_upte.data: u-area PTE */
	mov	#(SH3_PTEL & 0xff), r1
	mov.l	r2, @r1
	mov	#(SH3_PTEH & 0xff), r1
	mov.l	@r1, r2
	mov.l	.L3_VPN_MASK, r0
	and	r2, r0
	mov.l	r0, @r1		/* ASID 0 */
	ldtlb
	bra	3f
	 mov.l	r2, @r1		/* restore ASID */
#endif /* !P1_STACK */
6:	mov	#(SH3_EXPEVT & 0xff), r0
7:	mov.l	@r0, r0
	mov.l	r0, @(TF_EXPEVT, r14)	/* tf->tf_expevt = EXPEVT */
	mov.l	.L3_curlwp, r0
	mov.l	@r0, r4		/* 1st arg */
	__INTR_MASK(r0, r1)
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	.L3_tlb_exception, r0
	jsr	@r0
	 mov	r14, r5		/* 2nd arg */
3:	__EXCEPTION_RETURN

	.align	5
.L3_curlwp:		.long	_C_LABEL(curlwp)
.L3_tlb_exception:	.long	_C_LABEL(tlb_exception)
.L3_VPN_MASK:		.long	0xfffff000
.L3_CURUPTE:		.long	_C_LABEL(curupte)

/* LINTSTUB: Var: char sh3_vector_tlbmiss_end[1]; */
VECTOR_END_MARKER(sh3_vector_tlbmiss_end)
	SET_ENTRY_SIZE(sh3_vector_tlbmiss)
#endif /* SH3 */

#ifdef SH4
/*
 * LINTSTUB: Var: char sh4_vector_tlbmiss[1];
 *
 * void sh4_vector_tlbmiss(void);
 *	Copied to VBR+0x400.  This code should be position independent
 *	and maximum 512 bytes long (== 0x600 - 0x400).
 *
 *	SH4 has no u-area fast path here; always call
 *	tlb_exception(curlwp, tf, TEA).  Register addresses are loaded
 *	from the literal pool (no 8-bit immediate trick as on SH3).
 */
NENTRY(sh4_vector_tlbmiss)
	__EXCEPTION_ENTRY
	mov.l	.L4_TEA4, r0
	mov.l	@r0, r6			/* 3rd arg: va = TEA */
	mov.l	.L4_EXPEVT4, r0
	mov.l	@r0, r0
	mov.l	r0, @(TF_EXPEVT, r14)	/* tf->tf_expevt = EXPEVT */
	mov.l	.L4_curlwp, r0
	mov.l	@r0, r4			/* 1st arg */
	__INTR_MASK(r0, r1)
	__EXCEPTION_UNBLOCK(r0, r1)
	mov.l	.L4_tlb_exception, r0
	jsr	@r0
	 mov	r14, r5			/* 2nd arg */
	__EXCEPTION_RETURN

	.align	5
.L4_tlb_exception:	.long	_C_LABEL(tlb_exception)
.L4_curlwp:		.long	_C_LABEL(curlwp)
.L4_EXPEVT4:		.long	SH4_EXPEVT
.L4_TEA4:		.long	SH4_TEA

/* LINTSTUB: Var: char sh4_vector_tlbmiss_end[1]; */
VECTOR_END_MARKER(sh4_vector_tlbmiss_end)
	SET_ENTRY_SIZE(sh4_vector_tlbmiss)
#endif /* SH4 */

/*
 * LINTSTUB: Var: char sh_vector_interrupt[1];
 *
 * void sh_vector_interrupt(void);
 *	Copied to VBR+0x600.  This code should be position independent.
 *
 *	Bumps uvmexp.intrs, dispatches to intc_intr(ssr, spc, ssp),
 *	then checks for ASTs on exit to user mode.
 */
NENTRY(sh_vector_interrupt)
	__EXCEPTION_ENTRY
	xor	r0, r0
	mov.l	r0, @(TF_EXPEVT, r14)	/* (for debug) */
	stc	r0_bank, r6		/* ssp */
	/* Enable exceptions for P3 access */
	__INTR_MASK(r0, r1)
	__EXCEPTION_UNBLOCK(r0, r1)
	/* ++uvmexp.intrs */
	mov.l	.Li_uvmexp_intrs, r0
	mov.l	@r0, r1
	add	#1, r1			/* was "add #1 r1" - missing comma */
	mov.l	r1, @r0
	/* Dispatch interrupt handler */
	mov.l	.Li_intc_intr, r0
	jsr	@r0		/* intc_intr(ssr, spc, ssp) */
	 nop
	/* Check for ASTs on exit to user mode. */
	mov.l	.Li_curlwp, r0
	mov.l	@r0, r4		/* 1st arg */
	mov.l	.Li_ast, r0
	jsr	@r0
	 mov	r14, r5		/* 2nd arg */
	__EXCEPTION_RETURN

	.align	5
.Li_curlwp:		.long	_C_LABEL(curlwp)
.Li_intc_intr:		.long	_C_LABEL(intc_intr)
.Li_ast:		.long	_C_LABEL(ast)
.Li_uvmexp_intrs:	.long	_C_LABEL(uvmexp) + UVMEXP_INTRS

/* LINTSTUB: Var: char sh_vector_interrupt_end[1]; */
VECTOR_END_MARKER(sh_vector_interrupt_end)
	SET_ENTRY_SIZE(sh_vector_interrupt)