/*	$NetBSD: locore.h,v 1.18 2008/05/31 22:45:32 uwe Exp $	*/

/*-
 * Copyright (c) 2002 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifdef _LOCORE

#if defined(SH3) && defined(SH4)
#define	MOV(x, r)	mov.l .L_/**/x, r; mov.l @r, r
#define	REG_SYMBOL(x)	.L_/**/x:	.long	_C_LABEL(__sh_/**/x)
#define	FUNC_SYMBOL(x)	.L_/**/x:	.long	_C_LABEL(__sh_/**/x)
#elif defined(SH3)
#define	MOV(x, r)	mov.l .L_/**/x, r
#define	REG_SYMBOL(x)	.L_/**/x:	.long	SH3_/**/x
#define	FUNC_SYMBOL(x)	.L_/**/x:	.long	_C_LABEL(sh3_/**/x)
#elif defined(SH4)
#define	MOV(x, r)	mov.l .L_/**/x, r
#define	REG_SYMBOL(x)	.L_/**/x:	.long	SH4_/**/x
#define	FUNC_SYMBOL(x)	.L_/**/x:	.long	_C_LABEL(sh4_/**/x)
#endif /* SH3 && SH4 */
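
/*
 * Expansion example ("TTB" is only an example register name): with both
 * SH3 and SH4 configured, MOV(TTB, r0) expands to roughly
 *
 *	mov.l	.L_TTB, r0
 *	mov.l	@r0, r0
 *
 * i.e. the CPU-specific value is fetched at run time from the __sh_TTB
 * variable named by REG_SYMBOL(TTB).  With only one CPU type configured,
 * the SH3_TTB or SH4_TTB constant is loaded directly with a single mov.l.
 */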

/*
 * BANK1 r6 contains current trapframe pointer.
 * BANK1 r7 contains bottom address of lwp's kernel stack.
 */
/*
 * __EXCEPTION_ENTRY:
 *	+ setup stack pointer
 *	+ save all registers to trapframe.
 *	+ setup kernel stack.
 *	+ change bank from 1 to 0
 *	+ set BANK0 (r4, r5) = (ssr, spc)
 */
#define	__EXCEPTION_ENTRY						;\
	/* Check kernel/user mode. */					;\
	mov	#0x40,	r3						;\
	swap.b	r3,	r3						;\
	stc	ssr,	r2						;\
	swap.w	r3,	r3	/* r3 = 0x40000000 */			;\
	mov	r2,	r0	/* r2 = r0 = SSR */			;\
	and	r3,	r0						;\
	tst	r0,	r0	/* if (SSR.MD == 0) T = 1 */		;\
	mov	r14,	r1						;\
	mov	r6,	r14	/* frame pointer */			;\
	bf/s	1f		/* T==0 ...Exception from kernel mode */;\
	 mov	r15,	r0						;\
	/* Exception from user mode */					;\
	mov	r7,	r15	/* change to kernel stack */		;\
1:									;\
	/* Save registers */						;\
	mov.l	r1,	@-r14	/* tf_r14 */				;\
	mov.l	r0,	@-r14	/* tf_r15 */				;\
	stc.l	r0_bank,@-r14	/* tf_r0  */				;\
	stc.l	r1_bank,@-r14	/* tf_r1  */				;\
	stc.l	r2_bank,@-r14	/* tf_r2  */				;\
	stc.l	r3_bank,@-r14	/* tf_r3  */				;\
	stc.l	r4_bank,@-r14	/* tf_r4  */				;\
	stc.l	r5_bank,@-r14	/* tf_r5  */				;\
	stc.l	r6_bank,@-r14	/* tf_r6  */				;\
	stc.l	r7_bank,@-r14	/* tf_r7  */				;\
	mov.l	r8,	@-r14	/* tf_r8  */				;\
	mov.l	r9,	@-r14	/* tf_r9  */				;\
	mov.l	r10,	@-r14	/* tf_r10 */				;\
	mov.l	r11,	@-r14	/* tf_r11 */				;\
	mov.l	r12,	@-r14	/* tf_r12 */				;\
	mov.l	r13,	@-r14	/* tf_r13 */				;\
	sts.l	pr,	@-r14	/* tf_pr  */				;\
	sts.l	mach,	@-r14	/* tf_mach*/				;\
	sts.l	macl,	@-r14	/* tf_macl*/				;\
	mov.l	r2,	@-r14	/* tf_ssr */				;\
	stc.l	spc,	@-r14	/* tf_spc */				;\
	add	#-8,	r14	/* skip tf_ubc, tf_expevt */		;\
	mov	r14,	r6	/* store frame pointer */		;\
	/* Change register bank to 0 */					;\
	shlr	r3		/* r3 = 0x20000000 */			;\
	stc	sr,	r1	/* r1 = SR */				;\
	not	r3,	r3						;\
	and	r1,	r3						;\
	ldc	r3,	sr	/* SR.RB = 0 */				;\
	/* Set up arguments. r4 = ssr, r5 = spc */			;\
	stc	r2_bank,r4						;\
	stc	spc,	r5
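
/*
 * Illustrative usage sketch (not an actual exception vector): a stub
 * built on these macros pairs __EXCEPTION_ENTRY with __EXCEPTION_RETURN
 * below, and the C handler it transfers to receives r4 = ssr and
 * r5 = spc as set up above:
 *
 *	__EXCEPTION_ENTRY
 *	...transfer to the C exception handler...
 *	__EXCEPTION_RETURN
 */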

/*
 * __EXCEPTION_RETURN:
 *	+ block exceptions
 *	+ restore all registers from stack.
 *	+ rte.
 */
#define	__EXCEPTION_RETURN						;\
	mov	#0x10,	r0						;\
	swap.b	r0,	r0						;\
	swap.w	r0,	r0	/* r0 = 0x10000000 */			;\
	stc	sr,	r1						;\
	or	r0,	r1						;\
	ldc	r1,	sr	/* SR.BL = 1 */				;\
	stc	r6_bank,r0						;\
	mov	r0,	r14						;\
	add	#TF_SIZE, r0						;\
	ldc	r0,	r6_bank	/* roll up frame pointer */		;\
	add	#8,	r14	/* skip tf_expevt, tf_ubc */		;\
	ldc.l	@r14+,	spc	/* tf_spc */				;\
	ldc.l	@r14+,	ssr	/* tf_ssr */				;\
	lds.l	@r14+,	macl	/* tf_macl*/				;\
	lds.l	@r14+,	mach	/* tf_mach*/				;\
	lds.l	@r14+,	pr	/* tf_pr  */				;\
	mov.l	@r14+,	r13	/* tf_r13 */				;\
	mov.l	@r14+,	r12	/* tf_r12 */				;\
	mov.l	@r14+,	r11	/* tf_r11 */				;\
	mov.l	@r14+,	r10	/* tf_r10 */				;\
	mov.l	@r14+,	r9	/* tf_r9  */				;\
	mov.l	@r14+,	r8	/* tf_r8  */				;\
	mov.l	@r14+,	r7	/* tf_r7  */				;\
	mov.l	@r14+,	r6	/* tf_r6  */				;\
	mov.l	@r14+,	r5	/* tf_r5  */				;\
	mov.l	@r14+,	r4	/* tf_r4  */				;\
	mov.l	@r14+,	r3	/* tf_r3  */				;\
	mov.l	@r14+,	r2	/* tf_r2  */				;\
	mov.l	@r14+,	r1	/* tf_r1  */				;\
	mov.l	@r14+,	r0	/* tf_r0  */				;\
	mov.l	@r14+,	r15	/* tf_r15 */				;\
	mov.l	@r14+,	r14	/* tf_r14 */				;\
	rte								;\
	 nop
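
/*
 * Note: SR.BL is raised first so that no exception can overwrite spc/ssr
 * after they have been reloaded from the trapframe; rte then restores SR
 * from ssr and resumes execution at spc.
 */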


/*
 * Macros to disable and enable exceptions (including interrupts).
 * These modify SR.BL.
 */

#define	__EXCEPTION_BLOCK(Rn, Rm)					;\
	mov	#0x10,	Rn						;\
	swap.b	Rn,	Rn						;\
	swap.w	Rn,	Rn	/* Rn = 0x10000000 */			;\
	stc	sr,	Rm						;\
	or	Rm,	Rn						;\
	ldc	Rn,	sr	/* block exceptions */

#define	__EXCEPTION_UNBLOCK(Rn, Rm)					;\
	mov	#0xef,	Rn	/* ~0x10 */				;\
	swap.b	Rn,	Rn						;\
	swap.w	Rn,	Rn	/* Rn = ~0x10000000 */			;\
	stc	sr,	Rm						;\
	and	Rn,	Rm						;\
	ldc	Rm,	sr	/* unblock exceptions */
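
/*
 * Both macros take two scratch registers and clobber them; for example
 * (any two free registers may be used):
 *
 *	__EXCEPTION_BLOCK(r0, r1)
 *	...critical section...
 *	__EXCEPTION_UNBLOCK(r0, r1)
 */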

/*
 * Macros to disable and enable interrupts.
 * These modify SR.I[0-3].
 */
#define	__INTR_MASK(Rn, Rm)						;\
	mov	#0x78,	Rn						;\
	shll	Rn		/* Rn = 0x000000f0 */			;\
	stc	sr,	Rm						;\
	or	Rn,	Rm						;\
	ldc	Rm,	sr	/* mask all interrupts */

#define	__INTR_UNMASK(Rn, Rm)						;\
	mov	#0x78,	Rn						;\
	shll	Rn		/* Rn = 0x000000f0 */			;\
	not	Rn,	Rn						;\
	stc	sr,	Rm						;\
	and	Rn,	Rm						;\
	ldc	Rm,	sr	/* unmask all interrupts */
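
/*
 * SR.I3-I0 occupy bits 7-4, so or'ing in 0x000000f0 raises the interrupt
 * mask level to 15 (all interrupts masked) and and'ing with its complement
 * drops the mask level back to 0.
 */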

#else /* !_LOCORE */

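/*
 * When both SH3 and SH4 are configured, locore code reaches the
 * CPU-specific routine through the __sh_switch_resume pointer declared
 * below (cf. the MOV/FUNC_SYMBOL macros above); with a single CPU type
 * the sh3_/sh4_ variant is referenced directly.
 */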
void sh3_switch_setup(struct lwp *);
void sh4_switch_setup(struct lwp *);
void sh3_switch_resume(struct lwp *);
void sh4_switch_resume(struct lwp *);
extern void (*__sh_switch_resume)(struct lwp *);

#endif /* !_LOCORE */