/*	locore.h, revision 1.18 — NetBSD sys/arch/sh3/include	*/
      1 /*	$NetBSD: locore.h,v 1.18 2008/05/31 22:45:32 uwe Exp $	*/
      2 
      3 /*-
      4  * Copyright (c) 2002 The NetBSD Foundation, Inc.
      5  * All rights reserved.
      6  *
      7  * Redistribution and use in source and binary forms, with or without
      8  * modification, are permitted provided that the following conditions
      9  * are met:
     10  * 1. Redistributions of source code must retain the above copyright
     11  *    notice, this list of conditions and the following disclaimer.
     12  * 2. Redistributions in binary form must reproduce the above copyright
     13  *    notice, this list of conditions and the following disclaimer in the
     14  *    documentation and/or other materials provided with the distribution.
     15  *
     16  * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
     17  * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
     18  * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
     19  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
     20  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
     21  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
     22  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
     23  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
     24  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
     25  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
     26  * POSSIBILITY OF SUCH DAMAGE.
     27  */
     28 
#ifdef _LOCORE

/*
 * Symbol-access macros for locore assembly:
 *
 *	MOV(x, r)	- load the value of symbol "x" into register r.
 *	REG_SYMBOL(x)	- emit the constant-pool entry .L_x holding a
 *			  device/register address for "x".
 *	FUNC_SYMBOL(x)	- emit the constant-pool entry .L_x holding a
 *			  function address for "x".
 *
 * When the kernel is built for both SH3 and SH4 the final address is
 * only known at run time, so the pool entry holds the address of the
 * __sh_x indirection variable and MOV() performs a second load through
 * it.  A single-CPU kernel resolves directly to the SH3_x or SH4_x
 * constant (or the sh3_x or sh4_x function) and MOV() is one load.
 * The "x" argument is pasted with the old-style empty-comment splice
 * rather than ISO "##".
 */
#if defined(SH3) && defined(SH4)
#define	MOV(x, r)	mov.l .L_/**/x, r; mov.l @r, r
#define	REG_SYMBOL(x)	.L_/**/x:	.long	_C_LABEL(__sh_/**/x)
#define	FUNC_SYMBOL(x)	.L_/**/x:	.long	_C_LABEL(__sh_/**/x)
#elif defined(SH3)
#define	MOV(x, r)	mov.l .L_/**/x, r
#define	REG_SYMBOL(x)	.L_/**/x:	.long	SH3_/**/x
#define	FUNC_SYMBOL(x)	.L_/**/x:	.long	_C_LABEL(sh3_/**/x)
#elif defined(SH4)
#define	MOV(x, r)	mov.l .L_/**/x, r
#define	REG_SYMBOL(x)	.L_/**/x:	.long	SH4_/**/x
#define	FUNC_SYMBOL(x)	.L_/**/x:	.long	_C_LABEL(sh4_/**/x)
#endif /* SH3 && SH4 */
     44 
/*
 * BANK1 r6 contains current trapframe pointer.
 * BANK1 r7 contains bottom address of lwp's kernel stack.
 */
/*
 * __EXCEPTION_ENTRY:
 *	+ setup stack pointer
 *	+ save all registers to trapframe.
 *	+ setup kernel stack.
 *	+ change bank from 1 to 0
 *	+ set BANK0 (r4, r5, r6) = (ssr, spc, ssp)
 *
 * Entered on register bank 1 (the CPU switches banks on exception),
 * so the interrupted context's r0-r7 are still intact in bank 0 and
 * are saved below via the rN_bank control registers.  The trapframe
 * is filled in downward from BANK1 r6; before the bank switch, BANK1
 * r6 is updated to point at the new frame, preserving the invariant
 * above.  After the bank switch BANK0 (r4, r5) hold (ssr, spc) as
 * arguments for the C-level handler.
 */
#define	__EXCEPTION_ENTRY						;\
	/* Check kernel/user mode. */					;\
	mov	#0x40,	r3	/* start building SR.MD mask */		;\
	swap.b	r3,	r3						;\
	stc	ssr,	r2						;\
	swap.w	r3,	r3	/* r3 = 0x40000000 */			;\
	mov	r2,	r0	/* r2 = r0 = SSR */			;\
	and	r3,	r0	/* isolate SSR.MD */			;\
	tst	r0,	r0	/* if (SSR.MD == 0) T = 1 */		;\
	mov	r14,	r1	/* preserve old r14 for trapframe */	;\
	mov	r6,	r14	/* frame pointer */			;\
	bf/s	1f		/* T==0 ...Exception from kernel mode */;\
	 mov	r15,	r0	/* (delay slot) r0 = old stack ptr */	;\
	/* Exception from user mode */					;\
	mov	r7,	r15	/* change to kernel stack */		;\
1:									;\
	/* Save registers */						;\
	mov.l	r1,	@-r14	/* tf_r14 */				;\
	mov.l	r0,	@-r14	/* tf_r15 */				;\
	stc.l	r0_bank,@-r14	/* tf_r0  */				;\
	stc.l	r1_bank,@-r14	/* tf_r1  */				;\
	stc.l	r2_bank,@-r14	/* tf_r2  */				;\
	stc.l	r3_bank,@-r14	/* tf_r3  */				;\
	stc.l	r4_bank,@-r14	/* tf_r4  */				;\
	stc.l	r5_bank,@-r14	/* tf_r5  */				;\
	stc.l	r6_bank,@-r14	/* tf_r6  */				;\
	stc.l	r7_bank,@-r14	/* tf_r7  */				;\
	mov.l	r8,	@-r14	/* tf_r8  */				;\
	mov.l	r9,	@-r14	/* tf_r9  */				;\
	mov.l	r10,	@-r14	/* tf_r10 */				;\
	mov.l	r11,	@-r14	/* tf_r11 */				;\
	mov.l	r12,	@-r14	/* tf_r12 */				;\
	mov.l	r13,	@-r14	/* tf_r13 */				;\
	sts.l	pr,	@-r14	/* tf_pr  */				;\
	sts.l	mach,	@-r14	/* tf_mach*/				;\
	sts.l	macl,	@-r14	/* tf_macl*/				;\
	mov.l	r2,	@-r14	/* tf_ssr */				;\
	stc.l	spc,	@-r14	/* tf_spc */				;\
	add	#-8,	r14	/* skip tf_ubc, tf_expevt */		;\
	mov	r14,	r6	/* store frame pointer */		;\
	/* Change register bank to 0 */					;\
	shlr	r3		/* r3 = 0x20000000 */			;\
	stc	sr,	r1	/* r1 = SR */				;\
	not	r3,	r3						;\
	and	r1,	r3	/* clear SR.RB */			;\
	ldc	r3,	sr	/* SR.RB = 0 */				;\
	/* Set up arguments. r4 = ssr, r5 = spc */			;\
	stc	r2_bank,r4	/* BANK1 r2 still holds SSR */		;\
	stc	spc,	r5
    106 
/*
 * __EXCEPTION_RETURN:
 *	+ block exceptions
 *	+ restore all registers from stack.
 *	+ rte.
 *
 * Pops the trapframe addressed by BANK1 r6 and resumes the
 * interrupted context.  SR.BL is raised first so no exception can be
 * taken while the frame is being consumed; r6_bank is advanced by
 * TF_SIZE to "roll up" to the previous frame before the registers
 * are reloaded.  NOTE: the "add #TF_SIZE" form assumes TF_SIZE fits
 * in the 8-bit signed immediate of SH "add #imm".
 *
 * Bug fix: the tf_r15 restore lacked the comma separating its
 * operands ("mov.l @r14+ r15"), which the assembler rejects.
 */
#define	__EXCEPTION_RETURN						;\
	mov	#0x10,	r0						;\
	swap.b	r0,	r0						;\
	swap.w	r0,	r0	/* r0 = 0x10000000 */			;\
	stc	sr,	r1						;\
	or	r0,	r1						;\
	ldc	r1,	sr	/* SR.BL = 1 */				;\
	stc	r6_bank,r0						;\
	mov	r0,	r14						;\
	add	#TF_SIZE, r0						;\
	ldc	r0,	r6_bank	/* roll up frame pointer */		;\
	add	#8,	r14	/* skip tf_expevt, tf_ubc */		;\
	ldc.l	@r14+,	spc	/* tf_spc */				;\
	ldc.l	@r14+,	ssr	/* tf_ssr */				;\
	lds.l	@r14+,	macl	/* tf_macl*/				;\
	lds.l	@r14+,	mach	/* tf_mach*/				;\
	lds.l	@r14+,	pr	/* tf_pr  */				;\
	mov.l	@r14+,	r13	/* tf_r13 */				;\
	mov.l	@r14+,	r12	/* tf_r12 */				;\
	mov.l	@r14+,	r11	/* tf_r11 */				;\
	mov.l	@r14+,	r10	/* tf_r10 */				;\
	mov.l	@r14+,	r9	/* tf_r9  */				;\
	mov.l	@r14+,	r8	/* tf_r8  */				;\
	mov.l	@r14+,	r7	/* tf_r7  */				;\
	mov.l	@r14+,	r6	/* tf_r6  */				;\
	mov.l	@r14+,	r5	/* tf_r5  */				;\
	mov.l	@r14+,	r4	/* tf_r4  */				;\
	mov.l	@r14+,	r3	/* tf_r3  */				;\
	mov.l	@r14+,	r2	/* tf_r2  */				;\
	mov.l	@r14+,	r1	/* tf_r1  */				;\
	mov.l	@r14+,	r0	/* tf_r0  */				;\
	mov.l	@r14+,	r15	/* tf_r15 */				;\
	mov.l	@r14+,	r14	/* tf_r14 */				;\
	rte								;\
	 nop
    148 
    149 
/*
 * Macros to disable and enable exceptions (including interrupts).
 * This modifies SR.BL
 */

/*
 * __EXCEPTION_BLOCK(Rn, Rm): set SR.BL.
 * Rn and Rm are caller-supplied scratch registers, both clobbered:
 * on exit Rm holds the previous SR value, Rn the new one.
 */
#define	__EXCEPTION_BLOCK(Rn, Rm)					;\
	mov	#0x10,	Rn						;\
	swap.b	Rn,	Rn						;\
	swap.w	Rn,	Rn	/* Rn = 0x10000000 */			;\
	stc	sr,	Rm	/* Rm = old SR */			;\
	or	Rm,	Rn	/* Rn = old SR | SR.BL */		;\
	ldc	Rn,	sr	/* block exceptions */
    162 
/*
 * __EXCEPTION_UNBLOCK(Rn, Rm): clear SR.BL.
 * Rn and Rm are caller-supplied scratch registers, both clobbered;
 * the previous SR value is not preserved.
 */
#define	__EXCEPTION_UNBLOCK(Rn, Rm)					;\
	mov	#0xef,	Rn	/* ~0x10 */				;\
	swap.b	Rn,	Rn						;\
	swap.w	Rn,	Rn	/* Rn = ~0x10000000 */			;\
	stc	sr,	Rm						;\
	and	Rn,	Rm	/* Rm = SR & ~SR.BL */			;\
	ldc	Rm,	sr	/* unblock exceptions */
    170 
/*
 * Macros to disable and enable interrupts.
 * This modifies SR.I[0-3]
 */
/*
 * __INTR_MASK(Rn, Rm): raise the interrupt mask to maximum by setting
 * all four SR.IMASK bits (0xf0, built as 0x78 << 1 since 0xf0 does not
 * fit the signed 8-bit immediate).  Rn and Rm are caller-supplied
 * scratch registers, both clobbered.
 */
#define	__INTR_MASK(Rn, Rm)						;\
	mov	#0x78,	Rn						;\
	shll	Rn		/* Rn = 0x000000f0 */			;\
	stc	sr,	Rm						;\
	or	Rn,	Rm						;\
	ldc	Rm,	sr	/* mask all interrupts */
    181 
/*
 * __INTR_UNMASK(Rn, Rm): clear all four SR.IMASK bits, accepting any
 * interrupt priority.  Rn and Rm are caller-supplied scratch
 * registers, both clobbered; the previous mask is not preserved.
 */
#define	__INTR_UNMASK(Rn, Rm)						;\
	mov	#0x78,	Rn						;\
	shll	Rn		/* Rn = 0x000000f0 */			;\
	not	Rn,	Rn	/* Rn = ~0x000000f0 */			;\
	stc	sr,	Rm						;\
	and	Rn,	Rm						;\
	ldc	Rm,	sr	/* unmask all interrupts */
    189 
#else /* !_LOCORE */

/* Per-CPU-model context switch helpers implemented in locore. */
void sh3_switch_setup(struct lwp *);
void sh4_switch_setup(struct lwp *);
void sh3_switch_resume(struct lwp *);
void sh4_switch_resume(struct lwp *);
/*
 * Indirection used when the kernel supports both SH3 and SH4;
 * presumably initialized during bootstrap to the matching sh3_/sh4_
 * routine — confirm against the sh3 machdep startup code.
 */
extern void (*__sh_switch_resume)(struct lwp *);

#endif /* !_LOCORE */
    199