/*	$NetBSD: locore.h,v 1.22 2011/02/04 04:13:52 uwe Exp $	*/

/*-
 * Copyright (c) 2002 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifdef _LOCORE

#ifdef __STDC__
#if defined(SH3) && defined(SH4)
#define	MOV(x, r)	mov.l .L_ ## x, r; mov.l @r, r
#define	REG_SYMBOL(x)	.L_ ## x:	.long	_C_LABEL(__sh_ ## x)
#define	FUNC_SYMBOL(x)	.L_ ## x:	.long	_C_LABEL(__sh_ ## x)
#elif defined(SH3)
#define	MOV(x, r)	mov.l .L_ ## x, r
#define	REG_SYMBOL(x)	.L_ ## x:	.long	SH3_ ## x
#define	FUNC_SYMBOL(x)	.L_ ## x:	.long	_C_LABEL(sh3_ ## x)
#elif defined(SH4)
#define	MOV(x, r)	mov.l .L_ ## x, r
#define	REG_SYMBOL(x)	.L_ ## x:	.long	SH4_ ## x
#define	FUNC_SYMBOL(x)	.L_ ## x:	.long	_C_LABEL(sh4_ ## x)
#endif /* SH3 && SH4 */
#else /* !__STDC__ */
#if defined(SH3) && defined(SH4)
#define	MOV(x, r)	mov.l .L_/**/x, r; mov.l @r, r
#define	REG_SYMBOL(x)	.L_/**/x:	.long	_C_LABEL(__sh_/**/x)
#define	FUNC_SYMBOL(x)	.L_/**/x:	.long	_C_LABEL(__sh_/**/x)
#elif defined(SH3)
#define	MOV(x, r)	mov.l .L_/**/x, r
#define	REG_SYMBOL(x)	.L_/**/x:	.long	SH3_/**/x
#define	FUNC_SYMBOL(x)	.L_/**/x:	.long	_C_LABEL(sh3_/**/x)
#elif defined(SH4)
#define	MOV(x, r)	mov.l .L_/**/x, r
#define	REG_SYMBOL(x)	.L_/**/x:	.long	SH4_/**/x
#define	FUNC_SYMBOL(x)	.L_/**/x:	.long	_C_LABEL(sh4_/**/x)
#endif /* SH3 && SH4 */
#endif /* __STDC__ */

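/*
 * For example, with x = TEA: an SH3-only kernel expands MOV(TEA, r0)
 * and REG_SYMBOL(TEA) so that r0 is loaded with the constant SH3_TEA,
 * while a kernel built for both SH3 and SH4 instead loads the address
 * of the run-time variable __sh_TEA and dereferences it (the extra
 * "mov.l @r, r"), picking up the value chosen at run time for the CPU
 * that is actually present.
 */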
/*
 * BANK1 r6 contains the current trapframe pointer.
 * BANK1 r7 contains the bottom address of the lwp's kernel stack.
 */
/*
 * __EXCEPTION_ENTRY:
 *	+ set up the stack pointer
 *	+ save all registers to the trapframe
 *	+ set up the kernel stack
 *	+ change register bank from 1 to 0
 *	+ NB: the interrupt vector "knows" that r0_bank1 = ssp
 */
#define	__EXCEPTION_ENTRY						;\
	/* Check kernel/user mode. */					;\
	mov	#0x40,	r3						;\
	stc	ssr,	r2	/* r2 = SSR */				;\
	swap.b	r3,	r3						;\
	mov	r14,	r1						;\
	swap.w	r3,	r3	/* r3 = PSL_MD */			;\
	mov	r6,	r14	/* trapframe pointer */			;\
	tst	r3,	r2	/* if (SSR.MD == 0) T = 1 */		;\
	mov.l	r1,	@-r14	/* save tf_r14 */			;\
	bf/s	1f		/* T==0 ...Exception from kernel mode */;\
	 mov	r15,	r0						;\
	/* Exception from user mode */					;\
	mov	r7,	r15	/* change to kernel stack */		;\
1:									;\
	/* Save remaining registers */					;\
	mov.l	r0,	@-r14	/* tf_r15 */				;\
	stc.l	r0_bank,@-r14	/* tf_r0  */				;\
	stc.l	r1_bank,@-r14	/* tf_r1  */				;\
	stc.l	r2_bank,@-r14	/* tf_r2  */				;\
	stc.l	r3_bank,@-r14	/* tf_r3  */				;\
	stc.l	r4_bank,@-r14	/* tf_r4  */				;\
	stc.l	r5_bank,@-r14	/* tf_r5  */				;\
	stc.l	r6_bank,@-r14	/* tf_r6  */				;\
	stc.l	r7_bank,@-r14	/* tf_r7  */				;\
	mov.l	r8,	@-r14	/* tf_r8  */				;\
	mov.l	r9,	@-r14	/* tf_r9  */				;\
	mov.l	r10,	@-r14	/* tf_r10 */				;\
	mov.l	r11,	@-r14	/* tf_r11 */				;\
	mov.l	r12,	@-r14	/* tf_r12 */				;\
	mov.l	r13,	@-r14	/* tf_r13 */				;\
	sts.l	pr,	@-r14	/* tf_pr  */				;\
	sts.l	mach,	@-r14	/* tf_mach*/				;\
	sts.l	macl,	@-r14	/* tf_macl*/				;\
	stc.l	gbr,	@-r14	/* tf_gbr */				;\
	mov.l	r2,	@-r14	/* tf_ssr */				;\
	stc.l	spc,	@-r14	/* tf_spc */				;\
	add	#-8,	r14	/* skip tf_ubc, tf_expevt */		;\
	mov	r14,	r6	/* store frame pointer */		;\
	/* Change register bank to 0 */					;\
	shlr	r3		/* r3 = PSL_RB */			;\
	stc	sr,	r1	/* r1 = SR */				;\
	not	r3,	r3						;\
	and	r1,	r3						;\
	ldc	r3,	sr	/* SR.RB = 0 */

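/*
 * The frame built above corresponds to struct trapframe, lowest address
 * first: tf_expevt, tf_ubc, tf_spc, tf_ssr, tf_gbr, tf_macl, tf_mach,
 * tf_pr, tf_r13..tf_r8, tf_r7..tf_r0, tf_r15, tf_r14.  BANK1 r6 is left
 * pointing at the base of this frame, ready for a nested exception.
 */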

/*
 * __EXCEPTION_RETURN:
 *	+ block exceptions
 *	+ restore all registers from the trapframe
 *	+ rte
 */
#define	__EXCEPTION_RETURN						;\
	mov	#0x10,	r0						;\
	swap.b	r0,	r0						;\
	swap.w	r0,	r0	/* r0 = 0x10000000 */			;\
	stc	sr,	r1						;\
	or	r0,	r1						;\
	ldc	r1,	sr	/* SR.BL = 1 */				;\
	stc	r6_bank,r0						;\
	mov	r0,	r14						;\
	add	#TF_SIZE, r0						;\
	ldc	r0,	r6_bank	/* roll up frame pointer */		;\
	add	#8,	r14	/* skip tf_expevt, tf_ubc */		;\
	ldc.l	@r14+,	spc	/* tf_spc */				;\
	ldc.l	@r14+,	ssr	/* tf_ssr */				;\
	ldc.l	@r14+,	gbr	/* tf_gbr */				;\
	lds.l	@r14+,	macl	/* tf_macl*/				;\
	lds.l	@r14+,	mach	/* tf_mach*/				;\
	lds.l	@r14+,	pr	/* tf_pr  */				;\
	mov.l	@r14+,	r13	/* tf_r13 */				;\
	mov.l	@r14+,	r12	/* tf_r12 */				;\
	mov.l	@r14+,	r11	/* tf_r11 */				;\
	mov.l	@r14+,	r10	/* tf_r10 */				;\
	mov.l	@r14+,	r9	/* tf_r9  */				;\
	mov.l	@r14+,	r8	/* tf_r8  */				;\
	mov.l	@r14+,	r7	/* tf_r7  */				;\
	mov.l	@r14+,	r6	/* tf_r6  */				;\
	mov.l	@r14+,	r5	/* tf_r5  */				;\
	mov.l	@r14+,	r4	/* tf_r4  */				;\
	mov.l	@r14+,	r3	/* tf_r3  */				;\
	mov.l	@r14+,	r2	/* tf_r2  */				;\
	mov.l	@r14+,	r1	/* tf_r1  */				;\
	mov.l	@r14+,	r0	/* tf_r0  */				;\
	mov.l	@r14+,	r15	/* tf_r15 */				;\
	mov.l	@r14+,	r14	/* tf_r14 */				;\
	rte								;\
	 nop

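/*
 * A handler is expected to bracket its C-level work with these macros,
 * roughly (illustrative sketch only):
 *
 *	__EXCEPTION_ENTRY		! build trapframe, switch to bank 0
 *	__INTR_MASK(r0, r1)		! keep interrupts masked...
 *	__EXCEPTION_UNBLOCK(r0, r1)	! ...but allow exceptions (TLB miss)
 *	mov	r14,	r4		! e.g. pass the trapframe to C code
 *	...call the C handler...
 *	__EXCEPTION_RETURN		! restore the trapframe and rte
 */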

/*
 * Macros to disable and enable exceptions (including interrupts).
 * These modify SR.BL.
 */
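/*
 * Rn and Rm are caller-supplied scratch registers; both are clobbered.
 * The SR bit masks below are built with "mov #imm8" plus swap.b/swap.w
 * (or shll) because mov sign-extends its 8-bit immediate, so a constant
 * like 0x10000000 cannot be loaded in a single instruction.
 */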

#define	__EXCEPTION_BLOCK(Rn, Rm)					;\
	mov	#0x10,	Rn						;\
	swap.b	Rn,	Rn						;\
	swap.w	Rn,	Rn	/* Rn = 0x10000000 */			;\
	stc	sr,	Rm						;\
	or	Rm,	Rn						;\
	ldc	Rn,	sr	/* block exceptions */

#define	__EXCEPTION_UNBLOCK(Rn, Rm)					;\
	mov	#0xef,	Rn	/* ~0x10 */				;\
	swap.b	Rn,	Rn						;\
	swap.w	Rn,	Rn	/* Rn = ~0x10000000 */			;\
	stc	sr,	Rm						;\
	and	Rn,	Rm						;\
	ldc	Rm,	sr	/* unblock exceptions */

/*
 * Macros to disable and enable interrupts.
 * These modify SR.I[0-3].
 */
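/*
 * Raising SR.IMASK to 0xf sets the interrupt priority mask to its
 * maximum level, so all maskable interrupt sources are held off;
 * NMI is unaffected.
 */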
#define	__INTR_MASK(Rn, Rm)						;\
	mov	#0x78,	Rn						;\
	shll	Rn		/* Rn = 0x000000f0 */			;\
	stc	sr,	Rm						;\
	or	Rn,	Rm						;\
	ldc	Rm,	sr	/* mask all interrupts */

#define	__INTR_UNMASK(Rn, Rm)						;\
	mov	#0x78,	Rn						;\
	shll	Rn		/* Rn = 0x000000f0 */			;\
	not	Rn,	Rn						;\
	stc	sr,	Rm						;\
	and	Rn,	Rm						;\
	ldc	Rm,	sr	/* unmask all interrupts */


/*
 * Since __INTR_MASK + __EXCEPTION_UNBLOCK is a common sequence, provide
 * this combined version that does stc/ldc just once.
 */
#define __INTR_MASK_EXCEPTION_UNBLOCK(Rs, Ri, Rb)			 \
	mov	#0x78, Ri	/* 0xf0 >> 1 */				;\
	mov	#0xef, Rb	/* ~0x10 */				;\
	shll	Ri		/* Ri = PSL_IMASK */			;\
	swap.b	Rb, Rb							;\
	stc	sr, Rs							;\
	swap.w	Rb, Rb		/* Rb = ~PSL_BL */			;\
	or	Ri, Rs		/* SR |= PSL_IMASK */			;\
	and	Rb, Rs		/* SR &= ~PSL_BL */			;\
	ldc	Rs, sr

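/*
 * Rs, Ri and Rb are scratch registers and are all clobbered.  This is
 * typically wanted on exception entry paths that must keep interrupts
 * masked while clearing SR.BL, so that further exceptions (e.g. TLB
 * misses) can be taken again.
 */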

#else /* !_LOCORE */

void sh3_switch_setup(struct lwp *);
void sh4_switch_setup(struct lwp *);
void sh3_switch_resume(struct lwp *);
void sh4_switch_resume(struct lwp *);
extern void (*__sh_switch_resume)(struct lwp *);

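/*
 * When the kernel is configured for both SH3 and SH4, __sh_switch_resume
 * is expected to be pointed at the matching CPU-specific routine during
 * early machine-dependent initialization, along the lines of
 * (illustrative only):
 *
 *	__sh_switch_resume = CPU_IS_SH4 ?
 *	    sh4_switch_resume : sh3_switch_resume;
 *
 * so that the assembly context-switch code can reach it through the
 * MOV()/FUNC_SYMBOL() indirection defined above.
 */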
#endif /* !_LOCORE */