/*	$NetBSD: locore.h,v 1.40 2025/10/07 10:38:30 skrll Exp $	*/

/*
 * Copyright (c) 1994-1996 Mark Brinicombe.
 * Copyright (c) 1994 Brini.
 * All rights reserved.
 *
 * This code is derived from software written for Brini by Mark Brinicombe
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *	This product includes software developed by Brini.
 * 4. The name of the company nor the name of the author may be used to
 *    endorse or promote products derived from this software without specific
 *    prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY BRINI ``AS IS'' AND ANY EXPRESS OR IMPLIED
 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL BRINI OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * RiscBSD kernel project
 *
     39   1.1      matt  * cpu.h
     40   1.1      matt  *
     41   1.1      matt  * CPU specific symbols
 *
 * Created      : 18/09/94
 *
 * Based on kate/katelib/arm6.h
 */

#ifndef _ARM_LOCORE_H_
#define _ARM_LOCORE_H_

#ifdef __arm__

#ifdef _KERNEL_OPT
#include "opt_cpuoptions.h"
#include "opt_cputypes.h"
#include "opt_arm_debug.h"
#endif

#include <sys/pcu.h>

#include <arm/cpuconf.h>
#include <arm/armreg.h>

#include <machine/frame.h>

#ifdef _LOCORE

#if defined(_ARM_ARCH_6)
#define	IRQ_DISABLE(rTMP)	cpsid	i
#define	IRQ_ENABLE(rTMP)	cpsie	i

#define	INTERRUPT_DISABLE(rTMP)	cpsid	if
#define	INTERRUPT_ENABLE(rTMP)	cpsie	if
#else
#define	IRQ_DISABLE(rTMP)				\
	mrs	rTMP, cpsr ;				\
	orr	rTMP, rTMP, #(I32_bit) ;		\
	msr	cpsr_c, rTMP

#define	IRQ_ENABLE(rTMP)				\
	mrs	rTMP, cpsr ;				\
	bic	rTMP, rTMP, #(I32_bit) ;		\
	msr	cpsr_c, rTMP

#define	INTERRUPT_DISABLE(rTMP)				\
	mrs	rTMP, cpsr ;				\
	orr	rTMP, rTMP, #(I32_bit | F32_bit) ;	\
	msr	cpsr_c, rTMP

#define	INTERRUPT_ENABLE(rTMP)				\
	mrs	rTMP, cpsr ;				\
	bic	rTMP, rTMP, #(I32_bit | F32_bit) ;	\
	msr	cpsr_c, rTMP
#endif
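
/*
 * Example (assembly sources, _LOCORE): bracket a short critical section,
 * passing any scratch register that the pre-ARMv6 expansion may clobber:
 *
 *	IRQ_DISABLE(r4)
 *	@ ... code that must not be interrupted ...
 *	IRQ_ENABLE(r4)
 *
 * INTERRUPT_DISABLE/INTERRUPT_ENABLE do the same but also mask and
 * unmask FIQs.
 */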

#if defined (TPIDRPRW_IS_CURCPU)
#define	GET_CURCPU(rX)		mrc	p15, 0, rX, c13, c0, 4
#define	GET_CURLWP(rX)		GET_CURCPU(rX); ldr rX, [rX, #CI_CURLWP]
#define	GET_CURX(rCPU, rLWP)	GET_CURCPU(rCPU); ldr rLWP, [rCPU, #CI_CURLWP]
#elif defined (TPIDRPRW_IS_CURLWP)
#define	GET_CURLWP(rX)		mrc	p15, 0, rX, c13, c0, 4
#if defined (MULTIPROCESSOR)
#define	GET_CURCPU(rX)		GET_CURLWP(rX); ldr rX, [rX, #L_CPU]
#define	GET_CURX(rCPU, rLWP)	GET_CURLWP(rLWP); ldr rCPU, [rLWP, #L_CPU]
#elif defined(_ARM_ARCH_7)
#define	GET_CURCPU(rX)		movw rX, #:lower16:cpu_info_store; movt rX, #:upper16:cpu_info_store
#define	GET_CURX(rCPU, rLWP)	GET_CURLWP(rLWP); GET_CURCPU(rCPU)
#else
#define	GET_CURCPU(rX)		ldr rX, =_C_LABEL(cpu_info_store)
#define	GET_CURX(rCPU, rLWP)	GET_CURLWP(rLWP); ldr rCPU, [rLWP, #L_CPU]
#endif
#elif !defined(MULTIPROCESSOR)
#define	GET_CURCPU(rX)		ldr rX, =_C_LABEL(cpu_info_store)
#define	GET_CURLWP(rX)		GET_CURCPU(rX); ldr rX, [rX, #CI_CURLWP]
#define	GET_CURX(rCPU, rLWP)	GET_CURCPU(rCPU); ldr rLWP, [rCPU, #CI_CURLWP]
#endif
#define	GET_CURPCB(rX)		GET_CURLWP(rX); ldr rX, [rX, #L_PCB]
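
/*
 * Example (assembly sources): load the current cpu_info, lwp and PCB
 * into scratch registers before touching per-CPU or per-LWP state:
 *
 *	GET_CURX(r5, r4)	@ r5 = curcpu(), r4 = curlwp
 *	GET_CURPCB(r6)		@ r6 = curlwp's struct pcb
 */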

#else /* !_LOCORE */

#include <arm/cpufunc.h>

#define	IRQdisable __set_cpsr_c(I32_bit, I32_bit);
#define	IRQenable __set_cpsr_c(I32_bit, 0);
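
/*
 * For example, C code can bracket a short critical section with
 * (the trailing semicolons are part of the macros):
 *
 *	IRQdisable
 *	... touch state shared with interrupt handlers ...
 *	IRQenable
 */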

/*
 * Validate a PC or PSR for a user process.  Used by various system calls
 * that take a context passed by the user and restore it.
 */

#ifdef __NO_FIQ
#define	VALID_PSR(psr)						\
    (((psr) & PSR_MODE) == PSR_USR32_MODE && ((psr) & I32_bit) == 0)
#else
#define	VALID_PSR(psr)						\
    (((psr) & PSR_MODE) == PSR_USR32_MODE && ((psr) & IF32_bits) == 0)
#endif
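
/*
 * Sketch of the intended use (tf_spsr is the saved PSR in the trapframe):
 * a context-restoring path should reject anything that is not plain
 * USR32 mode with interrupts unmasked, e.g.
 *
 *	if (!VALID_PSR(tf->tf_spsr))
 *		return EINVAL;
 */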

/*
 * Translation Table Base Register Share/Cache settings
 */
#define	TTBR_UPATTR	(TTBR_S | TTBR_RGN_WBNWA | TTBR_C)
#define	TTBR_MPATTR	(TTBR_S | TTBR_RGN_WBNWA /* | TTBR_NOS */ | TTBR_IRGN_WBNWA)
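
/*
 * Illustrative sketch only (assumes the armreg_ttbr_write() accessor from
 * <arm/armreg.h>; l1_pa stands for the physical address of an L1
 * translation table): these attribute bits are ORed into that address
 * when it is loaded into TTBR0, e.g.
 *
 *	armreg_ttbr_write(l1_pa | TTBR_UPATTR);
 */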

/* The address of the vector page. */
extern vaddr_t vector_page;
void	arm32_vector_init(vaddr_t, int);

#define	ARM_VEC_RESET			(1 << 0)
#define	ARM_VEC_UNDEFINED		(1 << 1)
#define	ARM_VEC_SWI			(1 << 2)
#define	ARM_VEC_PREFETCH_ABORT		(1 << 3)
#define	ARM_VEC_DATA_ABORT		(1 << 4)
#define	ARM_VEC_ADDRESS_EXCEPTION	(1 << 5)
#define	ARM_VEC_IRQ			(1 << 6)
#define	ARM_VEC_FIQ			(1 << 7)

#define	ARM_NVEC			8
#define	ARM_VEC_ALL			0xffffffff
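
/*
 * For example (a sketch, not any specific port's code): machine-dependent
 * startup code picks a vector-page address and installs all of the
 * standard exception vectors in one call:
 *
 *	arm32_vector_init(va_of_vector_page, ARM_VEC_ALL);
 */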

/*
 * cpu device glue (belongs in cpuvar.h)
 */
void	cpu_attach(device_t, cpuid_t);

/* 1 == use cpu_sleep(), 0 == don't */
extern int cpu_do_powersave;
extern int cpu_printfataltraps;
extern int cpu_fpu_present;
extern int cpu_hwdiv_present;
extern int cpu_neon_present;
extern int cpu_simd_present;
extern int cpu_simdex_present;
extern int cpu_umull_present;
extern int cpu_synchprim_present;

extern int cpu_instruction_set_attributes[6];
extern int cpu_memory_model_features[4];
extern int cpu_processor_features[2];
extern int cpu_media_and_vfp_features[2];

extern bool arm_has_tlbiasid_p;
extern bool arm_has_mpext_p;

#if !defined(CPU_ARMV7)
#define	CPU_IS_ARMV7_P()		false
#elif defined(CPU_ARMV6) || defined(CPU_PRE_ARMV6)
extern bool cpu_armv7_p;
#define	CPU_IS_ARMV7_P()		(cpu_armv7_p)
#else
#define	CPU_IS_ARMV7_P()		true
#endif
#if !defined(CPU_ARMV6)
#define	CPU_IS_ARMV6_P()		false
#elif defined(CPU_ARMV7) || defined(CPU_PRE_ARMV6)
extern bool cpu_armv6_p;
#define	CPU_IS_ARMV6_P()		(cpu_armv6_p)
#else
#define	CPU_IS_ARMV6_P()		true
#endif

/*
 * Used by the fault code to read the current instruction.
 */
static inline uint32_t
read_insn(vaddr_t va, bool user_p)
{
	uint32_t insn;
	if (user_p) {
		__asm __volatile("ldrt %0, [%1]" : "=&r"(insn) : "r"(va));
	} else {
		insn = *(const uint32_t *)va;
	}
#ifdef _ARM_ARCH_BE8
	insn = bswap32(insn);
#endif
	return insn;
}

/*
 * Used by the fault code to read the current thumb instruction.
 */
static inline uint32_t
read_thumb_insn(vaddr_t va, bool user_p)
{
	va &= ~1;
	uint32_t insn;
	if (user_p) {
#if defined(__thumb__) && defined(_ARM_ARCH_T2)
		__asm __volatile("ldrht %0, [%1, #0]" : "=&r"(insn) : "r"(va));
#elif defined(_ARM_ARCH_7)
		__asm __volatile("ldrht %0, [%1], #0" : "=&r"(insn) : "r"(va));
#else
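		/*
		 * No unprivileged halfword load (ldrht) before ARMv6T2/v7,
		 * so fetch the containing aligned word with ldrt and shift
		 * out the halfword that holds the instruction; on a
		 * big-endian (BE32) kernel the two halfwords sit in the
		 * opposite order within the word, hence the "va ^ 2".
		 */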
		__asm __volatile("ldrt %0, [%1]" : "=&r"(insn) : "r"(va & ~3));
#ifdef __ARMEB__
		insn = (uint16_t) (insn >> (((va ^ 2) & 2) << 3));
#else
		insn = (uint16_t) (insn >> ((va & 2) << 3));
#endif
#endif
	} else {
		insn = *(const uint16_t *)va;
	}
#ifdef _ARM_ARCH_BE8
	insn = bswap16(insn);
#endif
	return insn;
}
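
/*
 * Sketch of how the fault code picks between the two readers, using the
 * Thumb bit of the saved PSR (tf_spsr/tf_pc are the trapframe fields):
 *
 *	insn = (tf->tf_spsr & PSR_T_bit) ?
 *	    read_thumb_insn(tf->tf_pc, user_p) :
 *	    read_insn(tf->tf_pc, user_p);
 */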

/*
 * Random cruft
 */

struct lwp;

/* cpu.c */
void	identify_arm_cpu(device_t, struct cpu_info *);

/* cpuswitch.S */
struct pcb;
void	savectx(struct pcb *);

/* ast.c */
void	userret(struct lwp *);

/* *_machdep.c */
void	bootsync(void);

/* fault.c */
int	badaddr_read(void *, size_t, void *);

/* syscall.c */
void	swi_handler(trapframe_t *);

/* vfp_init.c */
void	vfp_detect(struct cpu_info *);
void	vfp_attach(struct cpu_info *);
void	vfp_discardcontext(lwp_t *, bool);
void	vfp_savecontext(lwp_t *);
void	vfp_kernel_acquire(void);
void	vfp_kernel_release(void);
bool	vfp_used_p(const lwp_t *);
extern const pcu_ops_t arm_vfp_ops;
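
/*
 * Illustrative only: kernel code that wants to use the VFP/NEON register
 * bank directly is expected to bracket that use so the pcu framework can
 * save and restore any lwp state, e.g.
 *
 *	vfp_kernel_acquire();
 *	... use VFP/NEON registers ...
 *	vfp_kernel_release();
 */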

#endif	/* !_LOCORE */

#elif defined(__aarch64__)

#include <aarch64/locore.h>

#endif /* __arm__/__aarch64__ */

#endif /* !_ARM_LOCORE_H_ */