Home | History | Annotate | Line # | Download | only in include
sysreg.h revision 1.14
      1  1.14     skrll /* $NetBSD: sysreg.h,v 1.14 2022/09/10 09:28:26 skrll Exp $ */
      2   1.4      maxv 
      3   1.4      maxv /*
      4   1.1      matt  * Copyright (c) 2014 The NetBSD Foundation, Inc.
      5   1.1      matt  * All rights reserved.
      6   1.1      matt  *
      7   1.1      matt  * This code is derived from software contributed to The NetBSD Foundation
      8   1.1      matt  * by Matt Thomas of 3am Software Foundry.
      9   1.1      matt  *
     10   1.1      matt  * Redistribution and use in source and binary forms, with or without
     11   1.1      matt  * modification, are permitted provided that the following conditions
     12   1.1      matt  * are met:
     13   1.1      matt  * 1. Redistributions of source code must retain the above copyright
     14   1.1      matt  *    notice, this list of conditions and the following disclaimer.
     15   1.1      matt  * 2. Redistributions in binary form must reproduce the above copyright
     16   1.1      matt  *    notice, this list of conditions and the following disclaimer in the
     17   1.1      matt  *    documentation and/or other materials provided with the distribution.
     18   1.1      matt  *
     19   1.1      matt  * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
     20   1.1      matt  * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
     21   1.1      matt  * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
     22   1.1      matt  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
     23   1.1      matt  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
     24   1.1      matt  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
     25   1.1      matt  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
     26   1.1      matt  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
     27   1.1      matt  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
     28   1.1      matt  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
     29   1.1      matt  * POSSIBILITY OF SUCH DAMAGE.
     30   1.1      matt  */
     31   1.1      matt 
     32   1.1      matt #ifndef _RISCV_SYSREG_H_
     33   1.1      matt #define _RISCV_SYSREG_H_
     34   1.1      matt 
     35   1.1      matt #ifndef _KERNEL
     36   1.1      matt #include <sys/param.h>
     37   1.1      matt #endif
     38   1.1      matt 
     39  1.13     skrll #include <riscv/reg.h>
     40  1.13     skrll 
/* Floating-point control and status register (fcsr) layout */
#define FCSR_FMASK	0	// no exception bits
#define FCSR_FRM	__BITS(7,5)	// rounding mode field
#define  FCSR_FRM_RNE	0b000	// Round Nearest, ties to Even
#define  FCSR_FRM_RTZ	0b001	// Round Towards Zero
#define  FCSR_FRM_RDN	0b010	// Round DowN (-infinity)
#define  FCSR_FRM_RUP	0b011	// Round UP (+infinity)
#define  FCSR_FRM_RMM	0b100	// Round to nearest, ties to Max Magnitude
#define  FCSR_FRM_DYN	0b111	// Dynamic rounding
#define FCSR_FFLAGS	__BITS(4,0)	// Sticky bits (accrued exceptions)
#define FCSR_NV		__BIT(4)	// iNValid operation
#define FCSR_DZ		__BIT(3)	// Divide by Zero
#define FCSR_OF		__BIT(2)	// OverFlow
#define FCSR_UF		__BIT(1)	// UnderFlow
#define FCSR_NX		__BIT(0)	// iNeXact
     55   1.1      matt 
     56   1.1      matt static inline uint32_t
     57   1.1      matt riscvreg_fcsr_read(void)
     58   1.1      matt {
     59   1.1      matt 	uint32_t __fcsr;
     60   1.1      matt 	__asm("frcsr %0" : "=r"(__fcsr));
     61   1.1      matt 	return __fcsr;
     62   1.1      matt }
     63   1.1      matt 
     64   1.1      matt 
     65   1.1      matt static inline uint32_t
     66   1.1      matt riscvreg_fcsr_write(uint32_t __new)
     67   1.1      matt {
     68   1.1      matt 	uint32_t __old;
     69   1.1      matt 	__asm("fscsr %0, %1" : "=r"(__old) : "r"(__new));
     70   1.1      matt 	return __old;
     71   1.1      matt }
     72   1.1      matt 
     73   1.1      matt static inline uint32_t
     74   1.1      matt riscvreg_fcsr_read_fflags(void)
     75   1.1      matt {
     76   1.1      matt 	uint32_t __old;
     77   1.1      matt 	__asm("frflags %0" : "=r"(__old));
     78   1.1      matt 	return __SHIFTOUT(__old, FCSR_FFLAGS);
     79   1.1      matt }
     80   1.1      matt 
     81   1.1      matt static inline uint32_t
     82   1.1      matt riscvreg_fcsr_write_fflags(uint32_t __new)
     83   1.1      matt {
     84   1.1      matt 	uint32_t __old;
     85   1.1      matt 	__new = __SHIFTIN(__new, FCSR_FFLAGS);
     86   1.1      matt 	__asm("fsflags %0, %1" : "=r"(__old) : "r"(__new));
     87   1.1      matt 	return __SHIFTOUT(__old, FCSR_FFLAGS);
     88   1.1      matt }
     89   1.1      matt 
     90   1.1      matt static inline uint32_t
     91   1.1      matt riscvreg_fcsr_read_frm(void)
     92   1.1      matt {
     93   1.1      matt 	uint32_t __old;
     94   1.1      matt 	__asm("frrm\t%0" : "=r"(__old));
     95   1.1      matt 	return __SHIFTOUT(__old, FCSR_FRM);
     96   1.1      matt }
     97   1.1      matt 
     98   1.1      matt static inline uint32_t
     99   1.1      matt riscvreg_fcsr_write_frm(uint32_t __new)
    100   1.1      matt {
    101   1.1      matt 	uint32_t __old;
    102   1.1      matt 	__new = __SHIFTIN(__new, FCSR_FRM);
    103   1.1      matt 	__asm volatile("fsrm\t%0, %1" : "=r"(__old) : "r"(__new));
    104   1.1      matt 	return __SHIFTOUT(__old, FCSR_FRM);
    105   1.1      matt }
    106   1.1      matt 
    107   1.9     skrll /* Supervisor Status Register */
#ifdef _LP64
/*
 * Parenthesized: the expansion is a multi-term `|' expression and
 * would bind wrongly inside `SR_WPRI & x' or `~SR_WPRI' otherwise.
 */
#define SR_WPRI		(__BITS(62, 34) | __BITS(31,20) | __BIT(17) | \
			    __BITS(12,9) | __BITS(7,6) | __BITS(3,2))
#define SR_SD		__BIT(63)	// some dirty (FS/XS summary)
			/* Bits 62-34 are WPRI */
#define SR_UXL		__BITS(33,32)	// user-mode XLEN
#define  SR_UXL_32	1
#define  SR_UXL_64	2
#define  SR_UXL_128	3
			/* Bits 31-20 are WPRI*/
#else
#define SR_WPRI		(__BITS(30,20) | __BIT(17) | __BITS(12,9) | \
			    __BITS(7,6) | __BITS(3,2))
#define SR_SD		__BIT(31)	// some dirty (FS/XS summary)
			/* Bits 30-20 are WPRI*/
#endif /* _LP64 */
    124   1.9     skrll 
/* Both RV32 and RV64 have the bottom 20 bits shared */
#define SR_MXR		__BIT(19)	// Make eXecutable Readable
#define SR_SUM		__BIT(18)	// permit Supervisor User-Memory access
			/* Bit 17 is WPRI */
#define SR_XS		__BITS(16,15)	// additional-extension state
#define SR_FS		__BITS(14,13)	// floating-point unit state
#define  SR_FS_OFF	0
#define  SR_FS_INITIAL	1
#define  SR_FS_CLEAN	2
#define  SR_FS_DIRTY	3

			/* Bits 12-9 are WPRI */
#define SR_SPP		__BIT(8)	// Supervisor Previous Privilege
			/* Bits 7-6 are WPRI */
#define SR_SPIE		__BIT(5)	// Supervisor Previous Interrupt Enable
#define SR_UPIE		__BIT(4)	// User Previous Interrupt Enable
			/* Bits 3-2 are WPRI */
#define SR_SIE		__BIT(1)	// Supervisor Interrupt Enable
#define SR_UIE		__BIT(0)	// User Interrupt Enable
    144   1.9     skrll 
/* Supervisor interrupt registers */
/* ... interrupt pending register (sip) */
			/* Bit (XLEN-1)-10 is WIRI */
#define SIP_SEIP	__BIT(9)	// supervisor external intr pending
#define SIP_UEIP	__BIT(8)	// user external intr pending
			/* Bit 7-6 is WIRI */
#define SIP_STIP	__BIT(5)	// supervisor timer intr pending
#define SIP_UTIP	__BIT(4)	// user timer intr pending
			/* Bit 3-2 is WIRI */
#define SIP_SSIP	__BIT(1)	// supervisor software intr pending
#define SIP_USIP	__BIT(0)	// user software intr pending

/* ... interrupt-enable register (sie) */
			/* Bit (XLEN-1) - 10 is WIRI */
#define SIE_SEIE	__BIT(9)	// supervisor external intr enable
#define SIE_UEIE	__BIT(8)	// user external intr enable
			/* Bit 7-6 is WIRI */
#define SIE_STIE	__BIT(5)	// supervisor timer intr enable
#define SIE_UTIE	__BIT(4)	// user timer intr enable
			/* Bit 3-2 is WIRI */
#define SIE_SSIE	__BIT(1)	// supervisor software intr enable
#define SIE_USIE	__BIT(0)	// user software intr enable

/* Mask for all interrupts (was "SIE_SEI", an undefined macro) */
#define SIE_IM		(SIE_SEIE|SIE_UEIE|SIE_STIE|SIE_UTIE|SIE_SSIE|SIE_USIE)
    170   1.1      matt 
/* Canned sstatus values for user and kernel contexts */
#ifdef _LP64
#define	SR_USER		(SR_UIE)
#define	SR_USER32	(SR_USER)	// 32-bit user processes on RV64
#define	SR_KERNEL	(SR_SIE | SR_UIE)
#else
#define	SR_USER		(SR_UIE)
#define	SR_KERNEL	(SR_SIE | SR_UIE)
#endif
    179   1.1      matt 
    180   1.1      matt static inline uint32_t
    181   1.1      matt riscvreg_status_read(void)
    182   1.1      matt {
    183   1.1      matt 	uint32_t __sr;
    184   1.2      matt 	__asm("csrr\t%0, sstatus" : "=r"(__sr));
    185   1.1      matt 	return __sr;
    186   1.1      matt }
    187   1.1      matt 
    188   1.1      matt static inline uint32_t
    189   1.1      matt riscvreg_status_clear(uint32_t __mask)
    190   1.1      matt {
    191   1.1      matt 	uint32_t __sr;
    192   1.1      matt 	if (__builtin_constant_p(__mask) && __mask < 0x20) {
    193   1.2      matt 		__asm("csrrci\t%0, sstatus, %1" : "=r"(__sr) : "i"(__mask));
    194   1.1      matt 	} else {
    195   1.2      matt 		__asm("csrrc\t%0, sstatus, %1" : "=r"(__sr) : "r"(__mask));
    196   1.1      matt 	}
    197   1.1      matt 	return __sr;
    198   1.1      matt }
    199   1.1      matt 
    200   1.1      matt static inline uint32_t
    201   1.1      matt riscvreg_status_set(uint32_t __mask)
    202   1.1      matt {
    203   1.1      matt 	uint32_t __sr;
    204   1.1      matt 	if (__builtin_constant_p(__mask) && __mask < 0x20) {
    205   1.2      matt 		__asm("csrrsi\t%0, sstatus, %1" : "=r"(__sr) : "i"(__mask));
    206   1.1      matt 	} else {
    207   1.2      matt 		__asm("csrrs\t%0, sstatus, %1" : "=r"(__sr) : "r"(__mask));
    208   1.1      matt 	}
    209   1.1      matt 	return __sr;
    210   1.1      matt }
    211   1.1      matt 
// Cause register (scause) exception codes, valid when the top
// (interrupt) bit is clear.
#define CAUSE_FETCH_MISALIGNED		0
#define CAUSE_FETCH_ACCESS		1
#define CAUSE_ILLEGAL_INSTRUCTION	2
#define CAUSE_BREAKPOINT		3
#define CAUSE_LOAD_MISALIGNED		4
#define CAUSE_LOAD_ACCESS		5
#define CAUSE_STORE_MISALIGNED		6
#define CAUSE_STORE_ACCESS		7
#define CAUSE_SYSCALL			8	// legacy alias of CAUSE_USER_ECALL
#define CAUSE_USER_ECALL		8
#define CAUSE_SUPERVISOR_ECALL		9
/* 10 is reserved */
#define CAUSE_MACHINE_ECALL		11
#define CAUSE_FETCH_PAGE_FAULT		12
#define CAUSE_LOAD_PAGE_FAULT		13
/* 14 is Reserved */
#define CAUSE_STORE_PAGE_FAULT		15
/* >= 16 is reserved */
    231   1.1      matt 
/*
 * Read the 64-bit cycle counter.
 *
 * On RV32 the counter is split across the cycle/cycleh CSR pair, which
 * cannot be read atomically: read high, then low, then high again, and
 * retry if the high half changed, so that a carry out of the low word
 * between the reads cannot produce a torn value.
 */
static inline uint64_t
riscvreg_cycle_read(void)
{
#ifdef _LP64
	uint64_t __lo;
	__asm __volatile("csrr\t%0, cycle" : "=r"(__lo));
	return __lo;
#else
	uint32_t __hi0, __hi1, __lo0;
	do {
		__asm __volatile(
			"csrr\t%[__hi0], cycleh"
		"\n\t"	"csrr\t%[__lo0], cycle"
		"\n\t"	"csrr\t%[__hi1], cycleh"
		   :	[__hi0] "=r"(__hi0),
			[__lo0] "=r"(__lo0),
			[__hi1] "=r"(__hi1));
	} while (__hi0 != __hi1);
	return ((uint64_t)__hi0 << 32) | (uint64_t)__lo0;
#endif
}
    253   1.1      matt 
/* Supervisor address translation and protection register (satp) */
#ifdef _LP64
#define SATP_MODE		__BITS(63,60)	// translation scheme
#define  SATP_MODE_SV39		8	// 3-level page table
#define  SATP_MODE_SV48		9	// 4-level page table
#define SATP_ASID		__BITS(59,44)	// address space identifier
#define SATP_PPN		__BITS(43,0)	// root page-table PPN
#else
#define SATP_MODE		__BIT(31)	// 0 = bare, 1 = Sv32
#define  SATP_MODE_SV32		1	// 2-level page table
#define SATP_ASID		__BITS(30,22)	// address space identifier
#define SATP_PPN		__BITS(21,0)	// root page-table PPN
#endif
    266   1.2      matt 
    267  1.13     skrll static inline uintptr_t
    268  1.13     skrll riscvreg_satp_read(void)
    269  1.13     skrll {
    270  1.13     skrll 	uintptr_t satp;
    271  1.13     skrll 	__asm __volatile("csrr	%0, satp" : "=r" (satp));
    272  1.13     skrll 	return satp;
    273  1.13     skrll }
    274  1.13     skrll 
    275  1.13     skrll static inline void
    276  1.13     skrll riscvreg_satp_write(uintptr_t satp)
    277  1.13     skrll {
    278  1.13     skrll 	__asm __volatile("csrw	satp, %0" :: "r" (satp));
    279  1.13     skrll }
    280  1.13     skrll 
    281   1.2      matt static inline uint32_t
    282   1.2      matt riscvreg_asid_read(void)
    283   1.2      matt {
    284   1.4      maxv 	uintptr_t satp;
    285   1.4      maxv 	__asm __volatile("csrr	%0, satp" : "=r" (satp));
    286   1.4      maxv 	return __SHIFTOUT(satp, SATP_ASID);
    287   1.2      matt }
    288   1.2      matt 
    289   1.2      matt static inline void
    290   1.4      maxv riscvreg_asid_write(uint32_t asid)
    291   1.2      matt {
    292   1.4      maxv 	uintptr_t satp;
    293   1.4      maxv 	__asm __volatile("csrr	%0, satp" : "=r" (satp));
    294   1.4      maxv 	satp &= ~SATP_ASID;
    295  1.14     skrll 	satp |= __SHIFTIN(asid, SATP_ASID);
    296   1.4      maxv 	__asm __volatile("csrw	satp, %0" :: "r" (satp));
    297   1.2      matt }
    298   1.2      matt 
    299   1.1      matt #endif /* _RISCV_SYSREG_H_ */
    300