1 1.128 jmcneill /* $NetBSD: armreg.h,v 1.128 2019/08/12 23:31:48 jmcneill Exp $ */
2 1.1 bjh21
3 1.1 bjh21 /*
4 1.1 bjh21 * Copyright (c) 1998, 2001 Ben Harris
5 1.1 bjh21 * Copyright (c) 1994-1996 Mark Brinicombe.
6 1.1 bjh21 * Copyright (c) 1994 Brini.
7 1.1 bjh21 * All rights reserved.
8 1.1 bjh21 *
9 1.1 bjh21 * This code is derived from software written for Brini by Mark Brinicombe
10 1.1 bjh21 *
11 1.1 bjh21 * Redistribution and use in source and binary forms, with or without
12 1.1 bjh21 * modification, are permitted provided that the following conditions
13 1.1 bjh21 * are met:
14 1.1 bjh21 * 1. Redistributions of source code must retain the above copyright
15 1.1 bjh21 * notice, this list of conditions and the following disclaimer.
16 1.1 bjh21 * 2. Redistributions in binary form must reproduce the above copyright
17 1.1 bjh21 * notice, this list of conditions and the following disclaimer in the
18 1.1 bjh21 * documentation and/or other materials provided with the distribution.
19 1.1 bjh21 * 3. All advertising materials mentioning features or use of this software
20 1.1 bjh21 * must display the following acknowledgement:
21 1.1 bjh21 * This product includes software developed by Brini.
22 1.1 bjh21 * 4. The name of the company nor the name of the author may be used to
23 1.1 bjh21 * endorse or promote products derived from this software without specific
24 1.1 bjh21 * prior written permission.
25 1.1 bjh21 *
26 1.1 bjh21 * THIS SOFTWARE IS PROVIDED BY BRINI ``AS IS'' AND ANY EXPRESS OR IMPLIED
27 1.1 bjh21 * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
28 1.1 bjh21 * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
29 1.1 bjh21 * IN NO EVENT SHALL BRINI OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
30 1.1 bjh21 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
31 1.1 bjh21 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
32 1.1 bjh21 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
33 1.1 bjh21 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
34 1.1 bjh21 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
35 1.1 bjh21 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
36 1.1 bjh21 * SUCH DAMAGE.
37 1.1 bjh21 */
38 1.1 bjh21
39 1.1 bjh21 #ifndef _ARM_ARMREG_H
40 1.1 bjh21 #define _ARM_ARMREG_H
41 1.1 bjh21
42 1.119 ryo #include <arm/cputypes.h>
43 1.119 ryo
44 1.120 ryo #ifdef __arm__
45 1.120 ryo
46 1.1 bjh21 /*
47 1.1 bjh21 * ARM Process Status Register
48 1.1 bjh21 *
49 1.1 bjh21 * The picture in the ARM manuals looks like this:
50 1.104 skrll *  3 3 2 2 2 2
51 1.1 bjh21 *  1 0 9 8 7 6                                   8 7 6 5 4       0
52 1.1 bjh21 * +-+-+-+-+-+-------------------------------------+-+-+-+---------+
53 1.1 bjh21 * |N|Z|C|V|Q|               reserved              |I|F|T|M M M M M|
54 1.1 bjh21 * | | | | | |                                     | | | |4 3 2 1 0|
55 1.1 bjh21 * +-+-+-+-+-+-------------------------------------+-+-+-+---------+
56 1.1 bjh21 */
57 1.1 bjh21
58 1.1 bjh21 #define PSR_FLAGS 0xf0000000 /* flags */
59 1.1 bjh21 #define PSR_N_bit (1 << 31) /* negative */
60 1.1 bjh21 #define PSR_Z_bit (1 << 30) /* zero */
61 1.1 bjh21 #define PSR_C_bit (1 << 29) /* carry */
62 1.1 bjh21 #define PSR_V_bit (1 << 28) /* overflow */
63 1.1 bjh21
64 1.1 bjh21 #define PSR_Q_bit (1 << 27) /* saturation */
65 1.85 matt #define PSR_IT1_bit (1 << 26)
66 1.85 matt #define PSR_IT0_bit (1 << 25)
67 1.85 matt #define PSR_J_bit (1 << 24) /* Jazelle mode */
68 1.85 matt #define PSR_GE_bits (15 << 16) /* SIMD GE bits */
69 1.85 matt #define PSR_IT7_bit (1 << 15)
70 1.85 matt #define PSR_IT6_bit (1 << 14)
71 1.85 matt #define PSR_IT5_bit (1 << 13)
72 1.85 matt #define PSR_IT4_bit (1 << 12)
73 1.85 matt #define PSR_IT3_bit (1 << 11)
74 1.85 matt #define PSR_IT2_bit (1 << 10)
75 1.85 matt #define PSR_E_BIT (1 << 9) /* Endian state */
76 1.85 matt #define PSR_A_BIT (1 << 8) /* Async abort disable */
77 1.1 bjh21
78 1.1 bjh21 #define I32_bit (1 << 7) /* IRQ disable */
79 1.1 bjh21 #define F32_bit (1 << 6) /* FIQ disable */
80 1.85 matt #define IF32_bits (3 << 6) /* IRQ/FIQ disable */
81 1.1 bjh21
82 1.1 bjh21 #define PSR_T_bit (1 << 5) /* Thumb state */
83 1.1 bjh21
84 1.1 bjh21 #define PSR_MODE 0x0000001f /* mode mask */
85 1.1 bjh21 #define PSR_USR32_MODE 0x00000010
86 1.1 bjh21 #define PSR_FIQ32_MODE 0x00000011
87 1.1 bjh21 #define PSR_IRQ32_MODE 0x00000012
88 1.1 bjh21 #define PSR_SVC32_MODE 0x00000013
89 1.57 matt #define PSR_MON32_MODE 0x00000016
90 1.1 bjh21 #define PSR_ABT32_MODE 0x00000017
91 1.69 matt #define PSR_HYP32_MODE 0x0000001a
92 1.1 bjh21 #define PSR_UND32_MODE 0x0000001b
93 1.1 bjh21 #define PSR_SYS32_MODE 0x0000001f
94 1.1 bjh21 #define PSR_32_MODE 0x00000010
95 1.1 bjh21
96 1.1 bjh21 #define R15_FLAGS 0xf0000000
97 1.1 bjh21 #define R15_FLAG_N 0x80000000
98 1.1 bjh21 #define R15_FLAG_Z 0x40000000
99 1.1 bjh21 #define R15_FLAG_C 0x20000000
100 1.1 bjh21 #define R15_FLAG_V 0x10000000
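
/*
 * Usage sketch (illustrative, not part of the original header): decode a
 * saved PSR value with the mode and interrupt-mask bits above.  The helper
 * names are assumptions made for this example only.
 */
#if !defined(__ASSEMBLER__)
static inline int
arm_psr_usermode_p(uint32_t psr)
{
	return (psr & PSR_MODE) == PSR_USR32_MODE;
}

static inline int
arm_psr_irqs_enabled_p(uint32_t psr)
{
	return (psr & I32_bit) == 0;	/* I32_bit set means IRQs are masked */
}
#endif /* !__ASSEMBLER__ */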
101 1.1 bjh21
102 1.1 bjh21 /*
103 1.1 bjh21 * Co-processor 15: The system control co-processor.
104 1.1 bjh21 */
105 1.1 bjh21
106 1.1 bjh21 #define ARM_CP15_CPU_ID 0
107 1.1 bjh21
108 1.76 rkujawa /* CPUID registers */
109 1.90 matt #define ARM_ISA3_SYNCHPRIM_MASK 0x0000f000
110 1.90 matt #define ARM_ISA4_SYNCHPRIM_MASK 0x00f00000
111 1.90 matt #define ARM_ISA3_SYNCHPRIM_LDREX 0x10 // LDREX
112 1.90 matt #define ARM_ISA3_SYNCHPRIM_LDREXPLUS 0x13 // +CLREX/LDREXB/LDREXH
113 1.90 matt #define ARM_ISA3_SYNCHPRIM_LDREXD 0x20 // +LDREXD
114 1.76 rkujawa #define ARM_PFR0_THUMBEE_MASK 0x0000f000
115 1.77 matt #define ARM_PFR1_GTIMER_MASK 0x000f0000
116 1.77 matt #define ARM_PFR1_VIRT_MASK 0x0000f000
117 1.78 matt #define ARM_PFR1_SEC_MASK 0x000000f0
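
/*
 * Sketch (illustrative, not part of the original header): combine the
 * ID_ISAR3 and ID_ISAR4 synchronization-primitive fields into the encoding
 * used by the ARM_ISA3_SYNCHPRIM_* values above (high nibble from ISAR3,
 * low nibble from ISAR4).  The helper name is an assumption.
 */
#if !defined(__ASSEMBLER__)
static inline unsigned int
arm_isar_synchprim(uint32_t isar3, uint32_t isar4)
{
	return (__SHIFTOUT(isar3, ARM_ISA3_SYNCHPRIM_MASK) << 4) |
	    __SHIFTOUT(isar4, ARM_ISA4_SYNCHPRIM_MASK);
}
#endif /* !__ASSEMBLER__ */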
118 1.1 bjh21
119 1.82 matt /* Media and VFP Feature registers */
120 1.82 matt #define ARM_MVFR0_ROUNDING_MASK 0xf0000000
121 1.82 matt #define ARM_MVFR0_SHORTVEC_MASK 0x0f000000
122 1.82 matt #define ARM_MVFR0_SQRT_MASK 0x00f00000
123 1.82 matt #define ARM_MVFR0_DIVIDE_MASK 0x000f0000
124 1.82 matt #define ARM_MVFR0_EXCEPT_MASK 0x0000f000
125 1.82 matt #define ARM_MVFR0_DFLOAT_MASK 0x00000f00
126 1.82 matt #define ARM_MVFR0_SFLOAT_MASK 0x000000f0
127 1.82 matt #define ARM_MVFR0_ASIMD_MASK 0x0000000f
128 1.82 matt #define ARM_MVFR1_ASIMD_FMACS_MASK 0xf0000000
129 1.82 matt #define ARM_MVFR1_VFP_HPFP_MASK 0x0f000000
130 1.82 matt #define ARM_MVFR1_ASIMD_HPFP_MASK 0x00f00000
131 1.82 matt #define ARM_MVFR1_ASIMD_SPFP_MASK 0x000f0000
132 1.82 matt #define ARM_MVFR1_ASIMD_INT_MASK 0x0000f000
133 1.82 matt #define ARM_MVFR1_ASIMD_LDST_MASK 0x00000f00
134 1.82 matt #define ARM_MVFR1_D_NAN_MASK 0x000000f0
135 1.82 matt #define ARM_MVFR1_FTZ_MASK 0x0000000f
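
/*
 * Sketch (illustrative, not part of the original header): probe an MVFR0
 * value, as read with armreg_mvfr0_read() further down, for hardware
 * floating-point support.
 */
#if !defined(__ASSEMBLER__)
static inline int
arm_mvfr0_has_dfloat(uint32_t mvfr0)
{
	/* a non-zero field means double precision is implemented */
	return __SHIFTOUT(mvfr0, ARM_MVFR0_DFLOAT_MASK) != 0;
}
#endif /* !__ASSEMBLER__ */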
136 1.82 matt
137 1.1 bjh21 /* ARM3-specific coprocessor 15 registers */
138 1.1 bjh21 #define ARM3_CP15_FLUSH 1
139 1.1 bjh21 #define ARM3_CP15_CONTROL 2
140 1.1 bjh21 #define ARM3_CP15_CACHEABLE 3
141 1.1 bjh21 #define ARM3_CP15_UPDATEABLE 4
142 1.104 skrll #define ARM3_CP15_DISRUPTIVE 5
143 1.1 bjh21
144 1.1 bjh21 /* ARM3 Control register bits */
145 1.1 bjh21 #define ARM3_CTL_CACHE_ON 0x00000001
146 1.1 bjh21 #define ARM3_CTL_SHARED 0x00000002
147 1.1 bjh21 #define ARM3_CTL_MONITOR 0x00000004
148 1.1 bjh21
149 1.1 bjh21 /*
150 1.1 bjh21 * Post-ARM3 CP15 registers:
151 1.14 thorpej *
152 1.14 thorpej * 1 Control register
153 1.14 thorpej *
154 1.14 thorpej * 2 Translation Table Base
155 1.14 thorpej *
156 1.14 thorpej * 3 Domain Access Control
157 1.14 thorpej *
158 1.14 thorpej * 4 Reserved
159 1.14 thorpej *
160 1.14 thorpej * 5 Fault Status
161 1.14 thorpej *
162 1.14 thorpej * 6 Fault Address
163 1.14 thorpej *
164 1.14 thorpej * 7 Cache/write-buffer Control
165 1.14 thorpej *
166 1.14 thorpej * 8 TLB Control
167 1.14 thorpej *
168 1.14 thorpej * 9 Cache Lockdown
169 1.14 thorpej *
170 1.14 thorpej * 10 TLB Lockdown
171 1.14 thorpej *
172 1.14 thorpej * 11 Reserved
173 1.14 thorpej *
174 1.14 thorpej * 12 Reserved
175 1.14 thorpej *
176 1.14 thorpej * 13 Process ID (for FCSE)
177 1.14 thorpej *
178 1.14 thorpej * 14 Reserved
179 1.14 thorpej *
180 1.14 thorpej * 15 Implementation Dependent
181 1.1 bjh21 */
182 1.14 thorpej
183 1.1 bjh21 /* Some of the definitions below need cleaning up for V3/V4 architectures */
184 1.1 bjh21
185 1.1 bjh21 /* CPU control register (CP15 register 1) */
186 1.1 bjh21 #define CPU_CONTROL_MMU_ENABLE 0x00000001 /* M: MMU/Protection unit enable */
187 1.1 bjh21 #define CPU_CONTROL_AFLT_ENABLE 0x00000002 /* A: Alignment fault enable */
188 1.1 bjh21 #define CPU_CONTROL_DC_ENABLE 0x00000004 /* C: IDC/DC enable */
189 1.1 bjh21 #define CPU_CONTROL_WBUF_ENABLE 0x00000008 /* W: Write buffer enable */
190 1.1 bjh21 #define CPU_CONTROL_32BP_ENABLE 0x00000010 /* P: 32-bit exception handlers */
191 1.1 bjh21 #define CPU_CONTROL_32BD_ENABLE 0x00000020 /* D: 32-bit addressing */
192 1.1 bjh21 #define CPU_CONTROL_LABT_ENABLE 0x00000040 /* L: Late abort enable */
193 1.1 bjh21 #define CPU_CONTROL_BEND_ENABLE 0x00000080 /* B: Big-endian mode */
194 1.1 bjh21 #define CPU_CONTROL_SYST_ENABLE 0x00000100 /* S: System protection bit */
195 1.1 bjh21 #define CPU_CONTROL_ROM_ENABLE 0x00000200 /* R: ROM protection bit */
196 1.1 bjh21 #define CPU_CONTROL_CPCLK 0x00000400 /* F: Implementation defined */
197 1.59 matt #define CPU_CONTROL_SWP_ENABLE 0x00000400 /* SW: SWP{B} perform normally. */
198 1.1 bjh21 #define CPU_CONTROL_BPRD_ENABLE 0x00000800 /* Z: Branch prediction enable */
199 1.1 bjh21 #define CPU_CONTROL_IC_ENABLE 0x00001000 /* I: IC enable */
200 1.1 bjh21 #define CPU_CONTROL_VECRELOC 0x00002000 /* V: Vector relocation */
201 1.1 bjh21 #define CPU_CONTROL_ROUNDROBIN 0x00004000 /* RR: Predictable replacement */
202 1.1 bjh21 #define CPU_CONTROL_V4COMPAT 0x00008000 /* L4: ARMv4 compat LDR R15 etc */
203 1.90 matt #define CPU_CONTROL_HA_ENABLE 0x00020000 /* HA: Hardware Access flag enable */
204 1.90 matt #define CPU_CONTROL_WXN_ENABLE 0x00080000 /* WXN: Write Execute Never */
205 1.90 matt #define CPU_CONTROL_UWXN_ENABLE 0x00100000 /* UWXN: User Write eXecute Never */
206 1.39 matt #define CPU_CONTROL_FI_ENABLE 0x00200000 /* FI: Low interrupt latency */
207 1.46 bsh #define CPU_CONTROL_UNAL_ENABLE 0x00400000 /* U: unaligned data access */
208 1.46 bsh #define CPU_CONTROL_XP_ENABLE 0x00800000 /* XP: extended page table */
209 1.47 bsh #define CPU_CONTROL_V_ENABLE 0x01000000 /* VE: Interrupt vectors enable */
210 1.47 bsh #define CPU_CONTROL_EX_BEND 0x02000000 /* EE: exception endianness */
211 1.47 bsh #define CPU_CONTROL_NMFI 0x08000000 /* NMFI: Non maskable FIQ */
212 1.47 bsh #define CPU_CONTROL_TR_ENABLE 0x10000000 /* TRE: TEX remap enable */
213 1.47 bsh #define CPU_CONTROL_AF_ENABLE 0x20000000 /* AFE: Access flag enable */
214 1.47 bsh #define CPU_CONTROL_TE_ENABLE 0x40000000 /* TE: Thumb Exception enable */
215 1.1 bjh21
216 1.1 bjh21 #define CPU_CONTROL_IDC_ENABLE CPU_CONTROL_DC_ENABLE
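
/*
 * Usage sketch (illustrative, not part of the original header): compute an
 * updated system control register value that turns on the caches and branch
 * prediction; the value would be read and written back with the
 * armreg_sctlr_* accessors generated further down in this file.
 */
#if !defined(__ASSEMBLER__)
static inline uint32_t
arm_sctlr_enable_caches(uint32_t sctlr)
{
	return sctlr | CPU_CONTROL_IC_ENABLE | CPU_CONTROL_DC_ENABLE |
	    CPU_CONTROL_BPRD_ENABLE;
}
#endif /* !__ASSEMBLER__ */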
217 1.16 thorpej
218 1.68 matt /* ARMv6/ARMv7 Co-Processor Access Control Register (CP15, 0, c1, c0, 2) */
219 1.68 matt #define CPACR_V7_ASEDIS 0x80000000 /* Disable Advanced SIMD Ext. */
220 1.68 matt #define CPACR_V7_D32DIS 0x40000000 /* Disable VFP regs 15-31 */
221 1.68 matt #define CPACR_CPn(n) (3 << (2*(n)))
222 1.68 matt #define CPACR_NOACCESS 0 /* reset value */
223 1.68 matt #define CPACR_PRIVED 1 /* Privileged mode access */
224 1.68 matt #define CPACR_RESERVED 2
225 1.68 matt #define CPACR_ALL 3 /* Privileged and User mode access */
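
/*
 * Sketch (illustrative, not part of the original header): compute a CPACR
 * value granting privileged and user access to cp10/cp11 (VFP/Advanced
 * SIMD); the result would be written with armreg_cpacr_write(), generated
 * further down in this file.
 */
#if !defined(__ASSEMBLER__)
static inline uint32_t
arm_cpacr_enable_vfp(uint32_t cpacr)
{
	cpacr &= ~(CPACR_CPn(10) | CPACR_CPn(11));
	cpacr |= __SHIFTIN(CPACR_ALL, CPACR_CPn(10)) |
	    __SHIFTIN(CPACR_ALL, CPACR_CPn(11));
	return cpacr;
}
#endif /* !__ASSEMBLER__ */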
226 1.68 matt
227 1.94 matt /* ARMv6/ARMv7 Non-Secure Access Control Register (CP15, 0, c1, c1, 2) */
228 1.94 matt #define NSACR_SMP 0x00040000 /* ACTRL.SMP is writeable (!A8) */
229 1.94 matt #define NSACR_L2ERR 0x00020000 /* L2ECTRL is writeable (!A8) */
230 1.94 matt #define NSACR_ASEDIS 0x00008000 /* Deny Advanced SIMD Ext. */
231 1.94 matt #define NSACR_D32DIS 0x00004000 /* Deny VFP regs 15-31 */
232 1.94 matt #define NSACR_CPn(n) (1 << (n)) /* NonSecure access allowed */
233 1.94 matt
234 1.54 skrll /* ARM11x6 Auxiliary Control Register (CP15 register 1, opcode2 1) */
235 1.54 skrll #define ARM11X6_AUXCTL_RS 0x00000001 /* return stack */
236 1.54 skrll #define ARM11X6_AUXCTL_DB 0x00000002 /* dynamic branch prediction */
237 1.54 skrll #define ARM11X6_AUXCTL_SB 0x00000004 /* static branch prediction */
238 1.54 skrll #define ARM11X6_AUXCTL_TR 0x00000008 /* MicroTLB replacement strat. */
239 1.54 skrll #define ARM11X6_AUXCTL_EX 0x00000010 /* exclusive L1/L2 cache */
240 1.54 skrll #define ARM11X6_AUXCTL_RA 0x00000020 /* clean entire cache disable */
241 1.54 skrll #define ARM11X6_AUXCTL_RV 0x00000040 /* block transfer cache disable */
242 1.54 skrll #define ARM11X6_AUXCTL_CZ 0x00000080 /* restrict cache size */
243 1.54 skrll
244 1.56 skrll /* ARM1136 Auxiliary Control Register (CP15 register 1, opcode2 1) */
245 1.56 skrll #define ARM1136_AUXCTL_PFI 0x80000000 /* PFI: partial FI mode. */
246 1.56 skrll /* This is an undocumented flag
247 1.56 skrll * used to work around a cache bug
248 1.56 skrll * in r0 steppings. See errata
249 1.56 skrll * 364296.
250 1.56 skrll */
251 1.104 skrll /* ARM1176 Auxiliary Control Register (CP15 register 1, opcode2 1) */
252 1.54 skrll #define ARM1176_AUXCTL_PHD 0x10000000 /* inst. prefetch halting disable */
253 1.54 skrll #define ARM1176_AUXCTL_BFD 0x20000000 /* branch folding disable */
254 1.54 skrll #define ARM1176_AUXCTL_FSD 0x40000000 /* force speculative ops disable */
255 1.54 skrll #define ARM1176_AUXCTL_FIO 0x80000000 /* low intr latency override */
256 1.39 matt
257 1.55 skrll /* XScale Auxiliary Control Register (CP15 register 1, opcode2 1) */
258 1.16 thorpej #define XSCALE_AUXCTL_K 0x00000001 /* dis. write buffer coalescing */
259 1.16 thorpej #define XSCALE_AUXCTL_P 0x00000002 /* ECC protect page table access */
260 1.16 thorpej #define XSCALE_AUXCTL_MD_WB_RA 0x00000000 /* mini-D$ wb, read-allocate */
261 1.16 thorpej #define XSCALE_AUXCTL_MD_WB_RWA 0x00000010 /* mini-D$ wb, read/write-allocate */
262 1.17 thorpej #define XSCALE_AUXCTL_MD_WT 0x00000020 /* mini-D$ wt, read-allocate */
263 1.17 thorpej #define XSCALE_AUXCTL_MD_MASK 0x00000030
264 1.1 bjh21
265 1.55 skrll /* ARM11 MPCore Auxiliary Control Register (CP15 register 1, opcode2 1) */
266 1.47 bsh #define MPCORE_AUXCTL_RS 0x00000001 /* return stack */
267 1.47 bsh #define MPCORE_AUXCTL_DB 0x00000002 /* dynamic branch prediction */
268 1.47 bsh #define MPCORE_AUXCTL_SB 0x00000004 /* static branch prediction */
269 1.47 bsh #define MPCORE_AUXCTL_F 0x00000008 /* instruction folding enable */
270 1.47 bsh #define MPCORE_AUXCTL_EX 0x00000010 /* exclusive L1/L2 cache */
271 1.47 bsh #define MPCORE_AUXCTL_SA 0x00000020 /* SMP/AMP */
272 1.47 bsh
273 1.105 hsuenaga /* Marvell PJ4B Auxiliary Control Register (CP15.0.R1.c0.1) */
274 1.105 hsuenaga #define PJ4B_AUXCTL_FW __BIT(0) /* Cache and TLB updates broadcast */
275 1.105 hsuenaga #define PJ4B_AUXCTL_SMPNAMP __BIT(6) /* 0 = AMP, 1 = SMP */
276 1.105 hsuenaga #define PJ4B_AUXCTL_L1PARITY __BIT(9) /* L1 parity checking */
277 1.105 hsuenaga
278 1.105 hsuenaga /* Marvell PJ4B Auxiliary Function Modes Control 0 (CP15.1.R15.c2.0) */
279 1.105 hsuenaga #define PJ4B_AUXFMC0_L2EN __BIT(0) /* Tightly-Coupled L2 cache enable */
280 1.105 hsuenaga #define PJ4B_AUXFMC0_SMPNAMP __BIT(1) /* 0 = AMP, 1 = SMP */
281 1.105 hsuenaga #define PJ4B_AUXFMC0_L1PARITY __BIT(2) /* alias of PJ4B_AUXCTL_L1PARITY */
282 1.105 hsuenaga #define PJ4B_AUXFMC0_DCSLFD __BIT(2) /* Disable DC Speculative linefill */
283 1.105 hsuenaga #define PJ4B_AUXFMC0_FW __BIT(8) /* alias of PJ4B_AUXCTL_FW */
284 1.76 rkujawa
285 1.124 skrll /* Cortex-A5 Auxiliary Control Register (CP15 register 1, opcode 1) */
286 1.124 skrll #define CORTEXA5_ACTLR_FW __BIT(0)
287 1.124 skrll #define CORTEXA5_ACTLR_SMP __BIT(6) /* Inner Cache Shared is cacheable */
288 1.124 skrll #define CORTEXA5_ACTLR_EXCL __BIT(7) /* Exclusive L1/L2 cache control */
289 1.124 skrll
290 1.124 skrll /* Cortex-A7 Auxiliary Control Register (CP15 register 1, opcode 1) */
291 1.124 skrll #define CORTEXA7_ACTLR_L1ALIAS __BIT(0) /* Enables L1 cache alias checks */
292 1.124 skrll #define CORTEXA7_ACTLR_L2EN __BIT(1) /* Enables L2 cache */
293 1.124 skrll #define CORTEXA7_ACTLR_SMP __BIT(6) /* SMP */
294 1.124 skrll
295 1.124 skrll /* Cortex-A8 Auxiliary Control Register (CP15 register 1, opcode 1) */
296 1.124 skrll #define CORTEXA8_ACTLR_L1ALIAS __BIT(0) /* Enables L1 cache alias checks */
297 1.124 skrll #define CORTEXA8_ACTLR_L2EN __BIT(1) /* Enables L2 cache */
298 1.124 skrll
299 1.60 matt /* Cortex-A9 Auxiliary Control Register (CP15 register 1, opcode 1) */
300 1.60 matt #define CORTEXA9_AUXCTL_FW 0x00000001 /* Cache and TLB updates broadcast */
301 1.98 matt #define CORTEXA9_AUXCTL_L2PE 0x00000002 /* Prefetch hint enable */
302 1.98 matt #define CORTEXA9_AUXCTL_L1PE 0x00000004 /* Data prefetch hint enable */
303 1.60 matt #define CORTEXA9_AUXCTL_WR_ZERO 0x00000008 /* Ena. write full line of 0s mode */
304 1.60 matt #define CORTEXA9_AUXCTL_SMP 0x00000040 /* Coherency is active */
305 1.60 matt #define CORTEXA9_AUXCTL_EXCL 0x00000080 /* Exclusive cache bit */
306 1.60 matt #define CORTEXA9_AUXCTL_ONEWAY 0x00000100 /* Allocate in one cache way only */
307 1.60 matt #define CORTEXA9_AUXCTL_PARITY 0x00000200 /* Support parity checking */
308 1.60 matt
309 1.108 skrll /* Cortex-A15 Auxiliary Control Register (CP15 register 1, opcode 1) */
310 1.108 skrll #define CORTEXA15_ACTLR_BTB __BIT(0) /* Enables invalidates of BTB */
311 1.108 skrll #define CORTEXA15_ACTLR_SMP __BIT(6) /* SMP */
312 1.109 skrll #define CORTEXA15_ACTLR_IOBEU __BIT(15) /* In order issue in Branch Exec Unit */
313 1.124 skrll #define CORTEXA15_ACTLR_SDEH __BIT(31) /* snoop-delayed exclusive handling */
314 1.108 skrll
315 1.45 kiyohara /* Marvell Feroceon Extra Features Register (CP15 register 1, opcode2 0) */
316 1.45 kiyohara #define FC_DCACHE_REPL_LOCK 0x80000000 /* Replace DCache Lock */
317 1.45 kiyohara #define FC_DCACHE_STREAM_EN 0x20000000 /* DCache Streaming Switch */
318 1.45 kiyohara #define FC_WR_ALLOC_EN 0x10000000 /* Enable Write Allocate */
319 1.45 kiyohara #define FC_L2_PREF_DIS 0x01000000 /* L2 Cache Prefetch Disable */
320 1.45 kiyohara #define FC_L2_INV_EVICT_LINE 0x00800000 /* L2 Invalidates Uncorrectable Error Line Eviction */
321 1.45 kiyohara #define FC_L2CACHE_EN 0x00400000 /* L2 enable */
322 1.45 kiyohara #define FC_ICACHE_REPL_LOCK 0x00080000 /* Replace ICache Lock */
323 1.45 kiyohara #define FC_GLOB_HIST_REG_EN 0x00040000 /* Branch Global History Register Enable */
324 1.45 kiyohara #define FC_BRANCH_TARG_BUF_DIS 0x00020000 /* Branch Target Buffer Disable */
325 1.45 kiyohara #define FC_L1_PAR_ERR_EN 0x00010000 /* L1 Parity Error Enable */
326 1.45 kiyohara
327 1.41 matt /* Cache type register definitions 0 */
328 1.41 matt #define CPU_CT_FORMAT(x) (((x) >> 29) & 0x7) /* reg format */
329 1.9 thorpej #define CPU_CT_ISIZE(x) ((x) & 0xfff) /* I$ info */
330 1.9 thorpej #define CPU_CT_DSIZE(x) (((x) >> 12) & 0xfff) /* D$ info */
331 1.9 thorpej #define CPU_CT_S (1U << 24) /* split cache */
332 1.9 thorpej #define CPU_CT_CTYPE(x) (((x) >> 25) & 0xf) /* cache type */
333 1.9 thorpej
334 1.9 thorpej #define CPU_CT_CTYPE_WT 0 /* write-through */
335 1.9 thorpej #define CPU_CT_CTYPE_WB1 1 /* write-back, clean w/ read */
336 1.9 thorpej #define CPU_CT_CTYPE_WB2 2 /* w/b, clean w/ cp15,7 */
337 1.9 thorpej #define CPU_CT_CTYPE_WB6 6 /* w/b, cp15,7, lockdown fmt A */
338 1.9 thorpej #define CPU_CT_CTYPE_WB7 7 /* w/b, cp15,7, lockdown fmt B */
339 1.41 matt #define CPU_CT_CTYPE_WB14 14 /* w/b, cp15,7, lockdown fmt C */
340 1.9 thorpej
341 1.9 thorpej #define CPU_CT_xSIZE_LEN(x) ((x) & 0x3) /* line size */
342 1.9 thorpej #define CPU_CT_xSIZE_M (1U << 2) /* multiplier */
343 1.9 thorpej #define CPU_CT_xSIZE_ASSOC(x) (((x) >> 3) & 0x7) /* associativity */
344 1.9 thorpej #define CPU_CT_xSIZE_SIZE(x) (((x) >> 6) & 0x7) /* size */
345 1.38 matt #define CPU_CT_xSIZE_P (1U << 11) /* need to page-color */
346 1.1 bjh21
347 1.41 matt /* format 4 definitions */
348 1.41 matt #define CPU_CT4_ILINE(x) ((x) & 0xf) /* I$ line size */
349 1.41 matt #define CPU_CT4_DLINE(x) (((x) >> 16) & 0xf) /* D$ line size */
350 1.41 matt #define CPU_CT4_L1IPOLICY(x) (((x) >> 14) & 0x3) /* I$ policy */
351 1.65 matt #define CPU_CT4_L1_AIVIVT 1 /* ASID tagged VIVT */
352 1.41 matt #define CPU_CT4_L1_VIPT 2 /* VIPT */
353 1.65 matt #define CPU_CT4_L1_PIPT 3 /* PIPT */
354 1.65 matt #define CPU_CT4_ERG(x) (((x) >> 20) & 0xf) /* Exclusive Resv. Granule */
355 1.65 matt #define CPU_CT4_CWG(x) (((x) >> 24) & 0xf) /* Cache WriteBack Granule */
356 1.41 matt
357 1.41 matt /* Cache size identification register definitions 1, Rd, c0, c0, 0 */
358 1.104 skrll #define CPU_CSID_CTYPE_WT 0x80000000 /* write-through avail */
359 1.104 skrll #define CPU_CSID_CTYPE_WB 0x40000000 /* write-back avail */
360 1.104 skrll #define CPU_CSID_CTYPE_RA 0x20000000 /* read-allocation avail */
361 1.104 skrll #define CPU_CSID_CTYPE_WA 0x10000000 /* write-allocation avail */
362 1.44 matt #define CPU_CSID_NUMSETS(x) (((x) >> 13) & 0x7fff)
363 1.41 matt #define CPU_CSID_ASSOC(x) (((x) >> 3) & 0x1ff)
364 1.64 matt #define CPU_CSID_LEN(x) ((x) & 0x07)
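
/*
 * Sketch (illustrative, not part of the original header): derive the size in
 * bytes of the cache level selected via CSSELR from a CCSIDR value (read
 * with armreg_ccsidr_read(), generated further down in this file).
 */
#if !defined(__ASSEMBLER__)
static inline uint32_t
arm_ccsidr_cache_size(uint32_t ccsidr)
{
	const uint32_t nsets = CPU_CSID_NUMSETS(ccsidr) + 1;
	const uint32_t assoc = CPU_CSID_ASSOC(ccsidr) + 1;
	const uint32_t linesize = 4U << (CPU_CSID_LEN(ccsidr) + 2);	/* bytes */

	return nsets * assoc * linesize;
}
#endif /* !__ASSEMBLER__ */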
365 1.41 matt
366 1.41 matt /* Cache size selection register definitions 2, Rd, c0, c0, 0 */
367 1.41 matt #define CPU_CSSR_L2 0x00000002
368 1.41 matt #define CPU_CSSR_L1 0x00000000
369 1.41 matt #define CPU_CSSR_InD 0x00000001
370 1.41 matt
371 1.1 bjh21 /* Fault status register definitions */
372 1.1 bjh21
373 1.1 bjh21 #define FAULT_TYPE_MASK 0x0f
374 1.1 bjh21 #define FAULT_USER 0x10
375 1.1 bjh21
376 1.1 bjh21 #define FAULT_WRTBUF_0 0x00 /* Vector Exception */
377 1.1 bjh21 #define FAULT_WRTBUF_1 0x02 /* Terminal Exception */
378 1.1 bjh21 #define FAULT_BUSERR_0 0x04 /* External Abort on Linefetch -- Section */
379 1.1 bjh21 #define FAULT_BUSERR_1 0x06 /* External Abort on Linefetch -- Page */
380 1.1 bjh21 #define FAULT_BUSERR_2 0x08 /* External Abort on Non-linefetch -- Section */
381 1.1 bjh21 #define FAULT_BUSERR_3 0x0a /* External Abort on Non-linefetch -- Page */
382 1.1 bjh21 #define FAULT_BUSTRNL1 0x0c /* External abort on Translation -- Level 1 */
383 1.1 bjh21 #define FAULT_BUSTRNL2 0x0e /* External abort on Translation -- Level 2 */
384 1.1 bjh21 #define FAULT_ALIGN_0 0x01 /* Alignment */
385 1.1 bjh21 #define FAULT_ALIGN_1 0x03 /* Alignment */
386 1.1 bjh21 #define FAULT_TRANS_S 0x05 /* Translation -- Section */
387 1.1 bjh21 #define FAULT_TRANS_P 0x07 /* Translation -- Page */
388 1.1 bjh21 #define FAULT_DOMAIN_S 0x09 /* Domain -- Section */
389 1.1 bjh21 #define FAULT_DOMAIN_P 0x0b /* Domain -- Page */
390 1.1 bjh21 #define FAULT_PERM_S 0x0d /* Permission -- Section */
391 1.1 bjh21 #define FAULT_PERM_P 0x0f /* Permission -- Page */
392 1.28 scw
393 1.86 matt #define FAULT_LPAE 0x0200 /* (SW) used long descriptors */
394 1.86 matt #define FAULT_IMPRECISE 0x0400 /* Imprecise exception (XSCALE) */
395 1.86 matt #define FAULT_WRITE 0x0800 /* fault was due to write (ARMv6+) */
396 1.86 matt #define FAULT_EXT 0x1000 /* fault was due to external abort (ARMv6+) */
397 1.86 matt #define FAULT_CM 0x2000 /* fault was due to cache maintenance (ARMv7+) */
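
/*
 * Sketch (illustrative, not part of the original header): classify a fault
 * status register value with the definitions above.  The helper names are
 * assumptions made for this example only.
 */
#if !defined(__ASSEMBLER__)
static inline int
arm_fsr_permission_fault_p(uint32_t fsr)
{
	const uint32_t type = fsr & FAULT_TYPE_MASK;

	return type == FAULT_PERM_S || type == FAULT_PERM_P;
}

static inline int
arm_fsr_write_fault_p(uint32_t fsr)
{
	return (fsr & FAULT_WRITE) != 0;	/* meaningful on ARMv6+ only */
}
#endif /* !__ASSEMBLER__ */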
398 1.15 thorpej
399 1.15 thorpej /*
400 1.15 thorpej * Address of the vector page, low and high versions.
401 1.15 thorpej */
402 1.24 thorpej #define ARM_VECTORS_LOW 0x00000000U
403 1.24 thorpej #define ARM_VECTORS_HIGH 0xffff0000U
404 1.1 bjh21
405 1.1 bjh21 /*
406 1.1 bjh21 * ARM Instructions
407 1.1 bjh21 *
408 1.104 skrll *  3 3 2 2 2
409 1.1 bjh21 *  1 0 9 8 7                                                     0
410 1.1 bjh21 * +-------+-------------------------------------------------------+
411 1.48 wiz * | cond  |                 instruction dependent                 |
412 1.1 bjh21 * |c c c c|                                                       |
413 1.1 bjh21 * +-------+-------------------------------------------------------+
414 1.1 bjh21 */
415 1.1 bjh21
416 1.1 bjh21 #define INSN_SIZE 4 /* Always 4 bytes */
417 1.1 bjh21 #define INSN_COND_MASK 0xf0000000 /* Condition mask */
418 1.91 matt #define INSN_COND_EQ 0 /* Z == 1 */
419 1.91 matt #define INSN_COND_NE 1 /* Z == 0 */
420 1.91 matt #define INSN_COND_CS 2 /* C == 1 */
421 1.91 matt #define INSN_COND_CC 3 /* C == 0 */
422 1.91 matt #define INSN_COND_MI 4 /* N == 1 */
423 1.91 matt #define INSN_COND_PL 5 /* N == 0 */
424 1.91 matt #define INSN_COND_VS 6 /* V == 1 */
425 1.91 matt #define INSN_COND_VC 7 /* V == 0 */
426 1.91 matt #define INSN_COND_HI 8 /* C == 1 && Z == 0 */
427 1.91 matt #define INSN_COND_LS 9 /* C == 0 || Z == 1 */
428 1.91 matt #define INSN_COND_GE 10 /* N == V */
429 1.91 matt #define INSN_COND_LT 11 /* N != V */
430 1.91 matt #define INSN_COND_GT 12 /* Z == 0 && N == V */
431 1.91 matt #define INSN_COND_LE 13 /* Z == 1 || N != V */
432 1.91 matt #define INSN_COND_AL 14 /* Always condition */
433 1.1 bjh21
434 1.30 rearnsha #define THUMB_INSN_SIZE 2 /* Some are 4 bytes. */
435 1.30 rearnsha
436 1.38 matt /*
437 1.38 matt * Defines and such for arm11 Performance Monitor Counters (p15, c15, c12, 0)
438 1.38 matt */
439 1.38 matt #define ARM11_PMCCTL_E __BIT(0) /* enable all three counters */
440 1.38 matt #define ARM11_PMCCTL_P __BIT(1) /* reset both Count Registers to zero */
441 1.38 matt #define ARM11_PMCCTL_C __BIT(2) /* reset the Cycle Counter Register to zero */
442 1.38 matt #define ARM11_PMCCTL_D __BIT(3) /* cycle count divide by 64 */
443 1.38 matt #define ARM11_PMCCTL_EC0 __BIT(4) /* Enable Counter Register 0 interrupt */
444 1.38 matt #define ARM11_PMCCTL_EC1 __BIT(5) /* Enable Counter Register 1 interrupt */
445 1.38 matt #define ARM11_PMCCTL_ECC __BIT(6) /* Enable Cycle Counter interrupt */
446 1.38 matt #define ARM11_PMCCTL_SBZa __BIT(7) /* UNP/SBZ */
447 1.38 matt #define ARM11_PMCCTL_CR0 __BIT(8) /* Count Register 0 overflow flag */
448 1.38 matt #define ARM11_PMCCTL_CR1 __BIT(9) /* Count Register 1 overflow flag */
449 1.38 matt #define ARM11_PMCCTL_CCR __BIT(10) /* Cycle Count Register overflow flag */
450 1.38 matt #define ARM11_PMCCTL_X __BIT(11) /* Enable Export of the events to the event bus */
451 1.38 matt #define ARM11_PMCCTL_EVT1 __BITS(19,12) /* source of events for Count Register 1 */
452 1.38 matt #define ARM11_PMCCTL_EVT0 __BITS(27,20) /* source of events for Count Register 0 */
453 1.38 matt #define ARM11_PMCCTL_SBZb __BITS(31,28) /* UNP/SBZ */
454 1.38 matt #define ARM11_PMCCTL_SBZ \
455 1.38 matt (ARM11_PMCCTL_SBZa | ARM11_PMCCTL_SBZb)
456 1.38 matt
457 1.38 matt #define ARM11_PMCEVT_ICACHE_MISS 0 /* Instruction Cache Miss */
458 1.38 matt #define ARM11_PMCEVT_ISTREAM_STALL 1 /* Instruction Stream Stall */
459 1.38 matt #define ARM11_PMCEVT_IUTLB_MISS 2 /* Instruction uTLB Miss */
460 1.38 matt #define ARM11_PMCEVT_DUTLB_MISS 3 /* Data uTLB Miss */
461 1.38 matt #define ARM11_PMCEVT_BRANCH 4 /* Branch Inst. Executed */
462 1.38 matt #define ARM11_PMCEVT_BRANCH_MISS 6 /* Branch mispredicted */
463 1.38 matt #define ARM11_PMCEVT_INST_EXEC 7 /* Instruction Executed */
464 1.38 matt #define ARM11_PMCEVT_DCACHE_ACCESS0 9 /* Data Cache Access */
465 1.38 matt #define ARM11_PMCEVT_DCACHE_ACCESS1 10 /* Data Cache Access */
466 1.38 matt #define ARM11_PMCEVT_DCACHE_MISS 11 /* Data Cache Miss */
467 1.38 matt #define ARM11_PMCEVT_DCACHE_WRITEBACK 12 /* Data Cache Writeback */
468 1.38 matt #define ARM11_PMCEVT_PC_CHANGE 13 /* Software PC change */
469 1.38 matt #define ARM11_PMCEVT_TLB_MISS 15 /* Main TLB Miss */
470 1.38 matt #define ARM11_PMCEVT_DATA_ACCESS 16 /* non-cached data access */
471 1.38 matt #define ARM11_PMCEVT_LSU_STALL 17 /* Load/Store Unit stall */
472 1.38 matt #define ARM11_PMCEVT_WBUF_DRAIN 18 /* Write buffer drained */
473 1.38 matt #define ARM11_PMCEVT_ETMEXTOUT0 32 /* ETMEXTOUT[0] asserted */
474 1.38 matt #define ARM11_PMCEVT_ETMEXTOUT1 33 /* ETMEXTOUT[1] asserted */
475 1.38 matt #define ARM11_PMCEVT_ETMEXTOUT 34 /* ETMEXTOUT[0 & 1] */
476 1.38 matt #define ARM11_PMCEVT_CALL_EXEC 35 /* Procedure call executed */
477 1.38 matt #define ARM11_PMCEVT_RETURN_EXEC 36 /* Return executed */
478 1.38 matt #define ARM11_PMCEVT_RETURN_HIT 37 /* return address predicted */
479 1.38 matt #define ARM11_PMCEVT_RETURN_MISS 38 /* return addr. mispredicted */
480 1.38 matt #define ARM11_PMCEVT_CYCLE 255 /* Increment each cycle */
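
/*
 * Sketch (illustrative, not part of the original header): compose an ARM11
 * PMC control word that resets and enables all three counters and selects
 * the two events to count; it would be written with armreg_pmcrv6_write(),
 * generated further down in this file.
 */
#if !defined(__ASSEMBLER__)
static inline uint32_t
arm11_pmc_config(unsigned int evt0, unsigned int evt1)
{
	return ARM11_PMCCTL_E | ARM11_PMCCTL_P | ARM11_PMCCTL_C |
	    __SHIFTIN(evt0, ARM11_PMCCTL_EVT0) |
	    __SHIFTIN(evt1, ARM11_PMCCTL_EVT1);
}
#endif /* !__ASSEMBLER__ */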
481 1.38 matt
482 1.43 matt /* Defines for ARM CORTEX performance counters */
483 1.43 matt #define CORTEX_CNTENS_C __BIT(31) /* Enables the cycle counter */
484 1.43 matt #define CORTEX_CNTENC_C __BIT(31) /* Disables the cycle counter */
485 1.43 matt #define CORTEX_CNTOFL_C __BIT(31) /* Cycle counter overflow flag */
486 1.42 jmcneill
487 1.86 matt /* Defines for ARM Cortex A7/A15 L2CTRL */
488 1.86 matt #define L2CTRL_NUMCPU __BITS(25,24) // numcpus - 1
489 1.86 matt #define L2CTRL_ICPRES __BIT(23) // Interrupt Controller is present
490 1.86 matt
491 1.107 skrll /* Translation Table Base Register */
492 1.107 skrll #define TTBR_C __BIT(0) /* without MPE */
493 1.107 skrll #define TTBR_S __BIT(1)
494 1.107 skrll #define TTBR_IMP __BIT(2)
495 1.107 skrll #define TTBR_RGN_MASK __BITS(4,3)
496 1.107 skrll #define TTBR_RGN_NC __SHIFTIN(0, TTBR_RGN_MASK)
497 1.107 skrll #define TTBR_RGN_WBWA __SHIFTIN(1, TTBR_RGN_MASK)
498 1.107 skrll #define TTBR_RGN_WT __SHIFTIN(2, TTBR_RGN_MASK)
499 1.107 skrll #define TTBR_RGN_WBNWA __SHIFTIN(3, TTBR_RGN_MASK)
500 1.107 skrll #define TTBR_NOS __BIT(5)
501 1.107 skrll #define TTBR_IRGN_MASK (__BIT(6) | __BIT(0))
502 1.107 skrll #define TTBR_IRGN_NC 0
503 1.107 skrll #define TTBR_IRGN_WBWA __BIT(6)
504 1.107 skrll #define TTBR_IRGN_WT __BIT(0)
505 1.107 skrll #define TTBR_IRGN_WBNWA (__BIT(0) | __BIT(6))
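
/*
 * Sketch (illustrative, not part of the original header): compose a TTBR0
 * value for a translation table at physical address "ttb" with write-back
 * write-allocate table walks; the shareability bits only apply with the
 * multiprocessing extensions.
 */
#if !defined(__ASSEMBLER__)
static inline uint32_t
arm_ttbr_value(uint32_t ttb, int mpext)
{
	uint32_t ttbr = ttb | TTBR_RGN_WBWA | TTBR_IRGN_WBWA;

	if (mpext)
		ttbr |= TTBR_S | TTBR_NOS;	/* inner-shareable walks */
	return ttbr;
}
#endif /* !__ASSEMBLER__ */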
506 1.107 skrll
507 1.81 matt /* Translation Table Base Control Register */
508 1.81 matt #define TTBCR_S_EAE __BIT(31) // Extended Address Extension
509 1.81 matt #define TTBCR_S_PD1 __BIT(5) // Don't use TTBR1
510 1.81 matt #define TTBCR_S_PD0 __BIT(4) // Don't use TTBR0
511 1.81 matt #define TTBCR_S_N __BITS(2,0) // Width of base address in TTB0
512 1.81 matt
513 1.81 matt #define TTBCR_L_EAE __BIT(31) // Extended Address Extension
514 1.81 matt #define TTBCR_L_SH1 __BITS(29,28) // TTBR1 Shareability
515 1.81 matt #define TTBCR_L_ORGN1 __BITS(27,26) // TTBR1 Outer cacheability
516 1.81 matt #define TTBCR_L_IRGN1 __BITS(25,24) // TTBR1 inner cacheability
517 1.81 matt #define TTBCR_L_EPD1 __BIT(23) // Don't use TTBR1
518 1.81 matt #define TTBCR_L_A1 __BIT(22) // ASID is in TTBR1
519 1.81 matt #define TTBCR_L_T1SZ __BITS(18,16) // TTBR1 size offset
520 1.81 matt #define TTBCR_L_SH0 __BITS(13,12) // TTBR0 Shareability
521 1.81 matt #define TTBCR_L_ORGN0 __BITS(11,10) // TTBR0 Outer cacheability
522 1.81 matt #define TTBCR_L_IRGN0 __BITS(9,8) // TTBR0 inner cacheability
523 1.81 matt #define TTBCR_L_EPD0 __BIT(7) // Don't use TTBR0
524 1.81 matt #define TTBCR_L_T0SZ __BITS(2,0) // TTBR0 size offset
525 1.81 matt
526 1.126 skrll #define NMRR_ORn(n) __BITS(17+2*(n),16+2*(n)) // Outer Cacheable mappings
527 1.126 skrll #define NMRR_IRn(n) __BITS(1+2*(n),0+2*(n)) // Inner Cacheable mappings
528 1.126 skrll #define NMRR_NC 0 // non-cacheable
529 1.126 skrll #define NMRR_WBWA 1 // write-back write-allocate
530 1.126 skrll #define NMRR_WT 2 // write-through
531 1.126 skrll #define NMRR_WB 3 // write-back
532 1.87 matt #define PRRR_NOSn(n) __BIT(24+(n)) // Memory region is Inner Shareable
533 1.87 matt #define PRRR_NS1 __BIT(19) // Normal Shareable S=1 is Shareable
534 1.87 matt #define PRRR_NS0 __BIT(18) // Normal Shareable S=0 is Shareable
535 1.87 matt #define PRRR_DS1 __BIT(17) // Device Shareable S=1 is Shareable
536 1.87 matt #define PRRR_DS0 __BIT(16) // Device Shareable S=0 is Shareable
537 1.87 matt #define PRRR_TRn(n) __BITS(1+2*(n),0+2*(n))
538 1.87 matt #define PRRR_TR_STRONG 0 // Strongly Ordered
539 1.87 matt #define PRRR_TR_DEVICE 1 // Device
540 1.87 matt #define PRRR_TR_NORMAL 2 // Normal Memory
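
/*
 * Sketch (illustrative, not part of the original header): with TEX remap
 * enabled (SCTLR.TRE) the PRRR/NMRR pair defines the memory types.  These
 * compose values that make type 0 strongly ordered, type 1 device and
 * type 2 normal write-back write-allocate; they would be installed with the
 * armreg_prrr_write()/armreg_nmrr_write() accessors further down.
 */
#if !defined(__ASSEMBLER__)
static inline uint32_t
arm_prrr_value(void)
{
	return __SHIFTIN(PRRR_TR_STRONG, PRRR_TRn(0)) |
	    __SHIFTIN(PRRR_TR_DEVICE, PRRR_TRn(1)) |
	    __SHIFTIN(PRRR_TR_NORMAL, PRRR_TRn(2)) |
	    PRRR_NS1;		/* normal memory with S=1 is shareable */
}

static inline uint32_t
arm_nmrr_value(void)
{
	return __SHIFTIN(NMRR_WBWA, NMRR_IRn(2)) |
	    __SHIFTIN(NMRR_WBWA, NMRR_ORn(2));
}
#endif /* !__ASSEMBLER__ */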
541 1.87 matt
542 1.105 hsuenaga /* ARMv7 MPIDR, Multiprocessor Affinity Register generic format */
543 1.105 hsuenaga #define MPIDR_MP __BIT(31) /* 1 = Have MP Extension */
544 1.105 hsuenaga #define MPIDR_U __BIT(30) /* 1 = Uni-Processor System */
545 1.105 hsuenaga #define MPIDR_MT __BIT(24) /* 1 = SMT(AFF0 is logical) */
546 1.105 hsuenaga #define MPIDR_AFF2 __BITS(23,16) /* Affinity Level 2 */
547 1.105 hsuenaga #define MPIDR_AFF1 __BITS(15,8) /* Affinity Level 1 */
548 1.105 hsuenaga #define MPIDR_AFF0 __BITS(7,0) /* Affinity Level 0 */
549 1.105 hsuenaga
550 1.105 hsuenaga /* MPIDR implementation of ARM Cortex A9: SMT and AFF2 is not used */
551 1.105 hsuenaga #define CORTEXA9_MPIDR_MP MPIDR_MP
552 1.105 hsuenaga #define CORTEXA9_MPIDR_U MPIDR_U
553 1.105 hsuenaga #define CORTEXA9_MPIDR_CLID __BITS(11,8) /* AFF1 = cluster id */
554 1.114 skrll #define CORTEXA9_MPIDR_CPUID __BITS(0,1) /* AFF0 = physical core id */
555 1.105 hsuenaga
556 1.105 hsuenaga /* MPIDR implementation of Marvell PJ4B-MP: AFF2 is not used */
557 1.105 hsuenaga #define PJ4B_MPIDR_MP MPIDR_MP
558 1.105 hsuenaga #define PJ4B_MPIDR_U MPIDR_U
559 1.105 hsuenaga #define PJ4B_MPIDR_MT MPIDR_MT /* 1 = SMT(AFF0 is logical) */
560 1.105 hsuenaga #define PJ4B_MPIDR_CLID __BITS(11,8) /* AFF1 = cluster id */
561 1.105 hsuenaga #define PJ4B_MPIDR_CPUID __BITS(0,3) /* AFF0 = core id */
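
/*
 * Sketch (illustrative, not part of the original header): fold an MPIDR
 * value into a flat CPU index, assuming at most four cores per cluster as
 * in the Cortex-A9 layout above.  The helper name is an assumption.
 */
#if !defined(__ASSEMBLER__)
static inline unsigned int
arm_mpidr_to_index(uint32_t mpidr)
{
	return (__SHIFTOUT(mpidr, MPIDR_AFF1) << 2) |
	    __SHIFTOUT(mpidr, MPIDR_AFF0);
}
#endif /* !__ASSEMBLER__ */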
562 1.105 hsuenaga
563 1.77 matt /* Defines for ARM Generic Timer */
564 1.120 ryo #define CNTCTL_ISTATUS __BIT(2) // Interrupt is pending
565 1.120 ryo #define CNTCTL_IMASK __BIT(1) // Mask Interrupt
566 1.120 ryo #define CNTCTL_ENABLE __BIT(0) // Timer Enabled
567 1.120 ryo
568 1.120 ryo #define CNTKCTL_PL0PTEN __BIT(9) /* PL0 Physical Timer Enable */
569 1.120 ryo #define CNTKCTL_PL0VTEN __BIT(8) /* PL0 Virtual Timer Enable */
570 1.120 ryo #define CNTKCTL_EVNTI __BITS(7,4) /* CNTVCT Event Bit Select */
571 1.120 ryo #define CNTKCTL_EVNTDIR __BIT(3) /* CNTVCT Event Dir (1->0) */
572 1.120 ryo #define CNTKCTL_EVNTEN __BIT(2) /* CNTVCT Event Enable */
573 1.120 ryo #define CNTKCTL_PL0VCTEN __BIT(1) /* PL0 Virtual Counter Enable */
574 1.120 ryo #define CNTKCTL_PL0PCTEN __BIT(0) /* PL0 Physical Counter Enable */
575 1.116 skrll
576 1.116 skrll /* CNTHCTL, Timer PL2 Control register, Virtualization Extensions */
577 1.120 ryo #define CNTHCTL_EVNTI __BITS(7,4)
578 1.120 ryo #define CNTHCTL_EVNTDIR __BIT(3)
579 1.120 ryo #define CNTHCTL_EVNTEN __BIT(2)
580 1.120 ryo #define CNTHCTL_PL1PCEN __BIT(1)
581 1.120 ryo #define CNTHCTL_PL1PCTEN __BIT(0)
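
/*
 * Sketch (illustrative, not part of the original header): update a CNTV_CTL
 * or CNTP_CTL value so the timer is enabled with its interrupt unmasked; the
 * timer/compare value is assumed to have been programmed already.
 */
#if !defined(__ASSEMBLER__)
static inline uint32_t
gtmr_ctl_start(uint32_t ctl)
{
	ctl &= ~CNTCTL_IMASK;
	ctl |= CNTCTL_ENABLE;
	return ctl;
}
#endif /* !__ASSEMBLER__ */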
582 1.77 matt
583 1.88 matt #define ARM_A5_TLBDATA_DOM __BITS(62,59)
584 1.88 matt #define ARM_A5_TLBDATA_AP __BITS(58,56)
585 1.88 matt #define ARM_A5_TLBDATA_NS_WALK __BIT(55)
586 1.88 matt #define ARM_A5_TLBDATA_NS_PAGE __BIT(54)
587 1.89 matt #define ARM_A5_TLBDATA_XN __BIT(53)
588 1.88 matt #define ARM_A5_TLBDATA_TEX __BITS(52,50)
589 1.88 matt #define ARM_A5_TLBDATA_B __BIT(49)
590 1.88 matt #define ARM_A5_TLBDATA_C __BIT(48)
591 1.88 matt #define ARM_A5_TLBDATA_S __BIT(47)
592 1.89 matt #define ARM_A5_TLBDATA_ASID __BITS(46,39)
593 1.89 matt #define ARM_A5_TLBDATA_SIZE __BITS(38,37)
594 1.88 matt #define ARM_A5_TLBDATA_SIZE_4KB 0
595 1.88 matt #define ARM_A5_TLBDATA_SIZE_16KB 1
596 1.88 matt #define ARM_A5_TLBDATA_SIZE_1MB 2
597 1.88 matt #define ARM_A5_TLBDATA_SIZE_16MB 3
598 1.89 matt #define ARM_A5_TLBDATA_VA __BITS(36,22)
599 1.89 matt #define ARM_A5_TLBDATA_PA __BITS(21,2)
600 1.88 matt #define ARM_A5_TLBDATA_nG __BIT(1)
601 1.88 matt #define ARM_A5_TLBDATA_VALID __BIT(0)
602 1.88 matt
603 1.88 matt #define ARM_A7_TLBDATA2_S2_LEVEL __BITS(85-64,84-64)
604 1.88 matt #define ARM_A7_TLBDATA2_S1_SIZE __BITS(83-64,82-64)
605 1.88 matt #define ARM_A7_TLBDATA2_S1_SIZE_4KB 0
606 1.99 skrll #define ARM_A7_TLBDATA2_S1_SIZE_64KB 1
607 1.99 skrll #define ARM_A7_TLBDATA2_S1_SIZE_1MB 2
608 1.99 skrll #define ARM_A7_TLBDATA2_S1_SIZE_16MB 3
609 1.88 matt #define ARM_A7_TLBDATA2_DOM __BITS(81-64,78-64)
610 1.88 matt #define ARM_A7_TLBDATA2_IS __BITS(77-64,76-64)
611 1.88 matt #define ARM_A7_TLBDATA2_IS_NC 0
612 1.99 skrll #define ARM_A7_TLBDATA2_IS_WB_WA 1
613 1.88 matt #define ARM_A7_TLBDATA2_IS_WT 2
614 1.88 matt #define ARM_A7_TLBDATA2_IS_DSO 3
615 1.88 matt #define ARM_A7_TLBDATA2_S2OVR __BIT(75-64)
616 1.88 matt #define ARM_A7_TLBDATA2_SDO_MT __BITS(74-64,72-64)
617 1.88 matt #define ARM_A7_TLBDATA2_SDO_MT_D 2
618 1.88 matt #define ARM_A7_TLBDATA2_SDO_MT_SO 6
619 1.88 matt #define ARM_A7_TLBDATA2_OS __BITS(75-64,74-64)
620 1.88 matt #define ARM_A7_TLBDATA2_OS_NC 0
621 1.88 matt #define ARM_A7_TLBDATA2_OS_WB_WA 1
622 1.88 matt #define ARM_A7_TLBDATA2_OS_WT 2
623 1.88 matt #define ARM_A7_TLBDATA2_OS_WB 3
624 1.88 matt #define ARM_A7_TLBDATA2_SH __BITS(73-64,72-64)
625 1.88 matt #define ARM_A7_TLBDATA2_SH_NONE 0
626 1.88 matt #define ARM_A7_TLBDATA2_SH_UNUSED 1
627 1.88 matt #define ARM_A7_TLBDATA2_SH_OS 2
628 1.88 matt #define ARM_A7_TLBDATA2_SH_IS 3
629 1.88 matt #define ARM_A7_TLBDATA2_XN2 __BIT(71-64)
630 1.88 matt #define ARM_A7_TLBDATA2_XN1 __BIT(70-64)
631 1.88 matt #define ARM_A7_TLBDATA2_PXN __BIT(69-64)
632 1.88 matt
633 1.88 matt #define ARM_A7_TLBDATA12_PA __BITS(68-32,41-32)
634 1.88 matt
635 1.88 matt #define ARM_A7_TLBDATA1_NS __BIT(40-32)
636 1.88 matt #define ARM_A7_TLBDATA1_HAP __BITS(39-32,38-32)
637 1.88 matt #define ARM_A7_TLBDATA1_AP __BITS(37-32,35-32)
638 1.88 matt #define ARM_A7_TLBDATA1_nG __BIT(34-32)
639 1.88 matt
640 1.88 matt #define ARM_A7_TLBDATA01_ASID __BITS(33,26)
641 1.88 matt
642 1.88 matt #define ARM_A7_TLBDATA0_VMID __BITS(25,18)
643 1.88 matt #define ARM_A7_TLBDATA0_VA __BITS(17,5)
644 1.88 matt #define ARM_A7_TLBDATA0_NS_WALK __BIT(4)
645 1.88 matt #define ARM_A7_TLBDATA0_SIZE __BITS(3,1)
646 1.88 matt #define ARM_A7_TLBDATA0_SIZE_V7_4KB 0
647 1.88 matt #define ARM_A7_TLBDATA0_SIZE_LPAE_4KB 1
648 1.88 matt #define ARM_A7_TLBDATA0_SIZE_V7_64KB 2
649 1.88 matt #define ARM_A7_TLBDATA0_SIZE_LPAE_64KB 3
650 1.88 matt #define ARM_A7_TLBDATA0_SIZE_V7_1MB 4
651 1.88 matt #define ARM_A7_TLBDATA0_SIZE_LPAE_2MB 5
652 1.88 matt #define ARM_A7_TLBDATA0_SIZE_V7_16MB 6
653 1.88 matt #define ARM_A7_TLBDATA0_SIZE_LPAE_1GB 7
654 1.88 matt
655 1.88 matt #define ARM_TLBDATA_VALID __BIT(0)
656 1.88 matt
657 1.88 matt #define ARM_TLBDATAOP_WAY __BIT(31)
658 1.88 matt #define ARM_A5_TLBDATAOP_INDEX __BITS(5,0)
659 1.88 matt #define ARM_A7_TLBDATAOP_INDEX __BITS(6,0)
660 1.88 matt
661 1.104 skrll #if !defined(__ASSEMBLER__) && defined(_KERNEL)
662 1.91 matt static inline bool
663 1.91 matt arm_cond_ok_p(uint32_t insn, uint32_t psr)
664 1.91 matt {
665 1.91 matt const uint32_t __cond = __SHIFTOUT(insn, INSN_COND_MASK);
666 1.91 matt
667 1.91 matt bool __ok;
668 1.91 matt const bool __z = (psr & PSR_Z_bit);
669 1.91 matt const bool __n = (psr & PSR_N_bit);
670 1.91 matt const bool __c = (psr & PSR_C_bit);
671 1.91 matt const bool __v = (psr & PSR_V_bit);
672 1.91 matt switch (__cond & ~1) {
673 1.91 matt case INSN_COND_EQ: // Z == 1
674 1.91 matt __ok = __z;
675 1.91 matt break;
676 1.91 matt case INSN_COND_CS: // C == 1
677 1.91 matt __ok = __c;
678 1.91 matt break;
679 1.91 matt case INSN_COND_MI: // N == 1
680 1.91 matt __ok = __n;
681 1.91 matt break;
682 1.91 matt case INSN_COND_VS: // V == 1
683 1.91 matt __ok = __v;
684 1.91 matt break;
685 1.91 matt case INSN_COND_HI: // C == 1 && Z == 0
686 1.91 matt __ok = __c && !__z;
687 1.91 matt break;
688 1.91 matt case INSN_COND_GE: // N == V
689 1.91 matt __ok = __n == __v;
690 1.91 matt break;
691 1.91 matt case INSN_COND_GT: // N == V && Z == 0
692 1.91 matt __ok = __n == __v && !__z;
693 1.91 matt break;
694 1.92 matt default: /* INSN_COND_AL or unconditional */
695 1.91 matt return true;
696 1.91 matt }
697 1.91 matt
698 1.91 matt return (__cond & 1) ? !__ok : __ok;
699 1.91 matt }
700 1.93 matt #endif /* !__ASSEMBLER && _KERNEL */
701 1.91 matt
702 1.72 matt #if !defined(__ASSEMBLER__) && !defined(_RUMPKERNEL)
703 1.60 matt #define ARMREG_READ_INLINE(name, __insnstring) \
704 1.60 matt static inline uint32_t armreg_##name##_read(void) \
705 1.60 matt { \
706 1.60 matt uint32_t __rv; \
707 1.60 matt __asm __volatile("mrc " __insnstring : "=r"(__rv)); \
708 1.60 matt return __rv; \
709 1.60 matt }
710 1.60 matt
711 1.60 matt #define ARMREG_WRITE_INLINE(name, __insnstring) \
712 1.60 matt static inline void armreg_##name##_write(uint32_t __val) \
713 1.60 matt { \
714 1.60 matt __asm __volatile("mcr " __insnstring :: "r"(__val)); \
715 1.60 matt }
716 1.60 matt
717 1.84 matt #define ARMREG_READ_INLINE2(name, __insnstring) \
718 1.84 matt static inline uint32_t armreg_##name##_read(void) \
719 1.84 matt { \
720 1.84 matt uint32_t __rv; \
721 1.118 christos __asm __volatile(".fpu vfp"); \
722 1.118 christos __asm __volatile(__insnstring : "=r"(__rv)); \
723 1.84 matt return __rv; \
724 1.84 matt }
725 1.84 matt
726 1.84 matt #define ARMREG_WRITE_INLINE2(name, __insnstring) \
727 1.84 matt static inline void armreg_##name##_write(uint32_t __val) \
728 1.84 matt { \
729 1.118 christos __asm __volatile(".fpu vfp"); \
730 1.84 matt __asm __volatile(__insnstring :: "r"(__val)); \
731 1.84 matt }
732 1.84 matt
733 1.71 matt #define ARMREG_READ64_INLINE(name, __insnstring) \
734 1.71 matt static inline uint64_t armreg_##name##_read(void) \
735 1.71 matt { \
736 1.71 matt uint64_t __rv; \
737 1.71 matt __asm __volatile("mrrc " __insnstring : "=r"(__rv)); \
738 1.71 matt return __rv; \
739 1.71 matt }
740 1.71 matt
741 1.71 matt #define ARMREG_WRITE64_INLINE(name, __insnstring) \
742 1.71 matt static inline void armreg_##name##_write(uint64_t __val) \
743 1.71 matt { \
744 1.71 matt __asm __volatile("mcrr " __insnstring :: "r"(__val)); \
745 1.71 matt }
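
/*
 * For illustration: an instantiation such as
 *
 *	ARMREG_READ_INLINE(midr, "p15,0,%0,c0,c0,0")
 *
 * below expands to roughly
 *
 *	static inline uint32_t armreg_midr_read(void)
 *	{
 *		uint32_t __rv;
 *		__asm __volatile("mrc p15,0,%0,c0,c0,0" : "=r"(__rv));
 *		return __rv;
 *	}
 *
 * so callers simply use, e.g., "uint32_t midr = armreg_midr_read();".
 */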
746 1.71 matt
747 1.73 matt /* cp10 registers */
748 1.84 matt ARMREG_READ_INLINE2(fpsid, "vmrs\t%0, fpsid") /* VFP System ID */
749 1.84 matt ARMREG_READ_INLINE2(fpscr, "vmrs\t%0, fpscr") /* VFP Status/Control Register */
750 1.84 matt ARMREG_WRITE_INLINE2(fpscr, "vmsr\tfpscr, %0") /* VFP Status/Control Register */
751 1.84 matt ARMREG_READ_INLINE2(mvfr1, "vmrs\t%0, mvfr1") /* Media and VFP Feature Register 1 */
752 1.84 matt ARMREG_READ_INLINE2(mvfr0, "vmrs\t%0, mvfr0") /* Media and VFP Feature Register 0 */
753 1.84 matt ARMREG_READ_INLINE2(fpexc, "vmrs\t%0, fpexc") /* VFP Exception Register */
754 1.84 matt ARMREG_WRITE_INLINE2(fpexc, "vmsr\tfpexc, %0") /* VFP Exception Register */
755 1.84 matt ARMREG_READ_INLINE2(fpinst, "fmrx\t%0, fpinst") /* VFP Exception Instruction */
756 1.84 matt ARMREG_WRITE_INLINE2(fpinst, "fmxr\tfpinst, %0") /* VFP Exception Instruction */
757 1.84 matt ARMREG_READ_INLINE2(fpinst2, "fmrx\t%0, fpinst2") /* VFP Exception Instruction 2 */
758 1.84 matt ARMREG_WRITE_INLINE2(fpinst2, "fmxr\tfpinst2, %0") /* VFP Exception Instruction 2 */
759 1.73 matt
760 1.73 matt /* cp15 c0 registers */
761 1.60 matt ARMREG_READ_INLINE(midr, "p15,0,%0,c0,c0,0") /* Main ID Register */
762 1.60 matt ARMREG_READ_INLINE(ctr, "p15,0,%0,c0,c0,1") /* Cache Type Register */
763 1.91 matt ARMREG_READ_INLINE(tlbtr, "p15,0,%0,c0,c0,3") /* TLB Type Register */
764 1.60 matt ARMREG_READ_INLINE(mpidr, "p15,0,%0,c0,c0,5") /* Multiprocess Affinity Register */
765 1.106 skrll ARMREG_READ_INLINE(revidr, "p15,0,%0,c0,c0,6") /* Revision ID Register */
766 1.60 matt ARMREG_READ_INLINE(pfr0, "p15,0,%0,c0,c1,0") /* Processor Feature Register 0 */
767 1.60 matt ARMREG_READ_INLINE(pfr1, "p15,0,%0,c0,c1,1") /* Processor Feature Register 1 */
768 1.60 matt ARMREG_READ_INLINE(mmfr0, "p15,0,%0,c0,c1,4") /* Memory Model Feature Register 0 */
769 1.60 matt ARMREG_READ_INLINE(mmfr1, "p15,0,%0,c0,c1,5") /* Memory Model Feature Register 1 */
770 1.60 matt ARMREG_READ_INLINE(mmfr2, "p15,0,%0,c0,c1,6") /* Memory Model Feature Register 2 */
771 1.60 matt ARMREG_READ_INLINE(mmfr3, "p15,0,%0,c0,c1,7") /* Memory Model Feature Register 3 */
772 1.60 matt ARMREG_READ_INLINE(isar0, "p15,0,%0,c0,c2,0") /* Instruction Set Attribute Register 0 */
773 1.60 matt ARMREG_READ_INLINE(isar1, "p15,0,%0,c0,c2,1") /* Instruction Set Attribute Register 1 */
774 1.60 matt ARMREG_READ_INLINE(isar2, "p15,0,%0,c0,c2,2") /* Instruction Set Attribute Register 2 */
775 1.60 matt ARMREG_READ_INLINE(isar3, "p15,0,%0,c0,c2,3") /* Instruction Set Attribute Register 3 */
776 1.60 matt ARMREG_READ_INLINE(isar4, "p15,0,%0,c0,c2,4") /* Instruction Set Attribute Register 4 */
777 1.60 matt ARMREG_READ_INLINE(isar5, "p15,0,%0,c0,c2,5") /* Instruction Set Attribute Register 5 */
778 1.60 matt ARMREG_READ_INLINE(ccsidr, "p15,1,%0,c0,c0,0") /* Cache Size ID Register */
779 1.60 matt ARMREG_READ_INLINE(clidr, "p15,1,%0,c0,c0,1") /* Cache Level ID Register */
780 1.60 matt ARMREG_READ_INLINE(csselr, "p15,2,%0,c0,c0,0") /* Cache Size Selection Register */
781 1.60 matt ARMREG_WRITE_INLINE(csselr, "p15,2,%0,c0,c0,0") /* Cache Size Selection Register */
782 1.73 matt /* cp15 c1 registers */
783 1.103 skrll ARMREG_READ_INLINE(sctlr, "p15,0,%0,c1,c0,0") /* System Control Register */
784 1.103 skrll ARMREG_WRITE_INLINE(sctlr, "p15,0,%0,c1,c0,0") /* System Control Register */
785 1.68 matt ARMREG_READ_INLINE(auxctl, "p15,0,%0,c1,c0,1") /* Auxiliary Control Register */
786 1.68 matt ARMREG_WRITE_INLINE(auxctl, "p15,0,%0,c1,c0,1") /* Auxiliary Control Register */
787 1.68 matt ARMREG_READ_INLINE(cpacr, "p15,0,%0,c1,c0,2") /* Co-Processor Access Control Register */
788 1.68 matt ARMREG_WRITE_INLINE(cpacr, "p15,0,%0,c1,c0,2") /* Co-Processor Access Control Register */
789 1.95 matt ARMREG_READ_INLINE(scr, "p15,0,%0,c1,c1,0") /* Secure Configuration Register */
790 1.94 matt ARMREG_READ_INLINE(nsacr, "p15,0,%0,c1,c1,2") /* Non-Secure Access Control Register */
791 1.73 matt /* cp15 c2 registers */
792 1.63 matt ARMREG_READ_INLINE(ttbr, "p15,0,%0,c2,c0,0") /* Translation Table Base Register 0 */
793 1.63 matt ARMREG_WRITE_INLINE(ttbr, "p15,0,%0,c2,c0,0") /* Translation Table Base Register 0 */
794 1.63 matt ARMREG_READ_INLINE(ttbr1, "p15,0,%0,c2,c0,1") /* Translation Table Base Register 1 */
795 1.63 matt ARMREG_WRITE_INLINE(ttbr1, "p15,0,%0,c2,c0,1") /* Translation Table Base Register 1 */
796 1.63 matt ARMREG_READ_INLINE(ttbcr, "p15,0,%0,c2,c0,2") /* Translation Table Base Control Register */
797 1.63 matt ARMREG_WRITE_INLINE(ttbcr, "p15,0,%0,c2,c0,2") /* Translation Table Base Control Register */
798 1.86 matt /* cp15 c3 registers */
799 1.86 matt ARMREG_READ_INLINE(dacr, "p15,0,%0,c3,c0,0") /* Domain Access Control Register */
800 1.86 matt ARMREG_WRITE_INLINE(dacr, "p15,0,%0,c3,c0,0") /* Domain Access Control Register */
801 1.73 matt /* cp15 c5 registers */
802 1.66 matt ARMREG_READ_INLINE(dfsr, "p15,0,%0,c5,c0,0") /* Data Fault Status Register */
803 1.66 matt ARMREG_READ_INLINE(ifsr, "p15,0,%0,c5,c0,1") /* Instruction Fault Status Register */
804 1.73 matt /* cp15 c6 registers */
805 1.66 matt ARMREG_READ_INLINE(dfar, "p15,0,%0,c6,c0,0") /* Data Fault Address Register */
806 1.66 matt ARMREG_READ_INLINE(ifar, "p15,0,%0,c6,c0,2") /* Instruction Fault Address Register */
807 1.73 matt /* cp15 c7 registers */
808 1.65 matt ARMREG_WRITE_INLINE(icialluis, "p15,0,%0,c7,c1,0") /* Instruction Inv All (IS) */
809 1.100 skrll ARMREG_WRITE_INLINE(bpiallis, "p15,0,%0,c7,c1,6") /* Branch Predictor Invalidate All (IS) */
810 1.65 matt ARMREG_READ_INLINE(par, "p15,0,%0,c7,c4,0") /* Physical Address Register */
811 1.65 matt ARMREG_WRITE_INLINE(iciallu, "p15,0,%0,c7,c5,0") /* Instruction Invalidate All */
812 1.65 matt ARMREG_WRITE_INLINE(icimvau, "p15,0,%0,c7,c5,1") /* Instruction Invalidate MVA */
813 1.65 matt ARMREG_WRITE_INLINE(isb, "p15,0,%0,c7,c5,4") /* Instruction Synchronization Barrier */
814 1.100 skrll ARMREG_WRITE_INLINE(bpiall, "p15,0,%0,c7,c5,6") /* Branch Predictor Invalidate All */
815 1.101 skrll ARMREG_WRITE_INLINE(bpimva, "p15,0,%0,c7,c5,7") /* Branch Predictor invalidate by MVA */
816 1.65 matt ARMREG_WRITE_INLINE(dcimvac, "p15,0,%0,c7,c6,1") /* Data Invalidate MVA to PoC */
817 1.65 matt ARMREG_WRITE_INLINE(dcisw, "p15,0,%0,c7,c6,2") /* Data Invalidate Set/Way */
818 1.65 matt ARMREG_WRITE_INLINE(ats1cpr, "p15,0,%0,c7,c8,0") /* AddrTrans CurState PL1 Read */
819 1.86 matt ARMREG_WRITE_INLINE(ats1cpw, "p15,0,%0,c7,c8,1") /* AddrTrans CurState PL1 Write */
820 1.86 matt ARMREG_WRITE_INLINE(ats1cur, "p15,0,%0,c7,c8,2") /* AddrTrans CurState PL0 Read */
821 1.86 matt ARMREG_WRITE_INLINE(ats1cuw, "p15,0,%0,c7,c8,3") /* AddrTrans CurState PL0 Write */
822 1.65 matt ARMREG_WRITE_INLINE(dccmvac, "p15,0,%0,c7,c10,1") /* Data Clean MVA to PoC */
823 1.65 matt ARMREG_WRITE_INLINE(dccsw, "p15,0,%0,c7,c10,2") /* Data Clean Set/Way */
824 1.65 matt ARMREG_WRITE_INLINE(dsb, "p15,0,%0,c7,c10,4") /* Data Synchronization Barrier */
825 1.65 matt ARMREG_WRITE_INLINE(dmb, "p15,0,%0,c7,c10,5") /* Data Memory Barrier */
826 1.101 skrll ARMREG_WRITE_INLINE(dccmvau, "p15,0,%0,c7,c11,1") /* Data Clean MVA to PoU */
827 1.65 matt ARMREG_WRITE_INLINE(dccimvac, "p15,0,%0,c7,c14,1") /* Data Clean&Inv MVA to PoC */
828 1.65 matt ARMREG_WRITE_INLINE(dccisw, "p15,0,%0,c7,c14,2") /* Data Clean&Inv Set/Way */
829 1.74 matt /* cp15 c8 registers */
830 1.74 matt ARMREG_WRITE_INLINE(tlbiallis, "p15,0,%0,c8,c3,0") /* Invalidate entire unified TLB, inner shareable */
831 1.74 matt ARMREG_WRITE_INLINE(tlbimvais, "p15,0,%0,c8,c3,1") /* Invalidate unified TLB by MVA, inner shareable */
832 1.74 matt ARMREG_WRITE_INLINE(tlbiasidis, "p15,0,%0,c8,c3,2") /* Invalidate unified TLB by ASID, inner shareable */
833 1.74 matt ARMREG_WRITE_INLINE(tlbimvaais, "p15,0,%0,c8,c3,3") /* Invalidate unified TLB by MVA, all ASID, inner shareable */
834 1.74 matt ARMREG_WRITE_INLINE(itlbiall, "p15,0,%0,c8,c5,0") /* Invalidate entire instruction TLB */
835 1.74 matt ARMREG_WRITE_INLINE(itlbimva, "p15,0,%0,c8,c5,1") /* Invalidate instruction TLB by MVA */
836 1.74 matt ARMREG_WRITE_INLINE(itlbiasid, "p15,0,%0,c8,c5,2") /* Invalidate instruction TLB by ASID */
837 1.74 matt ARMREG_WRITE_INLINE(dtlbiall, "p15,0,%0,c8,c6,0") /* Invalidate entire data TLB */
838 1.74 matt ARMREG_WRITE_INLINE(dtlbimva, "p15,0,%0,c8,c6,1") /* Invalidate data TLB by MVA */
839 1.74 matt ARMREG_WRITE_INLINE(dtlbiasid, "p15,0,%0,c8,c6,2") /* Invalidate data TLB by ASID */
840 1.74 matt ARMREG_WRITE_INLINE(tlbiall, "p15,0,%0,c8,c7,0") /* Invalidate entire unified TLB */
841 1.74 matt ARMREG_WRITE_INLINE(tlbimva, "p15,0,%0,c8,c7,1") /* Invalidate unified TLB by MVA */
842 1.74 matt ARMREG_WRITE_INLINE(tlbiasid, "p15,0,%0,c8,c7,2") /* Invalidate unified TLB by ASID */
843 1.74 matt ARMREG_WRITE_INLINE(tlbimvaa, "p15,0,%0,c8,c7,3") /* Invalidate unified TLB by MVA, all ASID */
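
/*
 * Sketch (illustrative, not part of the original header): invalidate all
 * unified-TLB entries for one ASID and synchronize, using the accessors
 * generated above.  The function name is an assumption for this example.
 */
static inline void
arm_tlb_flush_asid_example(uint32_t asid)
{
	armreg_tlbiasid_write(asid);	/* invalidate unified TLB by ASID */
	armreg_dsb_write(0);		/* wait for the invalidation */
	armreg_isb_write(0);		/* resynchronize instruction fetch */
}
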
844 1.73 matt /* cp15 c9 registers */
845 1.60 matt ARMREG_READ_INLINE(pmcr, "p15,0,%0,c9,c12,0") /* PMC Control Register */
846 1.60 matt ARMREG_WRITE_INLINE(pmcr, "p15,0,%0,c9,c12,0") /* PMC Control Register */
847 1.60 matt ARMREG_READ_INLINE(pmcntenset, "p15,0,%0,c9,c12,1") /* PMC Count Enable Set */
848 1.60 matt ARMREG_WRITE_INLINE(pmcntenset, "p15,0,%0,c9,c12,1") /* PMC Count Enable Set */
849 1.60 matt ARMREG_READ_INLINE(pmcntenclr, "p15,0,%0,c9,c12,2") /* PMC Count Enable Clear */
850 1.60 matt ARMREG_WRITE_INLINE(pmcntenclr, "p15,0,%0,c9,c12,2") /* PMC Count Enable Clear */
851 1.60 matt ARMREG_READ_INLINE(pmovsr, "p15,0,%0,c9,c12,3") /* PMC Overflow Flag Status */
852 1.60 matt ARMREG_WRITE_INLINE(pmovsr, "p15,0,%0,c9,c12,3") /* PMC Overflow Flag Status */
853 1.122 jmcneill ARMREG_READ_INLINE(pmselr, "p15,0,%0,c9,c12,5") /* PMC Event Counter Selection */
854 1.122 jmcneill ARMREG_WRITE_INLINE(pmselr, "p15,0,%0,c9,c12,5") /* PMC Event Counter Selection */
855 1.122 jmcneill ARMREG_READ_INLINE(pmceid0, "p15,0,%0,c9,c12,6") /* PMC Event ID 0 */
856 1.122 jmcneill ARMREG_READ_INLINE(pmceid1, "p15,0,%0,c9,c12,7") /* PMC Event ID 1 */
857 1.60 matt ARMREG_READ_INLINE(pmccntr, "p15,0,%0,c9,c13,0") /* PMC Cycle Counter */
858 1.60 matt ARMREG_WRITE_INLINE(pmccntr, "p15,0,%0,c9,c13,0") /* PMC Cycle Counter */
859 1.122 jmcneill ARMREG_READ_INLINE(pmxevtyper, "p15,0,%0,c9,c13,1") /* PMC Event Type Select */
860 1.122 jmcneill ARMREG_WRITE_INLINE(pmxevtyper, "p15,0,%0,c9,c13,1") /* PMC Event Type Select */
861 1.122 jmcneill ARMREG_READ_INLINE(pmxevcntr, "p15,0,%0,c9,c13,2") /* PMC Event Count */
862 1.122 jmcneill ARMREG_WRITE_INLINE(pmxevcntr, "p15,0,%0,c9,c13,2") /* PMC Event Count */
863 1.71 matt ARMREG_READ_INLINE(pmuserenr, "p15,0,%0,c9,c14,0") /* PMC User Enable */
864 1.71 matt ARMREG_WRITE_INLINE(pmuserenr, "p15,0,%0,c9,c14,0") /* PMC User Enable */
865 1.113 skrll ARMREG_READ_INLINE(pmintenset, "p15,0,%0,c9,c14,1") /* PMC Interrupt Enable Set */
866 1.113 skrll ARMREG_WRITE_INLINE(pmintenset, "p15,0,%0,c9,c14,1") /* PMC Interrupt Enable Set */
867 1.113 skrll ARMREG_READ_INLINE(pmintenclr, "p15,0,%0,c9,c14,2") /* PMC Interrupt Enable Clear */
868 1.113 skrll ARMREG_WRITE_INLINE(pmintenclr, "p15,0,%0,c9,c14,2") /* PMC Interrupt Enable Clear */
869 1.86 matt ARMREG_READ_INLINE(l2ctrl, "p15,1,%0,c9,c0,2") /* A7/A15 L2 Control Register */
870 1.87 matt /* cp15 c10 registers */
871 1.87 matt ARMREG_READ_INLINE(prrr, "p15,0,%0,c10,c2,0") /* Primary Region Remap Register */
872 1.87 matt ARMREG_WRITE_INLINE(prrr, "p15,0,%0,c10,c2,0") /* Primary Region Remap Register */
873 1.127 skrll ARMREG_READ_INLINE(nmrr, "p15,0,%0,c10,c2,1") /* Normal Memory Remap Register */
874 1.127 skrll ARMREG_WRITE_INLINE(nmrr, "p15,0,%0,c10,c2,1") /* Normal Memory Remap Register */
875 1.73 matt /* cp15 c13 registers */
876 1.71 matt ARMREG_READ_INLINE(contextidr, "p15,0,%0,c13,c0,1") /* Context ID Register */
877 1.71 matt ARMREG_WRITE_INLINE(contextidr, "p15,0,%0,c13,c0,1") /* Context ID Register */
878 1.96 matt ARMREG_READ_INLINE(tpidrurw, "p15,0,%0,c13,c0,2") /* User read-write Thread ID Register */
879 1.96 matt ARMREG_WRITE_INLINE(tpidrurw, "p15,0,%0,c13,c0,2") /* User read-write Thread ID Register */
880 1.96 matt ARMREG_READ_INLINE(tpidruro, "p15,0,%0,c13,c0,3") /* User read-only Thread ID Register */
881 1.96 matt ARMREG_WRITE_INLINE(tpidruro, "p15,0,%0,c13,c0,3") /* User read-only Thread ID Register */
882 1.60 matt ARMREG_READ_INLINE(tpidrprw, "p15,0,%0,c13,c0,4") /* PL1 only Thread ID Register */
883 1.60 matt ARMREG_WRITE_INLINE(tpidrprw, "p15,0,%0,c13,c0,4") /* PL1 only Thread ID Register */
884 1.77 matt /* cp15 c12 registers */
885 1.79 matt ARMREG_READ_INLINE(vbar, "p15,0,%0,c12,c0,0") /* Vector Base Address Register */
886 1.79 matt ARMREG_WRITE_INLINE(vbar, "p15,0,%0,c12,c0,0") /* Vector Base Address Register */
887 1.73 matt /* cp15 c14 registers */
888 1.73 matt /* cp15 Global Timer Registers */
889 1.80 matt ARMREG_READ_INLINE(cnt_frq, "p15,0,%0,c14,c0,0") /* Counter Frequency Register */
890 1.80 matt ARMREG_WRITE_INLINE(cnt_frq, "p15,0,%0,c14,c0,0") /* Counter Frequency Register */
891 1.80 matt ARMREG_READ_INLINE(cntk_ctl, "p15,0,%0,c14,c1,0") /* Timer PL1 Control Register */
892 1.80 matt ARMREG_WRITE_INLINE(cntk_ctl, "p15,0,%0,c14,c1,0") /* Timer PL1 Control Register */
893 1.71 matt ARMREG_READ_INLINE(cntp_tval, "p15,0,%0,c14,c2,0") /* PL1 Physical TimerValue Register */
894 1.71 matt ARMREG_WRITE_INLINE(cntp_tval, "p15,0,%0,c14,c2,0") /* PL1 Physical TimerValue Register */
895 1.71 matt ARMREG_READ_INLINE(cntp_ctl, "p15,0,%0,c14,c2,1") /* PL1 Physical Timer Control Register */
896 1.71 matt ARMREG_WRITE_INLINE(cntp_ctl, "p15,0,%0,c14,c2,1") /* PL1 Physical Timer Control Register */
897 1.71 matt ARMREG_READ_INLINE(cntv_tval, "p15,0,%0,c14,c3,0") /* Virtual TimerValue Register */
898 1.71 matt ARMREG_WRITE_INLINE(cntv_tval, "p15,0,%0,c14,c3,0") /* Virtual TimerValue Register */
899 1.71 matt ARMREG_READ_INLINE(cntv_ctl, "p15,0,%0,c14,c3,1") /* Virtual Timer Control Register */
900 1.71 matt ARMREG_WRITE_INLINE(cntv_ctl, "p15,0,%0,c14,c3,1") /* Virtual Timer Control Register */
901 1.80 matt ARMREG_READ64_INLINE(cntp_ct, "p15,0,%Q0,%R0,c14") /* Physical Count Register */
902 1.80 matt ARMREG_WRITE64_INLINE(cntp_ct, "p15,0,%Q0,%R0,c14") /* Physical Count Register */
903 1.80 matt ARMREG_READ64_INLINE(cntv_ct, "p15,1,%Q0,%R0,c14") /* Virtual Count Register */
904 1.80 matt ARMREG_WRITE64_INLINE(cntv_ct, "p15,1,%Q0,%R0,c14") /* Virtual Count Register */
905 1.71 matt ARMREG_READ64_INLINE(cntp_cval, "p15,2,%Q0,%R0,c14") /* PL1 Physical Timer CompareValue Register */
906 1.71 matt ARMREG_WRITE64_INLINE(cntp_cval, "p15,2,%Q0,%R0,c14") /* PL1 Physical Timer CompareValue Register */
907 1.71 matt ARMREG_READ64_INLINE(cntv_cval, "p15,3,%Q0,%R0,c14") /* PL1 Virtual Timer CompareValue Register */
908 1.71 matt ARMREG_WRITE64_INLINE(cntv_cval, "p15,3,%Q0,%R0,c14") /* PL1 Virtual Timer CompareValue Register */
909 1.113 skrll ARMREG_READ64_INLINE(cntvoff, "p15,4,%Q0,%R0,c14") /* Virtual Offset Register */
910 1.113 skrll ARMREG_WRITE64_INLINE(cntvoff, "p15,4,%Q0,%R0,c14") /* Virtual Offset Register */
911 1.73 matt /* cp15 c15 registers */
912 1.77 matt ARMREG_READ_INLINE(cbar, "p15,4,%0,c15,c0,0") /* Configuration Base Address Register */
913 1.60 matt ARMREG_READ_INLINE(pmcrv6, "p15,0,%0,c15,c12,0") /* PMC Control Register (armv6) */
914 1.60 matt ARMREG_WRITE_INLINE(pmcrv6, "p15,0,%0,c15,c12,0") /* PMC Control Register (armv6) */
915 1.60 matt ARMREG_READ_INLINE(pmccntrv6, "p15,0,%0,c15,c12,1") /* PMC Cycle Counter (armv6) */
916 1.60 matt ARMREG_WRITE_INLINE(pmccntrv6, "p15,0,%0,c15,c12,1") /* PMC Cycle Counter (armv6) */
917 1.60 matt
918 1.88 matt ARMREG_READ_INLINE(tlbdata0, "p15,3,%0,c15,c0,0") /* TLB Data Register 0 (cortex) */
919 1.88 matt ARMREG_READ_INLINE(tlbdata1, "p15,3,%0,c15,c0,1") /* TLB Data Register 1 (cortex) */
920 1.88 matt ARMREG_READ_INLINE(tlbdata2, "p15,3,%0,c15,c0,2") /* TLB Data Register 2 (cortex) */
921 1.88 matt ARMREG_WRITE_INLINE(tlbdataop, "p15,3,%0,c15,c4,2") /* TLB Data Read Operation (cortex) */
922 1.88 matt
923 1.97 matt ARMREG_READ_INLINE(sheeva_xctrl, "p15,1,%0,c15,c1,0") /* Sheeva eXtra Control register */
924 1.97 matt ARMREG_WRITE_INLINE(sheeva_xctrl, "p15,1,%0,c15,c1,0") /* Sheeva eXtra Control register */
925 1.97 matt
926 1.123 skrll #if defined(_KERNEL)
927 1.123 skrll
928 1.123 skrll static inline uint64_t
929 1.123 skrll cpu_mpidr_aff_read(void)
930 1.123 skrll {
931 1.123 skrll
932 1.123 skrll return armreg_mpidr_read() & (MPIDR_AFF2|MPIDR_AFF1|MPIDR_AFF0);
933 1.123 skrll }
934 1.123 skrll
935 1.120 ryo /*
936 1.120 ryo * GENERIC TIMER register access
937 1.120 ryo */
938 1.120 ryo static inline uint32_t
939 1.120 ryo gtmr_cntfrq_read(void)
940 1.120 ryo {
941 1.120 ryo
942 1.120 ryo return armreg_cnt_frq_read();
943 1.120 ryo }
944 1.120 ryo
945 1.120 ryo static inline uint32_t
946 1.120 ryo gtmr_cntk_ctl_read(void)
947 1.120 ryo {
948 1.120 ryo
949 1.120 ryo return armreg_cntk_ctl_read();
950 1.120 ryo }
951 1.120 ryo
952 1.120 ryo static inline void
953 1.120 ryo gtmr_cntk_ctl_write(uint32_t val)
954 1.120 ryo {
955 1.120 ryo
956 1.120 ryo armreg_cntk_ctl_write(val);
957 1.120 ryo }
958 1.120 ryo
959 1.120 ryo static inline uint64_t
960 1.120 ryo gtmr_cntpct_read(void)
961 1.120 ryo {
962 1.120 ryo
963 1.120 ryo return armreg_cntp_ct_read();
964 1.120 ryo }
965 1.120 ryo
966 1.120 ryo /*
967 1.120 ryo * Counter-timer Virtual Count timer
968 1.120 ryo */
969 1.120 ryo static inline uint64_t
970 1.120 ryo gtmr_cntvct_read(void)
971 1.120 ryo {
972 1.120 ryo
973 1.120 ryo return armreg_cntv_ct_read();
974 1.120 ryo }
975 1.120 ryo
976 1.120 ryo /*
977 1.120 ryo * Counter-timer Virtual Timer Control register
978 1.120 ryo */
979 1.120 ryo static inline uint32_t
980 1.120 ryo gtmr_cntv_ctl_read(void)
981 1.120 ryo {
982 1.120 ryo
983 1.120 ryo return armreg_cntv_ctl_read();
984 1.120 ryo }
985 1.120 ryo
986 1.120 ryo static inline void
987 1.120 ryo gtmr_cntv_ctl_write(uint32_t val)
988 1.120 ryo {
989 1.120 ryo
990 1.120 ryo armreg_cntv_ctl_write(val);
991 1.120 ryo }
992 1.120 ryo
993 1.128 jmcneill
994 1.128 jmcneill /*
995 1.128 jmcneill * Counter-timer Physical Timer Control register
996 1.128 jmcneill */
997 1.128 jmcneill
998 1.128 jmcneill static inline uint32_t
999 1.128 jmcneill gtmr_cntp_ctl_read(void)
1000 1.128 jmcneill {
1001 1.128 jmcneill
1002 1.128 jmcneill return armreg_cntp_ctl_read();
1003 1.128 jmcneill }
1004 1.128 jmcneill
1005 1.120 ryo static inline void
1006 1.120 ryo gtmr_cntp_ctl_write(uint32_t val)
1007 1.120 ryo {
1008 1.120 ryo
1009 1.120 ryo armreg_cntp_ctl_write(val);
1010 1.120 ryo }
1011 1.120 ryo
1012 1.120 ryo
1013 1.120 ryo /*
1014 1.128 jmcneill * Counter-timer Physical Timer TimerValue register
1015 1.128 jmcneill */
1016 1.128 jmcneill static inline uint32_t
1017 1.128 jmcneill gtmr_cntp_tval_read(void)
1018 1.128 jmcneill {
1019 1.128 jmcneill
1020 1.128 jmcneill return armreg_cntp_tval_read();
1021 1.128 jmcneill }
1022 1.128 jmcneill
1023 1.128 jmcneill static inline void
1024 1.128 jmcneill gtmr_cntp_tval_write(uint32_t val)
1025 1.128 jmcneill {
1026 1.128 jmcneill
1027 1.128 jmcneill armreg_cntp_tval_write(val);
1028 1.128 jmcneill }
1029 1.128 jmcneill
1030 1.128 jmcneill
1031 1.128 jmcneill /*
1032 1.120 ryo * Counter-timer Virtual Timer TimerValue register
1033 1.120 ryo */
1034 1.121 joerg static inline uint32_t
1035 1.121 joerg gtmr_cntv_tval_read(void)
1036 1.121 joerg {
1037 1.121 joerg
1038 1.121 joerg return armreg_cntv_tval_read();
1039 1.121 joerg }
1040 1.121 joerg
1041 1.120 ryo static inline void
1042 1.120 ryo gtmr_cntv_tval_write(uint32_t val)
1043 1.120 ryo {
1044 1.120 ryo
1045 1.120 ryo armreg_cntv_tval_write(val);
1046 1.120 ryo }
1047 1.120 ryo
1048 1.120 ryo
1049 1.120 ryo /*
1050 1.128 jmcneill * Counter-timer Physical Timer CompareValue register
1051 1.128 jmcneill */
1052 1.128 jmcneill static inline uint64_t
1053 1.128 jmcneill gtmr_cntp_cval_read(void)
1054 1.128 jmcneill {
1055 1.128 jmcneill
1056 1.128 jmcneill return armreg_cntp_cval_read();
1057 1.128 jmcneill }
1058 1.128 jmcneill
1059 1.128 jmcneill static inline void
1060 1.128 jmcneill gtmr_cntp_cval_write(uint64_t val)
1061 1.128 jmcneill {
1062 1.128 jmcneill
1063 1.128 jmcneill armreg_cntp_cval_write(val);
1064 1.128 jmcneill }
1065 1.128 jmcneill
1066 1.128 jmcneill
1067 1.128 jmcneill /*
1068 1.120 ryo * Counter-timer Virtual Timer CompareValue register
1069 1.120 ryo */
1070 1.120 ryo static inline uint64_t
1071 1.120 ryo gtmr_cntv_cval_read(void)
1072 1.120 ryo {
1073 1.120 ryo
1074 1.120 ryo return armreg_cntv_cval_read();
1075 1.120 ryo }
1076 1.120 ryo
1077 1.125 jmcneill static inline void
1078 1.125 jmcneill gtmr_cntv_cval_write(uint64_t val)
1079 1.125 jmcneill {
1080 1.125 jmcneill
1081 1.125 jmcneill armreg_cntv_cval_write(val);
1082 1.125 jmcneill }
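
/*
 * Sketch (illustrative, not part of the original header): arm the virtual
 * timer to fire roughly "usec" microseconds from now using the accessors
 * above.  The function name is an assumption for this example.
 */
static inline void
gtmr_cntv_arm_oneshot_example(uint32_t usec)
{
	const uint64_t ticks = (uint64_t)gtmr_cntfrq_read() * usec / 1000000;

	gtmr_cntv_tval_write((uint32_t)ticks);
	gtmr_cntv_ctl_write((gtmr_cntv_ctl_read() | CNTCTL_ENABLE) &
	    ~CNTCTL_IMASK);
}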
1083 1.125 jmcneill
1084 1.123 skrll #endif /* _KERNEL */
1085 1.123 skrll #endif /* !__ASSEMBLER && !_RUMPKERNEL */
1086 1.61 matt
1087 1.120 ryo #elif defined(__aarch64__)
1088 1.120 ryo
1089 1.120 ryo #include <aarch64/armreg.h>
1090 1.120 ryo
1091 1.120 ryo #endif /* __arm__/__aarch64__ */
1092 1.120 ryo
1093 1.38 matt #endif /* _ARM_ARMREG_H */