ctlreg.h revision 1.46 1 1.46 mrg /* $NetBSD: ctlreg.h,v 1.46 2009/11/28 21:07:02 mrg Exp $ */
2 1.1 eeh
3 1.1 eeh /*
4 1.30 eeh * Copyright (c) 1996-2002 Eduardo Horvath
5 1.1 eeh *
6 1.1 eeh * Redistribution and use in source and binary forms, with or without
7 1.1 eeh * modification, are permitted provided that the following conditions
8 1.1 eeh * are met:
9 1.1 eeh * 1. Redistributions of source code must retain the above copyright
10 1.1 eeh * notice, this list of conditions and the following disclaimer.
11 1.11 eeh *
12 1.11 eeh * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND
13 1.1 eeh * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
14 1.1 eeh * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
15 1.11 eeh * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE
16 1.1 eeh * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
17 1.1 eeh * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
18 1.1 eeh * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
19 1.1 eeh * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
20 1.1 eeh * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
21 1.1 eeh * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
22 1.1 eeh * SUCH DAMAGE.
23 1.1 eeh *
24 1.1 eeh */
25 1.1 eeh
26 1.33 petrov #ifndef _SPARC_CTLREG_H_
27 1.33 petrov #define _SPARC_CTLREG_H_
28 1.33 petrov
29 1.1 eeh /*
30 1.1 eeh * Sun 4u control registers. (includes address space definitions
31 1.1 eeh * and some registers in control space).
32 1.1 eeh */
33 1.1 eeh
34 1.1 eeh /*
35 1.1 eeh * The Alternate address spaces.
36 1.1 eeh *
37 1.1 eeh * 0x00-0x7f are privileged
38 1.1 eeh * 0x80-0xff can be used by users
39 1.1 eeh */
40 1.1 eeh
41 1.26 eeh #define ASI_LITTLE 0x08 /* This bit should make an ASI little endian */
42 1.1 eeh
43 1.26 eeh #define ASI_NUCLEUS 0x04 /* [4u] kernel address space */
44 1.26 eeh #define ASI_NUCLEUS_LITTLE 0x0c /* [4u] kernel address space, little endian */
45 1.1 eeh
46 1.26 eeh #define ASI_AS_IF_USER_PRIMARY 0x10 /* [4u] primary user address space */
47 1.26 eeh #define ASI_AS_IF_USER_SECONDARY 0x11 /* [4u] secondary user address space */
48 1.1 eeh
49 1.26 eeh #define ASI_PHYS_CACHED 0x14 /* [4u] MMU bypass to main memory */
50 1.26 eeh #define ASI_PHYS_NON_CACHED 0x15 /* [4u] MMU bypass to I/O location */
51 1.26 eeh
52 1.26 eeh #define ASI_AS_IF_USER_PRIMARY_LITTLE 0x18 /* [4u] primary user address space, little endian */
53 1.35 heas #define ASI_AS_IF_USER_SECONDARY_LITTLE 0x19 /* [4u] secondary user address space, little endian */
54 1.26 eeh
55 1.26 eeh #define ASI_PHYS_CACHED_LITTLE 0x1c /* [4u] MMU bypass to main memory, little endian */
56 1.26 eeh #define ASI_PHYS_NON_CACHED_LITTLE 0x1d /* [4u] MMU bypass to I/O location, little endian */
57 1.26 eeh
58 1.26 eeh #define ASI_NUCLEUS_QUAD_LDD 0x24 /* [4u] use w/LDDA to load 128-bit item */
59 1.26 eeh #define ASI_NUCLEUS_QUAD_LDD_LITTLE 0x2c /* [4u] use w/LDDA to load 128-bit item, little endian */
60 1.26 eeh
61 1.26 eeh #define ASI_FLUSH_D_PAGE_PRIMARY 0x38 /* [4u] flush D-cache page using primary context */
62 1.26 eeh #define ASI_FLUSH_D_PAGE_SECONDARY 0x39 /* [4u] flush D-cache page using secondary context */
63 1.26 eeh #define ASI_FLUSH_D_CTX_PRIMARY 0x3a /* [4u] flush D-cache context using primary context */
64 1.26 eeh #define ASI_FLUSH_D_CTX_SECONDARY 0x3b /* [4u] flush D-cache context using secondary context */
65 1.26 eeh
66 1.46 mrg #define ASI_DCACHE_INVALIDATE 0x42 /* [III] invalidate D-cache */
67 1.46 mrg #define ASI_DCACHE_UTAG 0x43 /* [III] diagnostic access to D-cache micro tag */
68 1.46 mrg #define ASI_DCACHE_SNOOP_TAG 0x44 /* [III] diagnostic access to D-cache snoop tag RAM */
69 1.46 mrg
70 1.26 eeh #define ASI_LSU_CONTROL_REGISTER 0x45 /* [4u] load/store unit control register */
71 1.26 eeh
72 1.26 eeh #define ASI_DCACHE_DATA 0x46 /* [4u] diagnostic access to D-cache data RAM */
73 1.26 eeh #define ASI_DCACHE_TAG 0x47 /* [4u] diagnostic access to D-cache tag RAM */
74 1.26 eeh
75 1.26 eeh #define ASI_INTR_DISPATCH_STATUS 0x48 /* [4u] interrupt dispatch status register */
76 1.26 eeh #define ASI_INTR_RECEIVE 0x49 /* [4u] interrupt receive status register */
77 1.26 eeh #define ASI_MID_REG 0x4a /* [4u] hardware config and MID */
78 1.26 eeh #define ASI_ERROR_EN_REG 0x4b /* [4u] asynchronous error enables */
79 1.26 eeh #define ASI_AFSR 0x4c /* [4u] asynchronous fault status register */
80 1.26 eeh #define ASI_AFAR 0x4d /* [4u] asynchronous fault address register */
81 1.26 eeh
82 1.26 eeh #define ASI_ICACHE_DATA 0x66 /* [4u] diagnostic access to I-cache data RAM */
83 1.26 eeh #define ASI_ICACHE_TAG 0x67 /* [4u] diagnostic access to I-cache tag RAM */
84 1.26 eeh #define ASI_FLUSH_I_PAGE_PRIMARY 0x68 /* [4u] flush I-cache page using primary context */
85 1.26 eeh #define ASI_FLUSH_I_PAGE_SECONDARY 0x69 /* [4u] flush I-cache page using secondary context */
86 1.26 eeh #define ASI_FLUSH_I_CTX_PRIMARY 0x6a /* [4u] flush I-cache context using primary context */
87 1.26 eeh #define ASI_FLUSH_I_CTX_SECONDARY 0x6b /* [4u] flush I-cache context using secondary context */
88 1.26 eeh
89 1.26 eeh #define ASI_BLOCK_AS_IF_USER_PRIMARY 0x70 /* [4u] primary user address space, block loads/stores */
90 1.26 eeh #define ASI_BLOCK_AS_IF_USER_SECONDARY 0x71 /* [4u] secondary user address space, block loads/stores */
91 1.26 eeh
92 1.26 eeh #define ASI_ECACHE_DIAG 0x76 /* [4u] diag access to E-cache tag and data */
93 1.26 eeh #define ASI_DATAPATH_ERR_REG_WRITE 0x77 /* [4u] ASI is reused */
94 1.26 eeh
95 1.26 eeh #define ASI_BLOCK_AS_IF_USER_PRIMARY_LITTLE 0x78 /* [4u] primary user address space, block loads/stores */
96 1.26 eeh #define ASI_BLOCK_AS_IF_USER_SECONDARY_LITTLE 0x79 /* [4u] secondary user address space, block loads/stores */
97 1.26 eeh
98 1.26 eeh #define ASI_INTERRUPT_RECEIVE_DATA 0x7f /* [4u] interrupt receive data registers {0,1,2} */
99 1.26 eeh #define ASI_DATAPATH_ERR_REG_READ 0x7f /* [4u] read access to datapath error registers (ASI reused) */
100 1.26 eeh
101 1.26 eeh #define ASI_PRIMARY 0x80 /* [4u] primary address space */
102 1.26 eeh #define ASI_SECONDARY 0x81 /* [4u] secondary address space */
103 1.28 eeh #define ASI_PRIMARY_NOFAULT 0x82 /* [4u] primary address space, no fault */
104 1.28 eeh #define ASI_SECONDARY_NOFAULT 0x83 /* [4u] secondary address space, no fault */
105 1.26 eeh
106 1.26 eeh #define ASI_PRIMARY_LITTLE 0x88 /* [4u] primary address space, little endian */
107 1.26 eeh #define ASI_SECONDARY_LITTLE 0x89 /* [4u] secondary address space, little endian */
108 1.28 eeh #define ASI_PRIMARY_NOFAULT_LITTLE 0x8a /* [4u] primary address space, no fault, little endian */
109 1.28 eeh #define ASI_SECONDARY_NOFAULT_LITTLE 0x8b /* [4u] secondary address space, no fault, little endian */
110 1.26 eeh
111 1.26 eeh #define ASI_PST8_PRIMARY 0xc0 /* [VIS] Eight 8-bit partial store, primary */
112 1.26 eeh #define ASI_PST8_SECONDARY 0xc1 /* [VIS] Eight 8-bit partial store, secondary */
113 1.26 eeh #define ASI_PST16_PRIMARY 0xc2 /* [VIS] Four 16-bit partial store, primary */
114 1.26 eeh #define ASI_PST16_SECONDARY 0xc3 /* [VIS] Four 16-bit partial store, secondary */
115 1.26 eeh #define ASI_PST32_PRIMARY 0xc4 /* [VIS] Two 32-bit partial store, primary */
116 1.26 eeh #define ASI_PST32_SECONDARY 0xc5 /* [VIS] Two 32-bit partial store, secondary */
117 1.26 eeh
118 1.26 eeh #define ASI_PST8_PRIMARY_LITTLE 0xc8 /* [VIS] Eight 8-bit partial store, primary, little endian */
119 1.26 eeh #define ASI_PST8_SECONDARY_LITTLE 0xc9 /* [VIS] Eight 8-bit partial store, secondary, little endian */
120 1.26 eeh #define ASI_PST16_PRIMARY_LITTLE 0xca /* [VIS] Four 16-bit partial store, primary, little endian */
121 1.26 eeh #define ASI_PST16_SECONDARY_LITTLE 0xcb /* [VIS] Four 16-bit partial store, secondary, little endian */
122 1.26 eeh #define ASI_PST32_PRIMARY_LITTLE 0xcc /* [VIS] Two 32-bit partial store, primary, little endian */
123 1.26 eeh #define ASI_PST32_SECONDARY_LITTLE 0xcd /* [VIS] Two 32-bit partial store, secondary, little endian */
124 1.26 eeh
125 1.26 eeh #define ASI_FL8_PRIMARY 0xd0 /* [VIS] One 8-bit load/store floating, primary */
126 1.26 eeh #define ASI_FL8_SECONDARY 0xd1 /* [VIS] One 8-bit load/store floating, secondary */
127 1.26 eeh #define ASI_FL16_PRIMARY 0xd2 /* [VIS] One 16-bit load/store floating, primary */
128 1.26 eeh #define ASI_FL16_SECONDARY 0xd3 /* [VIS] One 16-bit load/store floating, secondary */
129 1.26 eeh
130 1.26 eeh #define ASI_FL8_PRIMARY_LITTLE 0xd8 /* [VIS] One 8-bit load/store floating, primary, little endian */
131 1.26 eeh #define ASI_FL8_SECONDARY_LITTLE 0xd9 /* [VIS] One 8-bit load/store floating, secondary, little endian */
132 1.26 eeh #define ASI_FL16_PRIMARY_LITTLE 0xda /* [VIS] One 16-bit load/store floating, primary, little endian */
133 1.26 eeh #define ASI_FL16_SECONDARY_LITTLE 0xdb /* [VIS] One 16-bit load/store floating, secondary, little endian */
134 1.26 eeh
135 1.26 eeh #define ASI_BLOCK_COMMIT_PRIMARY 0xe0 /* [4u] block store with commit, primary */
136 1.26 eeh #define ASI_BLOCK_COMMIT_SECONDARY 0xe1 /* [4u] block store with commit, secondary */
137 1.26 eeh #define ASI_BLOCK_PRIMARY 0xf0 /* [4u] block load/store, primary */
138 1.26 eeh #define ASI_BLOCK_SECONDARY 0xf1 /* [4u] block load/store, secondary */
139 1.26 eeh #define ASI_BLOCK_PRIMARY_LITTLE 0xf8 /* [4u] block load/store, primary, little endian */
140 1.26 eeh #define ASI_BLOCK_SECONDARY_LITTLE 0xf9 /* [4u] block load/store, secondary, little endian */
141 1.1 eeh
142 1.1 eeh
143 1.1 eeh /*
144 1.1 eeh * These are the shorter names used by Solaris
145 1.1 eeh */
146 1.1 eeh
147 1.26 eeh #define ASI_N ASI_NUCLEUS
148 1.26 eeh #define ASI_NL ASI_NUCLEUS_LITTLE
149 1.26 eeh #define ASI_AIUP ASI_AS_IF_USER_PRIMARY
150 1.26 eeh #define ASI_AIUS ASI_AS_IF_USER_SECONDARY
151 1.26 eeh #define ASI_AIUPL ASI_AS_IF_USER_PRIMARY_LITTLE
152 1.26 eeh #define ASI_AIUSL ASI_AS_IF_USER_SECONDARY_LITTLE
153 1.26 eeh #define ASI_P ASI_PRIMARY
154 1.26 eeh #define ASI_S ASI_SECONDARY
155 1.28 eeh #define ASI_PNF ASI_PRIMARY_NOFAULT
156 1.28 eeh #define ASI_SNF ASI_SECONDARY_NOFAULT
157 1.26 eeh #define ASI_PL ASI_PRIMARY_LITTLE
158 1.26 eeh #define ASI_SL ASI_SECONDARY_LITTLE
159 1.28 eeh #define ASI_PNFL ASI_PRIMARY_NOFAULT_LITTLE
160 1.28 eeh #define ASI_SNFL ASI_SECONDARY_NOFAULT_LITTLE
161 1.26 eeh #define ASI_FL8_P ASI_FL8_PRIMARY
162 1.26 eeh #define ASI_FL8_S ASI_FL8_SECONDARY
163 1.26 eeh #define ASI_FL16_P ASI_FL16_PRIMARY
164 1.26 eeh #define ASI_FL16_S ASI_FL16_SECONDARY
165 1.26 eeh #define ASI_FL8_PL ASI_FL8_PRIMARY_LITTLE
166 1.26 eeh #define ASI_FL8_SL ASI_FL8_SECONDARY_LITTLE
167 1.26 eeh #define ASI_FL16_PL ASI_FL16_PRIMARY_LITTLE
168 1.26 eeh #define ASI_FL16_SL ASI_FL16_SECONDARY_LITTLE
169 1.26 eeh #define ASI_BLK_AIUP ASI_BLOCK_AS_IF_USER_PRIMARY
170 1.26 eeh #define ASI_BLK_AIUPL ASI_BLOCK_AS_IF_USER_PRIMARY_LITTLE
171 1.26 eeh #define ASI_BLK_AIUS ASI_BLOCK_AS_IF_USER_SECONDARY
172 1.26 eeh #define ASI_BLK_AIUSL ASI_BLOCK_AS_IF_USER_SECONDARY_LITTLE
173 1.26 eeh #define ASI_BLK_COMMIT_P ASI_BLOCK_COMMIT_PRIMARY
174 1.26 eeh #define ASI_BLK_COMMIT_PRIMARY ASI_BLOCK_COMMIT_PRIMARY
175 1.26 eeh #define ASI_BLK_COMMIT_S ASI_BLOCK_COMMIT_SECONDARY
176 1.26 eeh #define ASI_BLK_COMMIT_SECONDARY ASI_BLOCK_COMMIT_SECONDARY
177 1.26 eeh #define ASI_BLK_P ASI_BLOCK_PRIMARY
178 1.26 eeh #define ASI_BLK_PL ASI_BLOCK_PRIMARY_LITTLE
179 1.26 eeh #define ASI_BLK_S ASI_BLOCK_SECONDARY
180 1.26 eeh #define ASI_BLK_SL ASI_BLOCK_SECONDARY_LITTLE
181 1.1 eeh
182 1.28 eeh /* Alternative spellings */
183 1.28 eeh #define ASI_PRIMARY_NO_FAULT ASI_PRIMARY_NOFAULT
184 1.28 eeh #define ASI_PRIMARY_NO_FAULT_LITTLE ASI_PRIMARY_NOFAULT_LITTLE
185 1.28 eeh #define ASI_SECONDARY_NO_FAULT ASI_SECONDARY_NOFAULT
186 1.28 eeh #define ASI_SECONDARY_NO_FAULT_LITTLE ASI_SECONDARY_NOFAULT_LITTLE
187 1.28 eeh
188 1.29 eeh #define PHYS_ASI(x) (((x) | 0x09) == 0x1d)
189 1.26 eeh #define LITTLE_ASI(x) ((x) & ASI_LITTLE)
190 1.14 eeh
191 1.1 eeh /*
192 1.1 eeh * The following are 4u control registers
193 1.1 eeh */
194 1.18 eeh
195 1.18 eeh /* Get the CPU's UPAID */
196 1.36 petrov #define UPA_CR_MID_SHIFT (17)
197 1.36 petrov #define UPA_CR_MID_SIZE (5)
198 1.36 petrov #define UPA_CR_MID_MASK \
199 1.36 petrov (((1 << UPA_CR_MID_SIZE) - 1) << UPA_CR_MID_SHIFT)
200 1.36 petrov
201 1.36 petrov #define UPA_CR_MID(x) (((x)>>UPA_CR_MID_SHIFT)&((1 << UPA_CR_MID_SIZE) - 1))
202 1.36 petrov
203 1.36 petrov #ifdef _LOCORE
204 1.36 petrov
205 1.36 petrov #define UPA_GET_MID(r1) \
206 1.36 petrov ldxa [%g0] ASI_MID_REG, r1 ; \
207 1.36 petrov srlx r1, UPA_CR_MID_SHIFT, r1 ; \
208 1.36 petrov and r1, (1 << UPA_CR_MID_SIZE) - 1, r1
209 1.36 petrov
210 1.36 petrov #else
211 1.18 eeh #define CPU_UPAID UPA_CR_MID(ldxa(0, ASI_MID_REG))
212 1.36 petrov #endif
213 1.1 eeh
214 1.1 eeh /*
215 1.1 eeh * [4u] MMU and Cache Control Register (MCCR)
216 1.1 eeh * use ASI = 0x45
217 1.1 eeh */
218 1.26 eeh #define ASI_MCCR ASI_LSU_CONTROL_REGISTER
219 1.26 eeh #define MCCR 0x00
220 1.1 eeh
221 1.1 eeh /* MCCR Bits and their meanings */
222 1.26 eeh #define MCCR_DMMU_EN 0x08
223 1.26 eeh #define MCCR_IMMU_EN 0x04
224 1.26 eeh #define MCCR_DCACHE_EN 0x02
225 1.26 eeh #define MCCR_ICACHE_EN 0x01
226 1.1 eeh
227 1.1 eeh
228 1.1 eeh /*
229 1.1 eeh * MMU control registers
230 1.1 eeh */
231 1.1 eeh
232 1.1 eeh /* Choose an MMU */
233 1.26 eeh #define ASI_DMMU 0x58
234 1.26 eeh #define ASI_IMMU 0x50
235 1.1 eeh
236 1.1 eeh /* Other assorted MMU ASIs */
237 1.26 eeh #define ASI_IMMU_8KPTR 0x51
238 1.26 eeh #define ASI_IMMU_64KPTR 0x52
239 1.26 eeh #define ASI_IMMU_DATA_IN 0x54
240 1.26 eeh #define ASI_IMMU_TLB_DATA 0x55
241 1.26 eeh #define ASI_IMMU_TLB_TAG 0x56
242 1.26 eeh #define ASI_DMMU_8KPTR 0x59
243 1.26 eeh #define ASI_DMMU_64KPTR 0x5a
244 1.26 eeh #define ASI_DMMU_DATA_IN 0x5c
245 1.26 eeh #define ASI_DMMU_TLB_DATA 0x5d
246 1.26 eeh #define ASI_DMMU_TLB_TAG 0x5e
247 1.1 eeh
248 1.1 eeh /*
249 1.1 eeh * The following are the control registers
250 1.1 eeh * They work on both MMUs unless noted.
251 1.46 mrg * III = cheetah only
252 1.1 eeh *
253 1.1 eeh * Register contents are defined later on individual registers.
254 1.1 eeh */
255 1.26 eeh #define TSB_TAG_TARGET 0x0
256 1.26 eeh #define TLB_DATA_IN 0x0
257 1.26 eeh #define CTX_PRIMARY 0x08 /* primary context -- DMMU only */
258 1.26 eeh #define CTX_SECONDARY 0x10 /* secondary context -- DMMU only */
259 1.26 eeh #define SFSR 0x18
260 1.26 eeh #define SFAR 0x20 /* fault address -- DMMU only */
261 1.26 eeh #define TSB 0x28
262 1.26 eeh #define TLB_TAG_ACCESS 0x30
263 1.26 eeh #define VIRTUAL_WATCHPOINT 0x38
264 1.26 eeh #define PHYSICAL_WATCHPOINT 0x40
265 1.46 mrg #define TSB_PEXT 0x48 /* III primary ext */
266 1.46 mrg #define TSB_SEXT 0x50 /* III 2ndary ext -- DMMU only */
267 1.46 mrg #define TSB_NEXT 0x58 /* III nucleus ext */
268 1.1 eeh
269 1.1 eeh /* Tag Target bits */
270 1.26 eeh #define TAG_TARGET_VA_MASK 0x03ffffffffffffffffLL
271 1.26 eeh #define TAG_TARGET_VA(x) (((x)<<22)&TAG_TARGET_VA_MASK)
272 1.26 eeh #define TAG_TARGET_CONTEXT(x) ((x)>>48)
273 1.26 eeh #define TAG_TARGET(c,v) ((((uint64_t)c)<<48)|(((uint64_t)v)&TAG_TARGET_VA_MASK))
274 1.1 eeh
275 1.26 eeh /* SFSR bits for both D_SFSR and I_SFSR */
276 1.26 eeh #define SFSR_ASI(x) ((x)>>16)
277 1.26 eeh #define SFSR_FT_VA_OOR_2 0x02000 /* IMMU: jumpl or return to unsupported VA */
278 1.26 eeh #define SFSR_FT_VA_OOR_1 0x01000 /* fault at unsupported VA */
279 1.26 eeh #define SFSR_FT_NFO 0x00800 /* DMMU: Access to page marked NFO */
280 1.26 eeh #define SFSR_ILL_ASI 0x00400 /* DMMU: Illegal (unsupported) ASI */
281 1.26 eeh #define SFSR_FT_IO_ATOMIC 0x00200 /* DMMU: Atomic access to noncacheable page */
282 1.26 eeh #define SFSR_FT_ILL_NF 0x00100 /* DMMU: NF load or flush to page marked E (has side effects) */
283 1.26 eeh #define SFSR_FT_PRIV 0x00080 /* Privilege violation */
284 1.26 eeh #define SFSR_FT_E 0x00040 /* DMMU: value of the E bit associated with the address */
285 1.26 eeh #define SFSR_CTXT(x) (((x)>>4)&0x3)
286 1.26 eeh #define SFSR_CTXT_IS_PRIM(x) (SFSR_CTXT(x)==0x00)
287 1.26 eeh #define SFSR_CTXT_IS_SECOND(x) (SFSR_CTXT(x)==0x01)
288 1.26 eeh #define SFSR_CTXT_IS_NUCLEUS(x) (SFSR_CTXT(x)==0x02)
289 1.26 eeh #define SFSR_PRIV 0x00008 /* value of PSTATE.PRIV for faulting access */
290 1.26 eeh #define SFSR_W 0x00004 /* DMMU: attempted write */
291 1.26 eeh #define SFSR_OW 0x00002 /* Overwrite; prev fault was still valid */
292 1.26 eeh #define SFSR_FV 0x00001 /* Fault is valid */
293 1.33 petrov #define SFSR_FT (SFSR_FT_VA_OOR_2|SFSR_FT_VA_OOR_1|SFSR_FT_NFO| \
294 1.33 petrov SFSR_ILL_ASI|SFSR_FT_IO_ATOMIC|SFSR_FT_ILL_NF|SFSR_FT_PRIV)
295 1.1 eeh
296 1.26 eeh #define SFSR_BITS "\177\20" \
297 1.33 petrov "f\20\30ASI\0" "b\16VAT\0" "b\15VAD\0" "b\14NFO\0" "b\13ASI\0" "b\12A\0" \
298 1.33 petrov "b\11NF\0" "b\10PRIV\0" "b\7E\0" "b\6NUCLEUS\0" "b\5SECONDCTX\0" "b\4PRIV\0" \
299 1.33 petrov "b\3W\0" "b\2OW\0" "b\1FV\0"
300 1.3 eeh
301 1.3 eeh /* ASFR bits */
302 1.26 eeh #define ASFR_ME 0x100000000LL
303 1.26 eeh #define ASFR_PRIV 0x080000000LL
304 1.26 eeh #define ASFR_ISAP 0x040000000LL
305 1.26 eeh #define ASFR_ETP 0x020000000LL
306 1.26 eeh #define ASFR_IVUE 0x010000000LL
307 1.26 eeh #define ASFR_TO 0x008000000LL
308 1.26 eeh #define ASFR_BERR 0x004000000LL
309 1.26 eeh #define ASFR_LDP 0x002000000LL
310 1.26 eeh #define ASFR_CP 0x001000000LL
311 1.26 eeh #define ASFR_WP 0x000800000LL
312 1.26 eeh #define ASFR_EDP 0x000400000LL
313 1.26 eeh #define ASFR_UE 0x000200000LL
314 1.26 eeh #define ASFR_CE 0x000100000LL
315 1.26 eeh #define ASFR_ETS 0x0000f0000LL
316 1.26 eeh #define ASFT_P_SYND 0x00000ffffLL
317 1.3 eeh
318 1.26 eeh #define AFSR_BITS "\177\20" \
319 1.3 eeh "b\40ME\0" "b\37PRIV\0" "b\36ISAP\0" "b\35ETP\0" \
320 1.3 eeh "b\34IVUE\0" "b\33TO\0" "b\32BERR\0" "b\31LDP\0" \
321 1.3 eeh "b\30CP\0" "b\27WP\0" "b\26EDP\0" "b\25UE\0" \
322 1.3 eeh "b\24CE\0" "f\20\4ETS\0" "f\0\20P_SYND\0"
323 1.3 eeh
324 1.1 eeh /*
325 1.1 eeh * Here's the spitfire TSB control register bits.
326 1.1 eeh *
327 1.1 eeh * Each TSB entry is 16-bytes wide. The TSB must be size aligned
328 1.1 eeh */
329 1.26 eeh #define TSB_SIZE_512 0x0 /* 8kB, etc. */
330 1.26 eeh #define TSB_SIZE_1K 0x01
331 1.26 eeh #define TSB_SIZE_2K 0x02
332 1.26 eeh #define TSB_SIZE_4K 0x03
333 1.1 eeh #define TSB_SIZE_8K 0x04
334 1.26 eeh #define TSB_SIZE_16K 0x05
335 1.26 eeh #define TSB_SIZE_32K 0x06
336 1.26 eeh #define TSB_SIZE_64K 0x07
337 1.26 eeh #define TSB_SPLIT 0x1000
338 1.26 eeh #define TSB_BASE 0xffffffffffffe000
339 1.1 eeh
340 1.1 eeh /* TLB Tag Access bits */
341 1.26 eeh #define TLB_TAG_ACCESS_VA 0xffffffffffffe000
342 1.26 eeh #define TLB_TAG_ACCESS_CTX 0x0000000000001fff
343 1.1 eeh
344 1.1 eeh /*
345 1.1 eeh * TLB demap registers. TTEs are defined in v9pte.h
346 1.1 eeh *
347 1.1 eeh * Use the address space to select between IMMU and DMMU.
348 1.1 eeh * The address of the register selects which context register
349 1.1 eeh * to read the ASI from.
350 1.1 eeh *
351 1.1 eeh * The data stored in the register is interpreted as the VA to
352 1.1 eeh * use. The DEMAP_CTX_<> registers ignore the address and demap the
353 1.1 eeh * entire ASI.
354 1.1 eeh *
355 1.1 eeh */
356 1.26 eeh #define ASI_IMMU_DEMAP 0x57 /* [4u] IMMU TLB demap */
357 1.26 eeh #define ASI_DMMU_DEMAP 0x5f /* [4u] DMMU TLB demap */
358 1.1 eeh
359 1.26 eeh #define DEMAP_PAGE_NUCLEUS ((0x02)<<4) /* Demap page from kernel AS */
360 1.26 eeh #define DEMAP_PAGE_PRIMARY ((0x00)<<4) /* Demap a page from primary CTXT */
361 1.26 eeh #define DEMAP_PAGE_SECONDARY ((0x01)<<4) /* Demap page from secondary CTXT (DMMU only) */
362 1.26 eeh #define DEMAP_CTX_NUCLEUS ((0x06)<<4) /* Demap all of kernel CTXT */
363 1.26 eeh #define DEMAP_CTX_PRIMARY ((0x04)<<4) /* Demap all of primary CTXT */
364 1.26 eeh #define DEMAP_CTX_SECONDARY ((0x05)<<4) /* Demap all of secondary CTXT */
365 1.1 eeh
366 1.1 eeh /*
367 1.1 eeh * Interrupt registers. This really gets hairy.
368 1.1 eeh */
369 1.1 eeh
370 1.1 eeh /* IRSR -- Interrupt Receive Status Register */
371 1.26 eeh #define ASI_IRSR 0x49
372 1.26 eeh #define IRSR 0x00
373 1.26 eeh #define IRSR_BUSY 0x020
374 1.26 eeh #define IRSR_MID(x) (x&0x1f)
375 1.1 eeh
376 1.1 eeh /* IRDR -- Interrupt Receive Data Registers */
377 1.26 eeh #define ASI_IRDR 0x7f
378 1.26 eeh #define IRDR_0H 0x40
379 1.26 eeh #define IRDR_0L 0x48 /* unimplemented */
380 1.26 eeh #define IRDR_1H 0x50
381 1.26 eeh #define IRDR_1L 0x58 /* unimplemented */
382 1.26 eeh #define IRDR_2H 0x60
383 1.26 eeh #define IRDR_2L 0x68 /* unimplemented */
384 1.26 eeh #define IRDR_3H 0x70 /* unimplemented */
385 1.26 eeh #define IRDR_3L 0x78 /* unimplemented */
386 1.1 eeh
387 1.1 eeh /* SOFTINT ASRs */
388 1.26 eeh #define SET_SOFTINT %asr20 /* Sets these bits */
389 1.26 eeh #define CLEAR_SOFTINT %asr21 /* Clears these bits */
390 1.26 eeh #define SOFTINT %asr22 /* Reads the register */
391 1.26 eeh #define TICK_CMPR %asr23
392 1.1 eeh
393 1.1 eeh #define TICK_INT 0x01 /* level-14 clock tick */
394 1.26 eeh #define SOFTINT1 (0x1<<1)
395 1.26 eeh #define SOFTINT2 (0x1<<2)
396 1.26 eeh #define SOFTINT3 (0x1<<3)
397 1.26 eeh #define SOFTINT4 (0x1<<4)
398 1.26 eeh #define SOFTINT5 (0x1<<5)
399 1.26 eeh #define SOFTINT6 (0x1<<6)
400 1.26 eeh #define SOFTINT7 (0x1<<7)
401 1.26 eeh #define SOFTINT8 (0x1<<8)
402 1.26 eeh #define SOFTINT9 (0x1<<9)
403 1.26 eeh #define SOFTINT10 (0x1<<10)
404 1.26 eeh #define SOFTINT11 (0x1<<11)
405 1.26 eeh #define SOFTINT12 (0x1<<12)
406 1.26 eeh #define SOFTINT13 (0x1<<13)
407 1.26 eeh #define SOFTINT14 (0x1<<14)
408 1.26 eeh #define SOFTINT15 (0x1<<15)
409 1.1 eeh
410 1.1 eeh /* Interrupt Dispatch -- usually reserved for cross-calls */
411 1.26 eeh #define ASR_IDSR 0x48 /* Interrupt dispatch status reg */
412 1.26 eeh #define IDSR 0x00
413 1.26 eeh #define IDSR_NACK 0x02
414 1.26 eeh #define IDSR_BUSY 0x01
415 1.26 eeh
416 1.26 eeh #define ASI_INTERRUPT_DISPATCH 0x77 /* [4u] spitfire interrupt dispatch regs */
417 1.31 chs
418 1.31 chs /* Interrupt delivery initiation */
419 1.31 chs #define IDCR(x) ((((uint64_t)(x)) << 14) | 0x70)
420 1.31 chs
421 1.31 chs #define IDDR_0H 0x40 /* Store data to send in these regs */
422 1.26 eeh #define IDDR_0L 0x48 /* unimplemented */
423 1.26 eeh #define IDDR_1H 0x50
424 1.26 eeh #define IDDR_1L 0x58 /* unimplemented */
425 1.26 eeh #define IDDR_2H 0x60
426 1.26 eeh #define IDDR_2L 0x68 /* unimplemented */
427 1.26 eeh #define IDDR_3H 0x70 /* unimplemented */
428 1.26 eeh #define IDDR_3L 0x78 /* unimplemented */
429 1.1 eeh
430 1.1 eeh /*
431 1.1 eeh * Error registers
432 1.1 eeh */
433 1.1 eeh
434 1.1 eeh /* Since we won't try to fix async errs, we don't care about the bits in the regs */
435 1.26 eeh #define ASI_AFAR 0x4d /* Asynchronous fault address register */
436 1.26 eeh #define AFAR 0x00
437 1.26 eeh #define ASI_AFSR 0x4c /* Asynchronous fault status register */
438 1.26 eeh #define AFSR 0x00
439 1.26 eeh
440 1.26 eeh #define ASI_P_EER 0x4b /* Error enable register */
441 1.26 eeh #define P_EER 0x00
442 1.26 eeh #define P_EER_ISAPEN 0x04 /* Enable fatal on ISAP */
443 1.26 eeh #define P_EER_NCEEN 0x02 /* Enable trap on uncorrectable errs */
444 1.26 eeh #define P_EER_CEEN 0x01 /* Enable trap on correctable errs */
445 1.26 eeh
446 1.26 eeh #define ASI_DATAPATH_READ 0x7f /* Read the regs */
447 1.26 eeh #define ASI_DATAPATH_WRITE 0x77 /* Write to the regs */
448 1.26 eeh #define P_DPER_0 0x00 /* Datapath err reg 0 */
449 1.26 eeh #define P_DPER_1 0x18 /* Datapath err reg 1 */
450 1.26 eeh #define P_DCR_0 0x20 /* Datapath control reg 0 */
451 1.26 eeh #define P_DCR_1 0x38 /* Datapath control reg 1 */
452 1.1 eeh
453 1.2 eeh
454 1.2 eeh /* From sparc64/asm.h which I think I'll deprecate since it makes bus.h a pain. */
455 1.2 eeh
456 1.21 eeh #ifndef _LOCORE
457 1.1 eeh /*
458 1.2 eeh * GCC __asm constructs for doing assembly stuff.
459 1.1 eeh */
460 1.2 eeh
461 1.2 eeh /*
462 1.2 eeh * ``Routines'' to load and store from/to alternate address space.
463 1.2 eeh * The location can be a variable, the asi value (address space indicator)
464 1.2 eeh * must be a constant.
465 1.1 eeh *
466 1.2 eeh * N.B.: You can put as many special functions here as you like, since
467 1.2 eeh * they cost no kernel space or time if they are not used.
468 1.1 eeh *
469 1.2 eeh * These were static inline functions, but gcc screws up the constraints
470 1.2 eeh * on the address space identifiers (the "n"umeric value part) because
471 1.2 eeh * it inlines too late, so we have to use the funny valued-macro syntax.
472 1.2 eeh */
473 1.6 eeh
474 1.20 eeh /*
475 1.20 eeh * Apparently the definition of bypass ASIs is that they all use the
476 1.20 eeh * D$ so we need to flush the D$ to make sure we don't get data pollution.
477 1.20 eeh */
478 1.6 eeh
479 1.21 eeh #ifdef __arch64__
480 1.41 perry static __inline u_char
481 1.21 eeh lduba(paddr_t loc, int asi)
482 1.21 eeh {
483 1.21 eeh register unsigned int _lduba_v;
484 1.21 eeh
485 1.38 perry __asm volatile(
486 1.36 petrov "wr %2, %%g0, %%asi; "
487 1.36 petrov "lduba [%1]%%asi, %0 "
488 1.33 petrov : "=r" (_lduba_v)
489 1.33 petrov : "r" ((unsigned long)(loc)), "r" (asi));
490 1.21 eeh return (_lduba_v);
491 1.21 eeh }
492 1.21 eeh #else
493 1.41 perry static __inline u_char
494 1.21 eeh lduba(paddr_t loc, int asi)
495 1.21 eeh {
496 1.21 eeh register unsigned int _lduba_v, _loc_hi, _pstate;
497 1.21 eeh
498 1.40 cdi _loc_hi = (((uint64_t)loc)>>32);
499 1.21 eeh if (PHYS_ASI(asi)) {
500 1.38 perry __asm volatile(
501 1.33 petrov "wr %4,%%g0,%%asi; "
502 1.33 petrov "sllx %3,32,%0; "
503 1.33 petrov "rdpr %%pstate,%1; "
504 1.33 petrov "or %0,%2,%0; "
505 1.33 petrov "wrpr %1,8,%%pstate; "
506 1.33 petrov "membar #Sync; "
507 1.33 petrov "lduba [%0]%%asi,%0; "
508 1.33 petrov "wrpr %1,0,%%pstate; "
509 1.33 petrov "membar #Sync; "
510 1.33 petrov "wr %%g0, 0x82, %%asi "
511 1.33 petrov : "=&r" (_lduba_v), "=&r" (_pstate)
512 1.33 petrov : "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
513 1.33 petrov } else {
514 1.38 perry __asm volatile(
515 1.34 martin "wr %3,%%g0,%%asi; "
516 1.33 petrov "sllx %2,32,%0; "
517 1.33 petrov "or %0,%1,%0; "
518 1.33 petrov "lduba [%0]%%asi,%0; "
519 1.33 petrov "wr %%g0, 0x82, %%asi "
520 1.33 petrov : "=&r" (_lduba_v)
521 1.33 petrov : "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
522 1.21 eeh }
523 1.21 eeh return (_lduba_v);
524 1.21 eeh }
525 1.21 eeh #endif
526 1.21 eeh
527 1.21 eeh #ifdef __arch64__
528 1.21 eeh /* load half-word from alternate address space */
529 1.41 perry static __inline u_short
530 1.21 eeh lduha(paddr_t loc, int asi)
531 1.21 eeh {
532 1.21 eeh register unsigned int _lduha_v;
533 1.21 eeh
534 1.38 perry __asm volatile(
535 1.36 petrov "wr %2, %%g0, %%asi; "
536 1.36 petrov "lduha [%1]%%asi, %0 "
537 1.33 petrov : "=r" (_lduha_v)
538 1.33 petrov : "r" ((unsigned long)(loc)), "r" (asi));
539 1.21 eeh return (_lduha_v);
540 1.21 eeh }
541 1.21 eeh #else
542 1.21 eeh /* load half-word from alternate address space */
543 1.41 perry static __inline u_short
544 1.21 eeh lduha(paddr_t loc, int asi) {
545 1.21 eeh register unsigned int _lduha_v, _loc_hi, _pstate;
546 1.21 eeh
547 1.40 cdi _loc_hi = (((uint64_t)loc)>>32);
548 1.21 eeh
549 1.21 eeh if (PHYS_ASI(asi)) {
550 1.38 perry __asm volatile(
551 1.33 petrov "wr %4,%%g0,%%asi; "
552 1.33 petrov "sllx %3,32,%0; "
553 1.33 petrov "rdpr %%pstate,%1; "
554 1.33 petrov "wrpr %1,8,%%pstate; "
555 1.33 petrov "or %0,%2,%0; "
556 1.33 petrov "membar #Sync; "
557 1.33 petrov "lduha [%0]%%asi,%0; "
558 1.33 petrov "wrpr %1,0,%%pstate; "
559 1.33 petrov "membar #Sync; "
560 1.33 petrov "wr %%g0, 0x82, %%asi "
561 1.33 petrov : "=&r" (_lduha_v), "=&r" (_pstate)
562 1.33 petrov : "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
563 1.33 petrov } else {
564 1.38 perry __asm volatile(
565 1.33 petrov "wr %3,%%g0,%%asi; "
566 1.33 petrov "sllx %2,32,%0; "
567 1.33 petrov "or %0,%1,%0; "
568 1.33 petrov "lduha [%0]%%asi,%0; "
569 1.33 petrov "wr %%g0, 0x82, %%asi "
570 1.33 petrov : "=&r" (_lduha_v)
571 1.33 petrov : "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
572 1.21 eeh }
573 1.21 eeh return (_lduha_v);
574 1.21 eeh }
575 1.21 eeh #endif
576 1.21 eeh
577 1.21 eeh
578 1.21 eeh #ifdef __arch64__
579 1.21 eeh /* load unsigned int from alternate address space */
580 1.41 perry static __inline u_int
581 1.21 eeh lda(paddr_t loc, int asi)
582 1.21 eeh {
583 1.21 eeh register unsigned int _lda_v;
584 1.21 eeh
585 1.38 perry __asm volatile(
586 1.33 petrov "wr %2,%%g0,%%asi; "
587 1.33 petrov "lda [%1]%%asi,%0 "
588 1.33 petrov : "=r" (_lda_v)
589 1.33 petrov : "r" ((unsigned long)(loc)), "r" (asi));
590 1.21 eeh return (_lda_v);
591 1.21 eeh }
592 1.21 eeh
593 1.21 eeh /* load signed int from alternate address space */
594 1.41 perry static __inline int
595 1.21 eeh ldswa(paddr_t loc, int asi)
596 1.21 eeh {
597 1.21 eeh register int _lda_v;
598 1.21 eeh
599 1.38 perry __asm volatile(
600 1.33 petrov "wr %2,%%g0,%%asi; "
601 1.33 petrov "ldswa [%1]%%asi,%0; "
602 1.33 petrov : "=r" (_lda_v)
603 1.33 petrov : "r" ((unsigned long)(loc)), "r" (asi));
604 1.21 eeh return (_lda_v);
605 1.21 eeh }
606 1.21 eeh #else /* __arch64__ */
607 1.21 eeh /* load unsigned int from alternate address space */
608 1.41 perry static __inline u_int
609 1.21 eeh lda(paddr_t loc, int asi)
610 1.21 eeh {
611 1.21 eeh register unsigned int _lda_v, _loc_hi, _pstate;
612 1.21 eeh
613 1.40 cdi _loc_hi = (((uint64_t)loc)>>32);
614 1.21 eeh if (PHYS_ASI(asi)) {
615 1.38 perry __asm volatile(
616 1.33 petrov "wr %4,%%g0,%%asi; "
617 1.33 petrov "rdpr %%pstate,%1; "
618 1.33 petrov "sllx %3,32,%0; "
619 1.33 petrov "wrpr %1,8,%%pstate; "
620 1.33 petrov "or %0,%2,%0; "
621 1.33 petrov "membar #Sync; "
622 1.33 petrov "lda [%0]%%asi,%0; "
623 1.33 petrov "wrpr %1,0,%%pstate; "
624 1.33 petrov "membar #Sync; "
625 1.33 petrov "wr %%g0, 0x82, %%asi "
626 1.33 petrov : "=&r" (_lda_v), "=&r" (_pstate)
627 1.33 petrov : "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
628 1.33 petrov } else {
629 1.38 perry __asm volatile(
630 1.33 petrov "wr %3,%%g0,%%asi; "
631 1.33 petrov "sllx %2,32,%0; "
632 1.33 petrov "or %0,%1,%0; "
633 1.33 petrov "lda [%0]%%asi,%0; "
634 1.33 petrov "wr %%g0, 0x82, %%asi "
635 1.33 petrov : "=&r" (_lda_v)
636 1.33 petrov : "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
637 1.21 eeh }
638 1.21 eeh return (_lda_v);
639 1.21 eeh }
640 1.21 eeh
641 1.21 eeh /* load signed int from alternate address space */
642 1.41 perry static __inline int
643 1.21 eeh ldswa(paddr_t loc, int asi)
644 1.21 eeh {
645 1.21 eeh register int _lda_v, _loc_hi, _pstate;
646 1.21 eeh
647 1.40 cdi _loc_hi = (((uint64_t)loc)>>32);
648 1.21 eeh if (PHYS_ASI(asi)) {
649 1.38 perry __asm volatile(
650 1.33 petrov "wr %4,%%g0,%%asi; "
651 1.33 petrov "rdpr %%pstate,%1; "
652 1.33 petrov "wrpr %1,8,%%pstate; "
653 1.33 petrov "sllx %3,32,%0; "
654 1.33 petrov " or %0,%2,%0; "
655 1.33 petrov "membar #Sync; "
656 1.33 petrov "ldswa [%0]%%asi,%0; "
657 1.33 petrov "wrpr %1,0,%%pstate; "
658 1.33 petrov "membar #Sync; "
659 1.33 petrov "wr %%g0, 0x82, %%asi "
660 1.33 petrov : "=&r" (_lda_v), "=&r" (_pstate)
661 1.33 petrov : "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
662 1.33 petrov } else {
663 1.38 perry __asm volatile(
664 1.33 petrov "wr %3,%%g0,%%asi; "
665 1.33 petrov "sllx %2,32,%0; "
666 1.33 petrov "or %0,%1,%0; "
667 1.33 petrov "ldswa [%0]%%asi,%0; "
668 1.33 petrov "wr %%g0, 0x82, %%asi "
669 1.33 petrov : "=&r" (_lda_v)
670 1.33 petrov : "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
671 1.21 eeh }
672 1.21 eeh return (_lda_v);
673 1.21 eeh }
674 1.21 eeh #endif /* __arch64__ */
675 1.21 eeh
676 1.21 eeh #ifdef __arch64__
677 1.21 eeh /* load 64-bit int from alternate address space -- these should never be used */
678 1.41 perry static __inline uint64_t
679 1.21 eeh ldda(paddr_t loc, int asi)
680 1.21 eeh {
681 1.21 eeh register long long _lda_v;
682 1.21 eeh
683 1.38 perry __asm volatile(
684 1.33 petrov "wr %2,%%g0,%%asi; "
685 1.36 petrov "ldda [%1]%%asi,%0 "
686 1.33 petrov : "=r" (_lda_v)
687 1.33 petrov : "r" ((unsigned long)(loc)), "r" (asi));
688 1.21 eeh return (_lda_v);
689 1.21 eeh }
690 1.21 eeh #else
691 1.21 eeh /* load 64-bit int from alternate address space */
692 1.41 perry static __inline uint64_t
693 1.21 eeh ldda(paddr_t loc, int asi)
694 1.21 eeh {
695 1.21 eeh register long long _lda_v, _loc_hi, _pstate;
696 1.21 eeh
697 1.40 cdi _loc_hi = (((uint64_t)loc)>>32);
698 1.21 eeh if (PHYS_ASI(asi)) {
699 1.38 perry __asm volatile(
700 1.33 petrov "wr %4,%%g0,%%asi; "
701 1.33 petrov "rdpr %%pstate,%1; "
702 1.33 petrov "wrpr %1,8,%%pstate; "
703 1.33 petrov "sllx %3,32,%0; "
704 1.33 petrov "or %0,%2,%0; "
705 1.33 petrov "membar #Sync; "
706 1.33 petrov "ldda [%0]%%asi,%0; "
707 1.33 petrov "wrpr %1,0,%%pstate; "
708 1.33 petrov "membar #Sync; "
709 1.33 petrov "wr %%g0, 0x82, %%asi "
710 1.33 petrov : "=&r" (_lda_v), "=&r" (_pstate)
711 1.33 petrov : "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
712 1.33 petrov } else {
713 1.38 perry __asm volatile(
714 1.33 petrov "wr %3,%%g0,%%asi; "
715 1.33 petrov "sllx %2,32,%0; "
716 1.33 petrov " or %0,%1,%0; "
717 1.33 petrov "ldda [%0]%%asi,%0; "
718 1.33 petrov "wr %%g0, 0x82, %%asi "
719 1.33 petrov : "=&r" (_lda_v)
720 1.33 petrov : "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
721 1.21 eeh }
722 1.21 eeh return (_lda_v);
723 1.21 eeh }
724 1.21 eeh #endif
725 1.21 eeh
726 1.21 eeh
727 1.21 eeh #ifdef __arch64__
728 1.21 eeh /* native load 64-bit int from alternate address space w/64-bit compiler*/
729 1.41 perry static __inline uint64_t
730 1.21 eeh ldxa(paddr_t loc, int asi)
731 1.21 eeh {
732 1.21 eeh register unsigned long _lda_v;
733 1.21 eeh
734 1.38 perry __asm volatile(
735 1.33 petrov "wr %2,%%g0,%%asi; "
736 1.36 petrov "ldxa [%1]%%asi,%0 "
737 1.33 petrov : "=r" (_lda_v)
738 1.33 petrov : "r" ((unsigned long)(loc)), "r" (asi));
739 1.21 eeh return (_lda_v);
740 1.21 eeh }
741 1.21 eeh #else
742 1.21 eeh /* native load 64-bit int from alternate address space w/32-bit compiler*/
743 1.41 perry static __inline uint64_t
744 1.21 eeh ldxa(paddr_t loc, int asi)
745 1.21 eeh {
746 1.21 eeh register unsigned long _ldxa_lo, _ldxa_hi, _loc_hi;
747 1.21 eeh
748 1.40 cdi _loc_hi = (((uint64_t)loc)>>32);
749 1.21 eeh if (PHYS_ASI(asi)) {
750 1.38 perry __asm volatile(
751 1.33 petrov "wr %4,%%g0,%%asi; "
752 1.33 petrov "rdpr %%pstate,%1; "
753 1.33 petrov "sllx %3,32,%0; "
754 1.33 petrov "wrpr %1,8,%%pstate; "
755 1.33 petrov "or %0, %2, %0; "
756 1.33 petrov "membar #Sync; "
757 1.33 petrov "ldxa [%0]%%asi,%0; "
758 1.33 petrov "wrpr %1,0,%%pstate; "
759 1.33 petrov "membar #Sync; "
760 1.33 petrov "srlx %0, 32, %1; "
761 1.33 petrov "srl %0, 0, %0; "
762 1.33 petrov "wr %%g0, 0x82, %%asi "
763 1.33 petrov : "=&r" (_ldxa_lo), "=&r" (_ldxa_hi)
764 1.33 petrov : "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
765 1.33 petrov } else {
766 1.38 perry __asm volatile(
767 1.33 petrov "wr %4,%%g0,%%asi; "
768 1.33 petrov "sllx %3,32,%0; "
769 1.33 petrov "or %0,%2,%0; "
770 1.33 petrov "ldxa [%0]%%asi,%0; "
771 1.33 petrov "srlx %0,32,%1; "
772 1.33 petrov "srl %0, 0, %0; "
773 1.33 petrov "wr %%g0, 0x82, %%asi "
774 1.33 petrov : "=&r" (_ldxa_lo), "=&r" (_ldxa_hi)
775 1.33 petrov : "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
776 1.21 eeh }
777 1.21 eeh return ((((int64_t)_ldxa_hi)<<32)|_ldxa_lo);
778 1.21 eeh }
779 1.21 eeh #endif
780 1.21 eeh
781 1.21 eeh /* store byte to alternate address space */
782 1.21 eeh #ifdef __arch64__
783 1.41 perry static __inline void
784 1.21 eeh stba(paddr_t loc, int asi, u_char value)
785 1.21 eeh {
786 1.38 perry __asm volatile(
787 1.33 petrov "wr %2, %%g0, %%asi; "
788 1.36 petrov "stba %0, [%1]%%asi "
789 1.33 petrov : : "r" ((int)(value)), "r" ((unsigned long)(loc)), "r" (asi));
790 1.21 eeh }
791 1.21 eeh #else
792 1.41 perry static __inline void
793 1.21 eeh stba(paddr_t loc, int asi, u_char value)
794 1.21 eeh {
795 1.21 eeh register int _loc_hi, _pstate;
796 1.21 eeh
797 1.40 cdi _loc_hi = (((uint64_t)loc)>>32);
798 1.21 eeh if (PHYS_ASI(asi)) {
799 1.38 perry __asm volatile(
800 1.33 petrov "wr %5,%%g0,%%asi; "
801 1.33 petrov "sllx %4,32,%0; "
802 1.33 petrov "rdpr %%pstate,%1; "
803 1.33 petrov "or %3,%0,%0; "
804 1.33 petrov "wrpr %1,8,%%pstate; "
805 1.33 petrov "stba %2,[%0]%%asi; "
806 1.33 petrov "wrpr %1,0,%%pstate; "
807 1.33 petrov "membar #Sync; "
808 1.33 petrov "wr %%g0, 0x82, %%asi "
809 1.33 petrov : "=&r" (_loc_hi), "=&r" (_pstate)
810 1.33 petrov : "r" ((int)(value)), "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
811 1.33 petrov } else {
812 1.38 perry __asm volatile(
813 1.33 petrov "wr %4,%%g0,%%asi; "
814 1.33 petrov "sllx %3,32,%0; "
815 1.33 petrov "or %2,%0,%0; "
816 1.33 petrov "stba %1,[%0]%%asi; "
817 1.33 petrov "wr %%g0, 0x82, %%asi "
818 1.33 petrov : "=&r" (_loc_hi)
819 1.33 petrov : "r" ((int)(value)), "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
820 1.21 eeh }
821 1.21 eeh }
822 1.21 eeh #endif
823 1.21 eeh
824 1.21 eeh /* store half-word to alternate address space */
825 1.21 eeh #ifdef __arch64__
826 1.41 perry static __inline void
827 1.21 eeh stha(paddr_t loc, int asi, u_short value)
828 1.21 eeh {
829 1.38 perry __asm volatile(
830 1.33 petrov "wr %2,%%g0,%%asi; "
831 1.36 petrov "stha %0,[%1]%%asi "
832 1.33 petrov : : "r" ((int)(value)), "r" ((unsigned long)(loc)),
833 1.30 eeh "r" (asi) : "memory");
834 1.21 eeh }
835 1.21 eeh #else
836 1.41 perry static __inline void
837 1.21 eeh stha(paddr_t loc, int asi, u_short value)
838 1.21 eeh {
839 1.21 eeh register int _loc_hi, _pstate;
840 1.21 eeh
841 1.40 cdi _loc_hi = (((uint64_t)loc)>>32);
842 1.21 eeh if (PHYS_ASI(asi)) {
843 1.38 perry __asm volatile(
844 1.33 petrov "wr %5,%%g0,%%asi; "
845 1.33 petrov "sllx %4,32,%0; "
846 1.33 petrov "rdpr %%pstate,%1; "
847 1.33 petrov "or %3,%0,%0; "
848 1.33 petrov "wrpr %1,8,%%pstate; "
849 1.33 petrov "stha %2,[%0]%%asi; "
850 1.33 petrov "wrpr %1,0,%%pstate; "
851 1.33 petrov "membar #Sync; "
852 1.33 petrov "wr %%g0, 0x82, %%asi "
853 1.33 petrov : "=&r" (_loc_hi), "=&r" (_pstate)
854 1.33 petrov : "r" ((int)(value)), "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)
855 1.33 petrov : "memory");
856 1.33 petrov } else {
857 1.38 perry __asm volatile(
858 1.33 petrov "wr %4,%%g0,%%asi; "
859 1.33 petrov "sllx %3,32,%0; "
860 1.33 petrov "or %2,%0,%0; "
861 1.33 petrov "stha %1,[%0]%%asi; "
862 1.33 petrov "wr %%g0, 0x82, %%asi "
863 1.33 petrov : "=&r" (_loc_hi)
864 1.33 petrov : "r" ((int)(value)), "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)
865 1.33 petrov : "memory");
866 1.21 eeh }
867 1.21 eeh }
868 1.21 eeh #endif
869 1.21 eeh
870 1.21 eeh
871 1.21 eeh /* store int to alternate address space */
872 1.21 eeh #ifdef __arch64__
873 1.41 perry static __inline void
874 1.21 eeh sta(paddr_t loc, int asi, u_int value)
875 1.21 eeh {
876 1.38 perry __asm volatile(
877 1.33 petrov "wr %2,%%g0,%%asi; "
878 1.36 petrov "sta %0,[%1]%%asi "
879 1.33 petrov : : "r" ((int)(value)), "r" ((unsigned long)(loc)),
880 1.30 eeh "r" (asi) : "memory");
881 1.21 eeh }
882 1.21 eeh #else
883 1.41 perry static __inline void
884 1.21 eeh sta(paddr_t loc, int asi, u_int value)
885 1.21 eeh {
886 1.21 eeh register int _loc_hi, _pstate;
887 1.21 eeh
888 1.40 cdi _loc_hi = (((uint64_t)loc)>>32);
889 1.21 eeh if (PHYS_ASI(asi)) {
890 1.38 perry __asm volatile(
891 1.33 petrov "wr %5,%%g0,%%asi; "
892 1.33 petrov "sllx %4,32,%0; "
893 1.33 petrov "rdpr %%pstate,%1; "
894 1.33 petrov "or %3,%0,%0; "
895 1.33 petrov "wrpr %1,8,%%pstate; "
896 1.33 petrov "sta %2,[%0]%%asi; "
897 1.33 petrov "wrpr %1,0,%%pstate; "
898 1.33 petrov "membar #Sync; "
899 1.33 petrov "wr %%g0, 0x82, %%asi "
900 1.33 petrov : "=&r" (_loc_hi), "=&r" (_pstate)
901 1.33 petrov : "r" ((int)(value)), "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)
902 1.33 petrov : "memory");
903 1.33 petrov } else {
904 1.38 perry __asm volatile(
905 1.33 petrov "wr %4,%%g0,%%asi; "
906 1.33 petrov "sllx %3,32,%0; "
907 1.33 petrov "or %2,%0,%0; "
908 1.33 petrov "sta %1,[%0]%%asi; "
909 1.33 petrov "wr %%g0, 0x82, %%asi "
910 1.33 petrov : "=&r" (_loc_hi)
911 1.33 petrov : "r" ((int)(value)), "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)
912 1.33 petrov : "memory");
913 1.21 eeh }
914 1.21 eeh }
915 1.21 eeh #endif
916 1.21 eeh
917 1.21 eeh /* store 64-bit int to alternate address space */
918 1.21 eeh #ifdef __arch64__
919 1.41 perry static __inline void
920 1.40 cdi stda(paddr_t loc, int asi, uint64_t value)
921 1.21 eeh {
922 1.38 perry __asm volatile(
923 1.33 petrov "wr %2,%%g0,%%asi; "
924 1.36 petrov "stda %0,[%1]%%asi "
925 1.33 petrov : : "r" ((long long)(value)), "r" ((unsigned long)(loc)), "r" (asi)
926 1.33 petrov : "memory");
927 1.21 eeh }
928 1.21 eeh #else
929 1.41 perry static __inline void
930 1.40 cdi stda(paddr_t loc, int asi, uint64_t value)
931 1.21 eeh {
932 1.21 eeh register int _loc_hi, _pstate;
933 1.21 eeh
934 1.40 cdi _loc_hi = (((uint64_t)loc)>>32);
935 1.21 eeh if (PHYS_ASI(asi)) {
936 1.38 perry __asm volatile(
937 1.33 petrov "wr %5,%%g0,%%asi; "
938 1.33 petrov "sllx %4,32,%0; "
939 1.33 petrov "rdpr %%pstate,%1; "
940 1.33 petrov "or %3,%0,%0; "
941 1.33 petrov "wrpr %1,8,%%pstate; "
942 1.33 petrov "stda %2,[%0]%%asi; "
943 1.33 petrov "wrpr %1,0,%%pstate; "
944 1.33 petrov "membar #Sync; "
945 1.33 petrov "wr %%g0, 0x82, %%asi "
946 1.33 petrov : "=&r" (_loc_hi), "=&r" (_pstate)
947 1.33 petrov : "r" ((long long)(value)), "r" ((unsigned long)(loc)),
948 1.33 petrov "r" (_loc_hi), "r" (asi)
949 1.33 petrov : "memory");
950 1.33 petrov } else {
951 1.38 perry __asm volatile(
952 1.33 petrov "wr %4,%%g0,%%asi; "
953 1.33 petrov "sllx %3,32,%0; "
954 1.33 petrov "or %2,%0,%0; "
955 1.33 petrov "stda %1,[%0]%%asi; "
956 1.33 petrov "wr %%g0, 0x82, %%asi "
957 1.33 petrov : "=&r" (_loc_hi)
958 1.33 petrov : "r" ((long long)(value)), "r" ((unsigned long)(loc)),
959 1.33 petrov "r" (_loc_hi), "r" (asi)
960 1.33 petrov : "memory");
961 1.21 eeh }
962 1.21 eeh }
963 1.21 eeh #endif
964 1.21 eeh
965 1.43 martin /* set dmmu secondary context */
966 1.43 martin static __inline void
967 1.43 martin dmmu_set_secondary_context(uint ctx)
968 1.43 martin {
969 1.43 martin __asm volatile(
970 1.44 hannken "stxa %0,[%1]%2; "
971 1.43 martin "membar #Sync "
972 1.43 martin : : "r" (ctx),
973 1.44 hannken "r" (CTX_SECONDARY), "n" (ASI_DMMU)
974 1.43 martin : "memory");
975 1.43 martin }
976 1.43 martin
#ifdef __arch64__
/* native store 64-bit int to alternate address space w/64-bit compiler*/
static __inline void
stxa(paddr_t loc, int asi, uint64_t value)
{
	/* Select the caller's ASI in %asi, then 64-bit alternate store. */
	__asm volatile(
		"wr %2,%%g0,%%asi; "
		"stxa %0,[%1]%%asi "
		: : "r" ((unsigned long)(value)),
		"r" ((unsigned long)(loc)), "r" (asi)
		: "memory");
}
#else
/* native store 64-bit int to alternate address space w/32-bit compiler*/
static __inline void
stxa(paddr_t loc, int asi, uint64_t value)
{
	/* 32-bit halves of value and of the 64-bit target address. */
	int _stxa_lo, _stxa_hi, _loc_hi;

	_stxa_lo = value;
	_stxa_hi = ((uint64_t)value)>>32;
	_loc_hi = (((uint64_t)loc)>>32);

	if (PHYS_ASI(asi)) {
		/*
		 * Operand map: %0=address scratch, %1=value scratch,
		 * %2=saved PSTATE, %3=lo, %4=hi, %5=loc(lo), %6=loc(hi),
		 * %7=asi.  Rebuilds the 64-bit value and address from
		 * their halves, flips PSTATE bit 0x8 around the stxa,
		 * fences with membar #Sync and restores %asi to 0x82.
		 * NOTE(review): "=&r" ((int)(_stxa_lo)) is an lvalue
		 * cast in an output constraint -- an old-GCC extension;
		 * confirm current compilers still accept it.
		 */
		__asm volatile(
			"wr %7,%%g0,%%asi; "
			"sllx %4,32,%1; "
			"sllx %6,32,%0; "
			"or %1,%3,%1; "
			"rdpr %%pstate,%2; "
			"or %0,%5,%0; "
			"wrpr %2,8,%%pstate; "
			"stxa %1,[%0]%%asi; "
			"wrpr %2,0,%%pstate; "
			"membar #Sync; "
			"wr %%g0, 0x82, %%asi "
			: "=&r" (_loc_hi), "=&r" (_stxa_hi), "=&r" ((int)(_stxa_lo))
			: "r" ((int)(_stxa_lo)), "r" ((int)(_stxa_hi)),
			"r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)
			: "memory");
	} else {
		/* Same reassembly without the PSTATE toggle or fences. */
		__asm volatile(
			"wr %6,%%g0,%%asi; "
			"sllx %3,32,%1; "
			"sllx %5,32,%0; "
			"or %1,%2,%1; "
			"or %0,%4,%0; "
			"stxa %1,[%0]%%asi; "
			"wr %%g0, 0x82, %%asi "
			: "=&r" (_loc_hi), "=&r" (_stxa_hi)
			: "r" ((int)(_stxa_lo)), "r" ((int)(_stxa_hi)),
			"r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)
			: "memory");
	}
}
#endif
1033 1.21 eeh
#ifdef __arch64__
/* 32-bit compare-and-swap in alternate address space w/64-bit compiler */
static __inline uint32_t
casa(paddr_t loc, int asi, uint32_t value, uint32_t oldvalue)
{
	/*
	 * casa: if the 32-bit word at loc equals oldvalue (%2), swap it
	 * with value (%0); the "+r" on value returns the previous
	 * memory contents either way.
	 */
	__asm volatile(
		"wr %3,%%g0,%%asi; "
		"casa [%1]%%asi,%2,%0 "
		: "+r" (value)
		: "r" ((unsigned long)(loc)), "r" (oldvalue), "r" (asi)
		: "memory");
	return (value);
}
/* 64-bit compare-and-swap in alternate address space w/64-bit compiler */
static __inline uint64_t
casxa(paddr_t loc, int asi, uint64_t value, uint64_t oldvalue)
{
	/* Same as casa() but on the full 64-bit doubleword (casxa). */
	__asm volatile(
		"wr %3,%%g0,%%asi; "
		"casxa [%1]%%asi,%2,%0 "
		: "+r" (value)
		: "r" ((unsigned long)(loc)), "r" (oldvalue), "r" (asi)
		: "memory");
	return (value);
}
#else
#if 0
/*
 * 64-bit compare-and-swap w/32-bit compiler.
 * NOTE(review): this whole variant is compiled out (#if 0), and the
 * asm below is further disabled by #ifdef __notyet; as written the
 * function would just return `value` recombined from its own halves
 * without touching memory.  Kept for reference only.
 */
static __inline uint64_t
casxa(paddr_t loc, int asi, uint64_t value, uint64_t oldvalue)
{
	int _casxa_lo, _casxa_hi, _loc_hi, _oval_hi;

	_casxa_lo = value;
	_casxa_hi = ((uint64_t)value)>>32;
	_oval_hi = ((uint64_t)oldvalue)>>32;
	_loc_hi = (((uint64_t)loc)>>32);

#ifdef __notyet
/*
 * gcc cannot handle this since it thinks it has >10 asm operands.
 */
	if (PHYS_ASI(asi)) {
		__asm volatile(
			"wr %6,%%g0,%%asi; "
			"sllx %1,32,%1; "
			"rdpr %%pstate,%2; "
			"sllx %0,32,%0; "
			"or %1,%2,%1; "
			"sllx %3,32,%3; "
			"or %0,%4,%0; "
			"or %3,%5,%3; "
			"wrpr %2,8,%%pstate; "
			"casxa [%0]%%asi,%3,%1; "
			"wrpr %2,0,%%pstate; "
			"andn %0,0x1f,%3; "
			"membar #Sync; "
			"sll %1,0,%2; "
			"srax %1,32,%1; "
			"wr %%g0, 0x82, %%asi "
			: "+r" (_loc_hi), "+r" (_casxa_hi), "+r" (_casxa_lo), "+r" (_oval_hi)
			: "r" ((unsigned long)(loc)), "r" ((unsigned int)(oldvalue)),
			"r" (asi)
			: "memory");
	} else {
		__asm volatile(
			"wr %7,%%g0,%%asi; "
			"sllx %1,32,%1; "
			"sllx %5,32,%0; "
			"or %1,%2,%1; "
			"sllx %3,32,%2; "
			"or %0,%4,%0; "
			"or %2,%4,%2; "
			"casxa [%0]%%asi,%2,%1; "
			"sll %1,0,%2; "
			/* NOTE(review): "%o1" below looks like a typo for "%1". */
			"srax %o1,32,%o1; "
			"wr %%g0, 0x82, %%asi "
			: "=&r" (_loc_hi), "+r" (_casxa_hi), "+r" (_casxa_lo)
			: "r" ((int)(_oval_hi)), "r" ((int)(oldvalue)),
			"r" ((unsigned long)(loc)), "r" (_loc_hi),
			"r" (asi)
			: "memory");
	}
#endif
	return (((uint64_t)_casxa_hi<<32)|(uint64_t)_casxa_lo);
}
#endif
#endif
1122 1.23 eeh
/* flush address from data cache */
/*
 * NOTE(review): the V9 "flush" instruction is defined for synchronizing
 * instruction memory after code modification; the "data cache" wording
 * above may be inherited from sparc32 -- confirm intended semantics.
 */
#define flush(loc) ({ \
	__asm volatile("flush %0" : : \
	     "r" ((unsigned long)(loc))); \
})

/* Flush a D$ line */
/* Disabled: writes the line's tag via ASI_DCACHE_TAG, then fences. */
#if 0
#define flushline(loc) ({ \
	stxa(((paddr_t)loc)&(~0x1f), (ASI_DCACHE_TAG), 0); \
	membar_sync(); \
})
#endif
1136 1.6 eeh
/* The following two enable or disable the dcache in the LSU control register */
/*
 * FIX (review): the result operand was written as "r" (res) in the
 * *output* section of the asm; GCC requires '=' on output constraints,
 * so these macros failed to compile wherever they were expanded.  The
 * asm loads the register into %0, modifies it, and stores it back, so
 * a plain "=r" output is correct (the other operands are "n"
 * immediates, not registers).  read-modify-write of the LSU control
 * register itself is unchanged.
 */
#define dcenable() ({ \
	int res; \
	__asm volatile("ldxa [%%g0] %1,%0; or %0,%2,%0; stxa %0,[%%g0] %1; membar #Sync" \
		: "=r" (res) : "n" (ASI_MCCR), "n" (MCCR_DCACHE_EN)); \
})
#define dcdisable() ({ \
	int res; \
	__asm volatile("ldxa [%%g0] %1,%0; andn %0,%2,%0; stxa %0,[%%g0] %1; membar #Sync" \
		: "=r" (res) : "n" (ASI_MCCR), "n" (MCCR_DCACHE_EN)); \
})
1148 1.6 eeh
/*
 * SPARC V9 memory barrier instructions.
 *
 * NOTE(review): these asm statements declare no "memory" clobber, so
 * they order memory operations at the hardware level but do not by
 * themselves prevent the compiler from reordering accesses around
 * them -- confirm callers rely only on the hardware ordering.
 */
/* Make all stores complete before next store */
#define membar_storestore() __asm volatile("membar #StoreStore" : :)
/* Make all loads complete before next store */
#define membar_loadstore() __asm volatile("membar #LoadStore" : :)
/* Make all stores complete before next load */
#define membar_storeload() __asm volatile("membar #StoreLoad" : :)
/* Make all loads complete before next load */
#define membar_loadload() __asm volatile("membar #LoadLoad" : :)
/* Complete all outstanding memory operations and exceptions */
#define membar_sync() __asm volatile("membar #Sync" : :)
/* Complete all outstanding memory operations */
#define membar_memissue() __asm volatile("membar #MemIssue" : :)
/* Complete all outstanding stores before any new loads */
#define membar_lookaside() __asm volatile("membar #Lookaside" : :)

/* Combined barriers: order loads, resp. stores, against everything. */
#define membar_load() __asm volatile("membar #LoadLoad | #LoadStore" : :)
#define membar_store() __asm volatile("membar #LoadStore | #StoreStore" : :)
1169 1.36 petrov
1170 1.9 eeh #endif
1171 1.33 petrov
1172 1.33 petrov #endif /* _SPARC_CTLREG_H_ */
1173