1 /* $NetBSD: ctlreg.h,v 1.21 2000/06/24 20:48:38 eeh Exp $ */
2
3 /*
4 * Copyright (c) 1996-1999 Eduardo Horvath
5 *
6 * Redistribution and use in source and binary forms, with or without
7 * modification, are permitted provided that the following conditions
8 * are met:
9 * 1. Redistributions of source code must retain the above copyright
10 * notice, this list of conditions and the following disclaimer.
11 *
12 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND
13 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
14 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
15 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE
16 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
17 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
18 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
19 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
20 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
21 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
22 * SUCH DAMAGE.
23 *
24 */
25
26 /*
27 * Sun 4u control registers. (includes address space definitions
28 * and some registers in control space).
29 */
30
31 /*
32 * The Alternate address spaces.
33 *
34 * 0x00-0x7f are privileged
35 * 0x80-0xff can be used by users
36 */
37
38 #define ASI_LITTLE 0x08 /* This bit should make an ASI little endian */
39
40 #define ASI_NUCLEUS 0x04 /* [4u] kernel address space */
41 #define ASI_NUCLEUS_LITTLE 0x0c /* [4u] kernel address space, little endian */
42
43 #define ASI_AS_IF_USER_PRIMARY 0x10 /* [4u] primary user address space */
44 #define ASI_AS_IF_USER_SECONDARY 0x11 /* [4u] secondary user address space */
45
46 #define ASI_PHYS_CACHED 0x14 /* [4u] MMU bypass to main memory */
47 #define ASI_PHYS_NON_CACHED 0x15 /* [4u] MMU bypass to I/O location */
48
49 #define ASI_AS_IF_USER_PRIMARY_LITTLE 0x18 /* [4u] primary user address space, little endian */
50 #define ASI_AS_IF_USER_SECONDARY_LITTLE 0x19 /* [4u] secondary user address space, little endian */
51
52 #define ASI_PHYS_CACHED_LITTLE 0x1c /* [4u] MMU bypass to main memory, little endian */
53 #define ASI_PHYS_NON_CACHED_LITTLE 0x1d /* [4u] MMU bypass to I/O location, little endian */
54
55 #define ASI_NUCLEUS_QUAD_LDD 0x24 /* [4u] use w/LDDA to load 128-bit item */
56 #define ASI_NUCLEUS_QUAD_LDD_LITTLE 0x2c /* [4u] use w/LDDA to load 128-bit item, little endian */
57
58 #define ASI_FLUSH_D_PAGE_PRIMARY 0x38 /* [4u] flush D-cache page using primary context */
59 #define ASI_FLUSH_D_PAGE_SECONDARY 0x39 /* [4u] flush D-cache page using secondary context */
60 #define ASI_FLUSH_D_CTX_PRIMARY 0x3a /* [4u] flush D-cache context using primary context */
61 #define ASI_FLUSH_D_CTX_SECONDARY 0x3b /* [4u] flush D-cache context using secondary context */
62
63 #define ASI_LSU_CONTROL_REGISTER 0x45 /* [4u] load/store unit control register */
64
65 #define ASI_DCACHE_DATA 0x46 /* [4u] diagnostic access to D-cache data RAM */
66 #define ASI_DCACHE_TAG 0x47 /* [4u] diagnostic access to D-cache tag RAM */
67
68 #define ASI_INTR_DISPATCH_STATUS 0x48 /* [4u] interrupt dispatch status register */
69 #define ASI_INTR_RECEIVE 0x49 /* [4u] interrupt receive status register */
70 #define ASI_MID_REG 0x4a /* [4u] hardware config and MID */
71 #define ASI_ERROR_EN_REG 0x4b /* [4u] asynchronous error enables */
72 #define ASI_AFSR 0x4c /* [4u] asynchronous fault status register */
73 #define ASI_AFAR 0x4d /* [4u] asynchronous fault address register */
74
75 #define ASI_ICACHE_DATA 0x66 /* [4u] diagnostic access to I-cache data RAM */
76 #define ASI_ICACHE_TAG 0x67 /* [4u] diagnostic access to I-cache tag RAM */
77 #define ASI_FLUSH_I_PAGE_PRIMARY 0x68 /* [4u] flush I-cache page using primary context */
78 #define ASI_FLUSH_I_PAGE_SECONDARY 0x69 /* [4u] flush I-cache page using secondary context */
79 #define ASI_FLUSH_I_CTX_PRIMARY 0x6a /* [4u] flush I-cache context using primary context */
80 #define ASI_FLUSH_I_CTX_SECONDARY 0x6b /* [4u] flush I-cache context using secondary context */
81
82 #define ASI_BLOCK_AS_IF_USER_PRIMARY 0x70 /* [4u] primary user address space, block loads/stores */
83 #define ASI_BLOCK_AS_IF_USER_SECONDARY 0x71 /* [4u] secondary user address space, block loads/stores */
84
85 #define ASI_ECACHE_DIAG 0x76 /* [4u] diag access to E-cache tag and data */
86 #define ASI_DATAPATH_ERR_REG_WRITE 0x77 /* [4u] ASI is reused */
87
88 #define ASI_BLOCK_AS_IF_USER_PRIMARY_LITTLE 0x78 /* [4u] primary user address space, block loads/stores, little endian */
89 #define ASI_BLOCK_AS_IF_USER_SECONDARY_LITTLE 0x79 /* [4u] secondary user address space, block loads/stores, little endian */
90
91 #define ASI_INTERRUPT_RECEIVE_DATA 0x7f /* [4u] interrupt receive data registers {0,1,2} */
92 #define ASI_DATAPATH_ERR_REG_READ 0x7f /* [4u] read access to datapath error registers (ASI reused) */
93
94 #define ASI_PRIMARY 0x80 /* [4u] primary address space */
95 #define ASI_SECONDARY 0x81 /* [4u] secondary address space */
96 #define ASI_PRIMARY_NO_FAULT 0x82 /* [4u] primary address space, no fault */
97 #define ASI_SECONDARY_NO_FAULT 0x83 /* [4u] secondary address space, no fault */
98
99 #define ASI_PRIMARY_LITTLE 0x88 /* [4u] primary address space, little endian */
100 #define ASI_SECONDARY_LITTLE 0x89 /* [4u] secondary address space, little endian */
101 #define ASI_PRIMARY_NO_FAULT_LITTLE 0x8a /* [4u] primary address space, no fault, little endian */
102 #define ASI_SECONDARY_NO_FAULT_LITTLE 0x8b /* [4u] secondary address space, no fault, little endian */
103
104 #define ASI_PST8_PRIMARY 0xc0 /* [VIS] Eight 8-bit partial store, primary */
105 #define ASI_PST8_SECONDARY 0xc1 /* [VIS] Eight 8-bit partial store, secondary */
106 #define ASI_PST16_PRIMARY 0xc2 /* [VIS] Four 16-bit partial store, primary */
107 #define ASI_PST16_SECONDARY 0xc3 /* [VIS] Four 16-bit partial store, secondary */
108 #define ASI_PST32_PRIMARY 0xc4 /* [VIS] Two 32-bit partial store, primary */
109 #define ASI_PST32_SECONDARY 0xc5 /* [VIS] Two 32-bit partial store, secondary */
110
111 #define ASI_PST8_PRIMARY_LITTLE 0xc8 /* [VIS] Eight 8-bit partial store, primary, little endian */
112 #define ASI_PST8_SECONDARY_LITTLE 0xc9 /* [VIS] Eight 8-bit partial store, secondary, little endian */
113 #define ASI_PST16_PRIMARY_LITTLE 0xca /* [VIS] Four 16-bit partial store, primary, little endian */
114 #define ASI_PST16_SECONDARY_LITTLE 0xcb /* [VIS] Four 16-bit partial store, secondary, little endian */
115 #define ASI_PST32_PRIMARY_LITTLE 0xcc /* [VIS] Two 32-bit partial store, primary, little endian */
116 #define ASI_PST32_SECONDARY_LITTLE 0xcd /* [VIS] Two 32-bit partial store, secondary, little endian */
117
118 #define ASI_FL8_PRIMARY 0xd0 /* [VIS] One 8-bit load/store floating, primary */
119 #define ASI_FL8_SECONDARY 0xd1 /* [VIS] One 8-bit load/store floating, secondary */
120 #define ASI_FL16_PRIMARY 0xd2 /* [VIS] One 16-bit load/store floating, primary */
121 #define ASI_FL16_SECONDARY 0xd3 /* [VIS] One 16-bit load/store floating, secondary */
122
123 #define ASI_FL8_PRIMARY_LITTLE 0xd8 /* [VIS] One 8-bit load/store floating, primary, little endian */
124 #define ASI_FL8_SECONDARY_LITTLE 0xd9 /* [VIS] One 8-bit load/store floating, secondary, little endian */
125 #define ASI_FL16_PRIMARY_LITTLE 0xda /* [VIS] One 16-bit load/store floating, primary, little endian */
126 #define ASI_FL16_SECONDARY_LITTLE 0xdb /* [VIS] One 16-bit load/store floating, secondary, little endian */
127
128 #define ASI_BLOCK_COMMIT_PRIMARY 0xe0 /* [4u] block store with commit, primary */
129 #define ASI_BLOCK_COMMIT_SECONDARY 0xe1 /* [4u] block store with commit, secondary */
130 #define ASI_BLOCK_PRIMARY 0xf0 /* [4u] block load/store, primary */
131 #define ASI_BLOCK_SECONDARY 0xf1 /* [4u] block load/store, secondary */
132 #define ASI_BLOCK_PRIMARY_LITTLE 0xf8 /* [4u] block load/store, primary, little endian */
133 #define ASI_BLOCK_SECONDARY_LITTLE 0xf9 /* [4u] block load/store, secondary, little endian */
134
135
136 /*
137 * These are the shorter names used by Solaris
138 */
139
140 #define ASI_N ASI_NUCLEUS
141 #define ASI_NL ASI_NUCLEUS_LITTLE
142 #define ASI_AIUP ASI_AS_IF_USER_PRIMARY
143 #define ASI_AIUS ASI_AS_IF_USER_SECONDARY
144 #define ASI_AIUPL ASI_AS_IF_USER_PRIMARY_LITTLE
145 #define ASI_AIUSL ASI_AS_IF_USER_SECONDARY_LITTLE
146 #define ASI_P ASI_PRIMARY
147 #define ASI_S ASI_SECONDARY
148 #define ASI_PNF ASI_PRIMARY_NO_FAULT
149 #define ASI_SNF ASI_SECONDARY_NO_FAULT
150 #define ASI_PL ASI_PRIMARY_LITTLE
151 #define ASI_SL ASI_SECONDARY_LITTLE
152 #define ASI_PNFL ASI_PRIMARY_NO_FAULT_LITTLE
153 #define ASI_SNFL ASI_SECONDARY_NO_FAULT_LITTLE
154 #define ASI_BLK_AIUP ASI_BLOCK_AS_IF_USER_PRIMARY
155 #define ASI_BLK_AIUPL ASI_BLOCK_AS_IF_USER_PRIMARY_LITTLE
156 #define ASI_BLK_AIUS ASI_BLOCK_AS_IF_USER_SECONDARY
157 #define ASI_BLK_AIUSL ASI_BLOCK_AS_IF_USER_SECONDARY_LITTLE
158 #define ASI_BLK_COMMIT_P ASI_BLOCK_COMMIT_PRIMARY
159 #define ASI_BLK_COMMIT_PRIMARY ASI_BLOCK_COMMIT_PRIMARY
160 #define ASI_BLK_COMMIT_S ASI_BLOCK_COMMIT_SECONDARY
161 #define ASI_BLK_COMMIT_SECONDARY ASI_BLOCK_COMMIT_SECONDARY
162 #define ASI_BLK_P ASI_BLOCK_PRIMARY
163 #define ASI_BLK_PL ASI_BLOCK_PRIMARY_LITTLE
164 #define ASI_BLK_S ASI_BLOCK_SECONDARY
165 #define ASI_BLK_SL ASI_BLOCK_SECONDARY_LITTLE
166
167 #define PHYS_ASI(x) (((x) | 0x09) == 0x1d)
168 #define LITTLE_ASI(x) ((x) & ASI_LITTLE)
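
/*
 * Illustrative note (not part of the original header): PHYS_ASI() relies on
 * the fact that OR-ing 0x09 into any of the four MMU-bypass ASIs defined
 * above (0x14, 0x15, 0x1c, 0x1d), and into no other ASI, yields 0x1d.
 * For example:
 *
 *	PHYS_ASI(ASI_PHYS_CACHED)		!= 0	(0x14 | 0x09 == 0x1d)
 *	PHYS_ASI(ASI_PHYS_NON_CACHED_LITTLE)	!= 0	(0x1d | 0x09 == 0x1d)
 *	PHYS_ASI(ASI_PRIMARY)			== 0	(0x80 | 0x09 == 0x89)
 *	LITTLE_ASI(ASI_PHYS_CACHED_LITTLE)	!= 0	(bit ASI_LITTLE set)
 */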
169
170 /*
171 * The following are 4u control registers
172 */
173
174
175 /* Get the CPU's UPAID */
176 #define UPA_CR_MID(x) (((x)>>17)&0x1f)
177 #define CPU_UPAID UPA_CR_MID(ldxa(0, ASI_MID_REG))
178
179 /*
180 * [4u] MMU and Cache Control Register (MCCR)
181 * use ASI = 0x45
182 */
183 #define ASI_MCCR ASI_LSU_CONTROL_REGISTER
184 #define MCCR 0x00
185
186 /* MCCR Bits and their meanings */
187 #define MCCR_DMMU_EN 0x08
188 #define MCCR_IMMU_EN 0x04
189 #define MCCR_DCACHE_EN 0x02
190 #define MCCR_ICACHE_EN 0x01
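
/*
 * Illustrative sketch (not part of the original header): the MCCR sits at
 * offset 0 (MCCR) of ASI_MCCR, so it can be read and modified with the
 * ldxa()/stxa() helpers defined later in this file, e.g. to turn the
 * D-cache on (cf. the dcenable()/dcdisable() macros near the end):
 *
 *	u_int64_t mccr = ldxa(MCCR, ASI_MCCR);
 *	stxa(MCCR, ASI_MCCR, mccr | MCCR_DCACHE_EN);
 *	membar_sync();
 */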
191
192
193 /*
194 * MMU control registers
195 */
196
197 /* Choose an MMU */
198 #define ASI_DMMU 0x58
199 #define ASI_IMMU 0x50
200
201 /* Other assorted MMU ASIs */
202 #define ASI_IMMU_8KPTR 0x51
203 #define ASI_IMMU_64KPTR 0x52
204 #define ASI_IMMU_DATA_IN 0x54
205 #define ASI_IMMU_TLB_DATA 0x55
206 #define ASI_IMMU_TLB_TAG 0x56
207 #define ASI_DMMU_8KPTR 0x59
208 #define ASI_DMMU_64KPTR 0x5a
209 #define ASI_DMMU_DATA_IN 0x5c
210 #define ASI_DMMU_TLB_DATA 0x5d
211 #define ASI_DMMU_TLB_TAG 0x5e
212
213 /*
214 * The following are the control registers.
215 * They work on both MMUs unless noted.
216 *
217 * Register contents are defined later on individual registers.
218 */
219 #define TSB_TAG_TARGET 0x0
220 #define TLB_DATA_IN 0x0
221 #define CTX_PRIMARY 0x08 /* primary context -- DMMU only */
222 #define CTX_SECONDARY 0x10 /* secondary context -- DMMU only */
223 #define SFSR 0x18
224 #define SFAR 0x20 /* fault address -- DMMU only */
225 #define TSB 0x28
226 #define TLB_TAG_ACCESS 0x30
227 #define VIRTUAL_WATCHPOINT 0x38
228 #define PHYSICAL_WATCHPOINT 0x40
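
/*
 * Illustrative sketch (not part of the original header): these offsets are
 * used as the address of an ldxa()/stxa() against ASI_DMMU or ASI_IMMU
 * (helpers defined later in this file), e.g.:
 *
 *	u_int64_t ctx = ldxa(CTX_PRIMARY, ASI_DMMU);	(current primary context)
 *	u_int64_t sfsr = ldxa(SFSR, ASI_DMMU);		(D-MMU fault status)
 *	stxa(SFSR, ASI_DMMU, 0); membar_sync();		(clear the fault status)
 */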
229
230 /* Tag Target bits */
231 #define TAG_TARGET_VA_MASK 0x03ffffffffffffffffLL
232 #define TAG_TARGET_VA(x) (((x)<<22)&TAG_TARGET_VA_MASK)
233 #define TAG_TARGET_CONTEXT(x) ((x)>>48)
234 #define TAG_TARGET(c,v) ((((uint64_t)c)<<48)|(((uint64_t)v)&TAG_TARGET_VA_MASK))
235
236 /* SFSR bits for both D_SFSR and I_SFSR */
237 #define SFSR_ASI(x) ((x)>>16)
238 #define SFSR_FT_VA_OOR_2 0x02000 /* IMMU: jumpl or return to unsupported VA */
239 #define SFSR_FT_VA_OOR_1 0x01000 /* fault at unsupported VA */
240 #define SFSR_FT_NFO 0x00800 /* DMMU: Access to page marked NFO */
241 #define SFSR_ILL_ASI 0x00400 /* DMMU: Illegal (unsupported) ASI */
242 #define SFSR_FT_IO_ATOMIC 0x00200 /* DMMU: Atomic access to noncacheable page */
243 #define SFSR_FT_ILL_NF 0x00100 /* DMMU: NF load or flush to page marked E (has side effects) */
244 #define SFSR_FT_PRIV 0x00080 /* Privilege violation */
245 #define SFSR_FT_E 0x00040 /* DMMU: value of E bit associated with the faulting address */
246 #define SFSR_CTXT(x) (((x)>>4)&0x3)
247 #define SFSR_CTXT_IS_PRIM(x) (SFSR_CTXT(x)==0x00)
248 #define SFSR_CTXT_IS_SECOND(x) (SFSR_CTXT(x)==0x01)
249 #define SFSR_CTXT_IS_NUCLEUS(x) (SFSR_CTXT(x)==0x02)
250 #define SFSR_PRIV 0x00008 /* value of PSTATE.PRIV for faulting access */
251 #define SFSR_W 0x00004 /* DMMU: attempted write */
252 #define SFSR_OW 0x00002 /* Overwrite; previous fault was still valid */
253 #define SFSR_FV 0x00001 /* Fault is valid */
254 #define SFSR_FT (SFSR_FT_VA_OOR_2|SFSR_FT_VA_OOR_1|SFSR_FT_NFO|SFSR_ILL_ASI|SFSR_FT_IO_ATOMIC|SFSR_FT_ILL_NF|SFSR_FT_PRIV)
255
256 #if 0
257 /* Old bits */
258 #define SFSR_BITS "\40\16VAT\15VAD\14NFO\13ASI\12A\11NF\10PRIV\7E\6NUCLEUS\5SECONDCTX\4PRIV\3W\2OW\1FV"
259 #else
260 /* New bits */
261 #define SFSR_BITS "\177\20" \
262 "f\20\30ASI\0" "b\16VAT\0" "b\15VAD\0" "b\14NFO\0" "b\13ASI\0" "b\12A\0" "b\11NF\0" "b\10PRIV\0" \
263 "b\7E\0" "b\6NUCLEUS\0" "b\5SECONDCTX\0" "b\4PRIV\0" "b\3W\0" "b\2OW\0" "b\1FV\0"
264 #endif
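
/*
 * Illustrative sketch (not part of the original header), assuming the
 * kernel's bitmask_snprintf(9) is available: the "new style" SFSR_BITS
 * string above is meant to be fed to it when logging a fault, where sfsr
 * is the value read from the SFSR offset above, e.g.:
 *
 *	char bits[128];
 *	printf("DMMU fault: sfsr=%s\n",
 *	    bitmask_snprintf(sfsr, SFSR_BITS, bits, sizeof(bits)));
 */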
265
266 /* ASFR bits */
267 #define ASFR_ME 0x100000000LL
268 #define ASFR_PRIV 0x080000000LL
269 #define ASFR_ISAP 0x040000000LL
270 #define ASFR_ETP 0x020000000LL
271 #define ASFR_IVUE 0x010000000LL
272 #define ASFR_TO 0x008000000LL
273 #define ASFR_BERR 0x004000000LL
274 #define ASFR_LDP 0x002000000LL
275 #define ASFR_CP 0x001000000LL
276 #define ASFR_WP 0x000800000LL
277 #define ASFR_EDP 0x000400000LL
278 #define ASFR_UE 0x000200000LL
279 #define ASFR_CE 0x000100000LL
280 #define ASFR_ETS 0x0000f0000LL
281 #define ASFR_P_SYND 0x00000ffffLL
282
283 #define AFSR_BITS "\177\20" \
284 "b\40ME\0" "b\37PRIV\0" "b\36ISAP\0" "b\35ETP\0" \
285 "b\34IVUE\0" "b\33TO\0" "b\32BERR\0" "b\31LDP\0" \
286 "b\30CP\0" "b\27WP\0" "b\26EDP\0" "b\25UE\0" \
287 "b\24CE\0" "f\20\4ETS\0" "f\0\20P_SYND\0"
288
289 /*
290 * Here are the spitfire TSB control register bits.
291 *
292 * Each TSB entry is 16 bytes wide. The TSB must be aligned to its size.
293 */
294 #define TSB_SIZE_512 0x0 /* 8kB, etc. */
295 #define TSB_SIZE_1K 0x01
296 #define TSB_SIZE_2K 0x02
297 #define TSB_SIZE_4K 0x03
298 #define TSB_SIZE_8K 0x04
299 #define TSB_SIZE_16K 0x05
300 #define TSB_SIZE_32K 0x06
301 #define TSB_SIZE_64K 0x07
302 #define TSB_SPLIT 0x1000
303 #define TSB_BASE 0xffffffffffffe000
304
305 /* TLB Tag Access bits */
306 #define TLB_TAG_ACCESS_VA 0xffffffffffffe000
307 #define TLB_TAG_ACCESS_CTX 0x0000000000001fff
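
/*
 * Illustrative sketch (not part of the original header): the TSB register
 * combines the size-aligned TSB base address with the size code and the
 * optional split bit; for a hypothetical 8K-entry D-TSB at tsb_base:
 *
 *	stxa(TSB, ASI_DMMU, (tsb_base & TSB_BASE) | TSB_SIZE_8K);
 *	membar_sync();
 */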
308
309 /*
310 * TLB demap registers. TTEs are defined in v9pte.h
311 *
312 * Use the address space to select between IMMU and DMMU.
313 * The low bits of the address written select the demap operation
314 * and which context register supplies the context.
315 *
316 * For page demaps the upper bits of the address supply the VA;
317 * the data stored is ignored. The DEMAP_CTX_<> operations ignore
318 * the VA and demap the entire context.
319 *
320 */
321 #define ASI_IMMU_DEMAP 0x57 /* [4u] IMMU TLB demap */
322 #define ASI_DMMU_DEMAP 0x5f /* [4u] DMMU TLB demap */
323
324 #define DEMAP_PAGE_NUCLEUS ((0x02)<<4) /* Demap page from kernel AS */
325 #define DEMAP_PAGE_PRIMARY ((0x00)<<4) /* Demap a page from primary CTXT */
326 #define DEMAP_PAGE_SECONDARY ((0x01)<<4) /* Demap page from secondary CTXT (DMMU only) */
327 #define DEMAP_CTX_NUCLEUS ((0x06)<<4) /* Demap all of kernel CTXT */
328 #define DEMAP_CTX_PRIMARY ((0x04)<<4) /* Demap all of primary CTXT */
329 #define DEMAP_CTX_SECONDARY ((0x05)<<4) /* Demap all of secondary CTXT */
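
/*
 * Illustrative sketch (not part of the original header): a demap is just a
 * stxa to the demap ASI; the address carries the (page-aligned) VA and one
 * of the DEMAP_* selectors above, and the data is ignored. E.g., with va a
 * hypothetical 8K-aligned virtual address:
 *
 *	stxa(va | DEMAP_PAGE_PRIMARY, ASI_DMMU_DEMAP, 0);	(one D-TLB page)
 *	stxa(DEMAP_CTX_SECONDARY, ASI_DMMU_DEMAP, 0);		(whole secondary ctx)
 *	membar_sync();
 */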
330
331 /*
332 * Interrupt registers. This really gets hairy.
333 */
334
335 /* IRSR -- Interrupt Receive Status Register */
336 #define ASI_IRSR 0x49
337 #define IRSR 0x00
338 #define IRSR_BUSY 0x020
339 #define IRSR_MID(x) ((x)&0x1f)
340
341 /* IRDR -- Interrupt Receive Data Registers */
342 #define ASI_IRDR 0x7f
343 #define IRDR_0H 0x40
344 #define IRDR_0L 0x48 /* unimplemented */
345 #define IRDR_1H 0x50
346 #define IRDR_1L 0x58 /* unimplemented */
347 #define IRDR_2H 0x60
348 #define IRDR_2L 0x68 /* unimplemented */
349 #define IRDR_3H 0x70 /* unimplemented */
350 #define IRDR_3L 0x78 /* unimplemented */
351
352 /* SOFTINT ASRs */
353 #define SET_SOFTINT %asr20 /* Sets these bits */
354 #define CLEAR_SOFTINT %asr21 /* Clears these bits */
355 #define SOFTINT %asr22 /* Reads the register */
356 #define TICK_CMPR %asr23
357
358 #define TICK_INT 0x01 /* level-14 clock tick */
359 #define SOFTINT1 (0x1<<1)
360 #define SOFTINT2 (0x1<<2)
361 #define SOFTINT3 (0x1<<3)
362 #define SOFTINT4 (0x1<<4)
363 #define SOFTINT5 (0x1<<5)
364 #define SOFTINT6 (0x1<<6)
365 #define SOFTINT7 (0x1<<7)
366 #define SOFTINT8 (0x1<<8)
367 #define SOFTINT9 (0x1<<9)
368 #define SOFTINT10 (0x1<<10)
369 #define SOFTINT11 (0x1<<11)
370 #define SOFTINT12 (0x1<<12)
371 #define SOFTINT13 (0x1<<13)
372 #define SOFTINT14 (0x1<<14)
373 #define SOFTINT15 (0x1<<15)
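
/*
 * Illustrative sketch (not part of the original header): soft interrupts
 * are posted/acknowledged by writing these bits to the SET_SOFTINT and
 * CLEAR_SOFTINT ASRs, e.g. to post a level 1 soft interrupt from C:
 *
 *	__asm __volatile("wr %0, 0, %%asr20" : : "r" (SOFTINT1));
 */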
374
375 /* Interrupt Dispatch -- usually reserved for cross-calls */
376 #define ASR_IDSR 0x48 /* Interrupt dispatch status reg */
377 #define IDSR 0x00
378 #define IDSR_NACK 0x02
379 #define IDSR_BUSY 0x01
380
381 #define ASI_INTERRUPT_DISPATCH 0x77 /* [4u] spitfire interrupt dispatch regs */
382 #define IDCR(x) (((x)<<14)|0x70) /* Store anything to this address to dispatch crosscall to CPU (x) */
383 #define IDDR_0H 0x40 /* Store data to send in these regs */
384 #define IDDR_0L 0x48 /* unimplemented */
385 #define IDDR_1H 0x50
386 #define IDDR_1L 0x58 /* unimplemented */
387 #define IDDR_2H 0x60
388 #define IDDR_2L 0x68 /* unimplemented */
389 #define IDDR_3H 0x70 /* unimplemented */
390 #define IDDR_3L 0x78 /* unimplemented */
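
/*
 * Illustrative sketch (not part of the original header) of a cross-call
 * dispatch using the ldxa()/stxa() helpers below; func, arg0, arg1 and
 * upaid (the target CPU's UPA MID) are hypothetical:
 *
 *	stxa(IDDR_0H, ASI_INTERRUPT_DISPATCH, func);
 *	stxa(IDDR_1H, ASI_INTERRUPT_DISPATCH, arg0);
 *	stxa(IDDR_2H, ASI_INTERRUPT_DISPATCH, arg1);
 *	stxa(IDCR(upaid), ASI_INTERRUPT_DISPATCH, 0);	(data is ignored)
 *	membar_sync();
 *	while (ldxa(IDSR, ASR_IDSR) & IDSR_BUSY)
 *		continue;
 *	if (ldxa(IDSR, ASR_IDSR) & IDSR_NACK)
 *		(retry the dispatch)
 */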
391
392 /*
393 * Error registers
394 */
395
396 /* Since we won't try to fix async errs, we don't care about the bits in the regs */
397 #define ASI_AFAR 0x4d /* Asynchronous fault address register */
398 #define AFAR 0x00
399 #define ASI_AFSR 0x4c /* Asynchronous fault status register */
400 #define AFSR 0x00
401
402 #define ASI_P_EER 0x4b /* Error enable register */
403 #define P_EER 0x00
404 #define P_EER_ISAPEN 0x04 /* Enable fatal on ISAP */
405 #define P_EER_NCEEN 0x02 /* Enable trap on uncorrectable errs */
406 #define P_EER_CEEN 0x01 /* Enable trap on correctable errs */
407
408 #define ASI_DATAPATH_READ 0x7f /* Read the regs */
409 #define ASI_DATAPATH_WRITE 0x77 /* Write to the regs */
410 #define P_DPER_0 0x00 /* Datapath err reg 0 */
411 #define P_DPER_1 0x18 /* Datapath err reg 1 */
412 #define P_DCR_0 0x20 /* Datapath control reg 0 */
413 #define P_DCR_1 0x38 /* Datapath control reg 1 */
414
415
416 /* From sparc64/asm.h which I think I'll deprecate since it makes bus.h a pain. */
417
418 #ifndef _LOCORE
419 /*
420 * GCC __asm constructs for doing assembly stuff.
421 */
422
423 /*
424 * ``Routines'' to load and store from/to alternate address space.
425 * The location can be a variable, the asi value (address space indicator)
426 * must be a constant.
427 *
428 * N.B.: You can put as many special functions here as you like, since
429 * they cost no kernel space or time if they are not used.
430 *
431 * These were static inline functions, but gcc screws up the constraints
432 * on the address space identifiers (the "n"umeric value part) because
433 * it inlines too late, so we have to use the funny valued-macro syntax.
434 */
435
436 /*
437 * Apparently the definition of bypass ASIs is that they all use the
438 * D$ so we need to flush the D$ to make sure we don't get data pollution.
439 */
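
/*
 * Conceptually (an illustrative sketch, not part of the original header),
 * each physical-access routine below brackets the access with a D-cache
 * line flush, i.e. something like:
 *
 *	stxa(loc & ~0x1f, ASI_DCACHE_TAG, 0);	(flush the target line)
 *	membar_sync();
 *	(load or store loc through the bypass ASI)
 *	stxa(loc & ~0x1f, ASI_DCACHE_TAG, 0);
 *	membar_sync();
 */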
440
441 static __inline__ u_char lduba __P((paddr_t loc, int asi));
442 static __inline__ u_short lduha __P((paddr_t loc, int asi));
443 static __inline__ u_int lda __P((paddr_t loc, int asi));
444 static __inline__ int ldswa __P((paddr_t loc, int asi));
445 static __inline__ u_int64_t ldxa __P((paddr_t loc, int asi));
446 static __inline__ u_int64_t ldda __P((paddr_t loc, int asi));
447
448 static __inline__ void stba __P((paddr_t loc, int asi, u_char value));
449 static __inline__ void stha __P((paddr_t loc, int asi, u_short value));
450 static __inline__ void sta __P((paddr_t loc, int asi, u_int value));
451 static __inline__ void stxa __P((paddr_t loc, int asi, u_int64_t value));
452 static __inline__ void stda __P((paddr_t loc, int asi, u_int64_t value));
453
454 #ifdef __arch64__
455 static __inline__ u_char
456 lduba(paddr_t loc, int asi)
457 {
458 register unsigned int _lduba_v;
459
460 if (PHYS_ASI(asi)) {
461 __asm __volatile("wr %3,%%g0,%%asi; "
462 " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; "
463 " lduba [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; "
464 " stxa %%g0,[%1] %4; membar #Sync" :
465 "=&r" (_lduba_v), "=r" (loc):
466 "r" ((unsigned long)(loc)),
467 "r" (asi), "n" (ASI_DCACHE_TAG));
468 } else {
469 __asm __volatile("wr %2,%%g0,%%asi; lduba [%1]%%asi,%0" :
470 "=r" (_lduba_v) :
471 "r" ((unsigned long)(loc)), "r" (asi));
472 }
473 return (_lduba_v);
474 }
475 #else
476 static __inline__ u_char
477 lduba(paddr_t loc, int asi)
478 {
479 register unsigned int _lduba_v, _loc_hi, _pstate;
480
481 _loc_hi = (((u_int64_t)loc)>>32);
482 if (PHYS_ASI(asi)) {
483 __asm __volatile("wr %4,%%g0,%%asi; "
484 " andn %2,0x1f,%0; stxa %%g0,[%0] %5; rdpr %%pstate,%1; "
485 " sllx %3,32,%0; or %0,%2,%0; wrpr %1,8,%%pstate; "
486 " membar #Sync; lduba [%0]%%asi,%0; wrpr %1,0,%%pstate; "
487 " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %5; membar #Sync" :
488 "=&r" (_lduba_v), "=&r" (_pstate) :
489 "r" ((unsigned long)(loc)), "r" (_loc_hi),
490 "r" (asi), "n" (ASI_DCACHE_TAG));
491 } else {
492 __asm __volatile("wr %3,%%g0,%%asi; sllx %2,32,%0; "
493 " or %0,%1,%0; lduba [%0]%%asi,%0" : "=&r" (_lduba_v) :
494 "r" ((unsigned long)(loc)),
495 "r" (_loc_hi), "r" (asi));
496 }
497 return (_lduba_v);
498 }
499 #endif
500
501 #ifdef __arch64__
502 /* load half-word from alternate address space */
503 static __inline__ u_short
504 lduha(paddr_t loc, int asi)
505 {
506 register unsigned int _lduha_v;
507
508 if (PHYS_ASI(asi)) {
509 __asm __volatile("wr %3,%%g0,%%asi; "
510 " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; "
511 " lduha [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; "
512 " stxa %%g0,[%1] %4; membar #Sync" : "=&r" (_lduha_v), "=r" (loc) :
513 "r" ((unsigned long)(loc)),
514 "r" (asi), "n" (ASI_DCACHE_TAG));
515 } else {
516 __asm __volatile("wr %2,%%g0,%%asi; lduha [%1]%%asi,%0" :
517 "=r" (_lduha_v) :
518 "r" ((unsigned long)(loc)), "r" (asi));
519 }
520 return (_lduha_v);
521 }
522 #else
523 /* load half-word from alternate address space */
524 static __inline__ u_short
525 lduha(paddr_t loc, int asi) {
526 register unsigned int _lduha_v, _loc_hi, _pstate;
527
528 _loc_hi = (((u_int64_t)loc)>>32);
529
530 if (PHYS_ASI(asi)) {
531 __asm __volatile("wr %4,%%g0,%%asi; rdpr %%pstate,%1; "
532 " andn %2,0x1f,%0; stxa %%g0,[%0] %5; wrpr %1,8,%%pstate; sllx %3,32,%0; "
533 " or %0,%2,%0; membar #Sync; lduha [%0]%%asi,%0; wrpr %1,0,%%pstate; "
534 " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %5; membar #Sync" :
535 "=&r" (_lduha_v), "=&r" (_pstate) :
536 "r" ((unsigned long)(loc)), "r" (_loc_hi),
537 "r" (asi), "n" (ASI_DCACHE_TAG));
538 } else {
539 __asm __volatile("wr %3,%%g0,%%asi; sllx %2,32,%0; "
540 " or %0,%1,%0; lduha [%0]%%asi,%0" : "=&r" (_lduha_v) :
541 "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
542 }
543 return (_lduha_v);
544 }
545 #endif
546
547
548 #ifdef __arch64__
549 /* load unsigned int from alternate address space */
550 static __inline__ u_int
551 lda(paddr_t loc, int asi)
552 {
553 register unsigned int _lda_v;
554
555 if (PHYS_ASI(asi)) {
556 __asm __volatile("wr %3,%%g0,%%asi; "
557 " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; "
558 " lda [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; "
559 " stxa %%g0,[%1] %4; membar #Sync" : "=&r" (_lda_v), "=r" (loc) :
560 "r" ((unsigned long)(loc)),
561 "r" (asi), "n" (ASI_DCACHE_TAG));
562 } else {
563 __asm __volatile("wr %2,%%g0,%%asi; lda [%1]%%asi,%0" :
564 "=r" (_lda_v) :
565 "r" ((unsigned long)(loc)), "r" (asi));
566 }
567 return (_lda_v);
568 }
569
570 /* load signed int from alternate address space */
571 static __inline__ int
572 ldswa(paddr_t loc, int asi)
573 {
574 register int _lda_v;
575
576 if (PHYS_ASI(asi)) {
577 __asm __volatile("wr %3,%%g0,%%asi; "
578 " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; "
579 " ldswa [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; "
580 " stxa %%g0,[%1] %4; membar #Sync" : "=&r" (_lda_v), "=r" (loc) :
581 "r" ((unsigned long)(loc)),
582 "r" (asi), "n" (ASI_DCACHE_TAG));
583 } else {
584 __asm __volatile("wr %2,%%g0,%%asi; ldswa [%1]%%asi,%0" :
585 "=r" (_lda_v) :
586 "r" ((unsigned long)(loc)), "r" (asi));
587 }
588 return (_lda_v);
589 }
590 #else /* __arch64__ */
591 /* load unsigned int from alternate address space */
592 static __inline__ u_int
593 lda(paddr_t loc, int asi)
594 {
595 register unsigned int _lda_v, _loc_hi, _pstate;
596
597 _loc_hi = (((u_int64_t)loc)>>32);
598 if (PHYS_ASI(asi)) {
599 __asm __volatile("wr %4,%%g0,%%asi; rdpr %%pstate,%1;"
600 " andn %2,0x1f,%0; stxa %%g0,[%0] %5; wrpr %1,8,%%pstate; "
601 " sllx %3,32,%0; or %0,%2,%0; membar #Sync;lda [%0]%%asi,%0; "
602 " wrpr %1,0,%%pstate; andn %2,0x1f,%1; membar #Sync; "
603 " stxa %%g0,[%1] %5; membar #Sync" : "=&r" (_lda_v), "=&r" (_pstate) :
604 "r" ((unsigned long)(loc)), "r" (_loc_hi),
605 "r" (asi), "n" (ASI_DCACHE_TAG));
606 } else {
607 __asm __volatile("wr %3,%%g0,%%asi; sllx %2,32,%0; "
608 " or %0,%1,%0; lda [%0]%%asi,%0" : "=&r" (_lda_v) :
609 "r" ((unsigned long)(loc)),
610 "r" (_loc_hi), "r" (asi));
611 }
612 return (_lda_v);
613 }
614
615 /* load signed int from alternate address space */
616 static __inline__ int
617 ldswa(paddr_t loc, int asi)
618 {
619 register int _lda_v, _loc_hi, _pstate;
620
621 _loc_hi = (((u_int64_t)loc)>>32);
622 if (PHYS_ASI(asi)) {
623 __asm __volatile("wr %4,%%g0,%%asi; rdpr %%pstate,%1;"
624 " andn %2,0x1f,%0; stxa %%g0,[%0] %5; wrpr %1,8,%%pstate; sllx %3,32,%0;"
625 " or %0,%2,%0; membar #Sync; ldswa [%0]%%asi,%0; wrpr %1,0,%%pstate; "
626 " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %5; membar #Sync" :
627 "=&r" (_lda_v), "=&r" (_pstate) :
628 "r" ((unsigned long)(loc)), "r" (_loc_hi),
629 "r" (asi), "n" (ASI_DCACHE_TAG));
630 } else {
631 __asm __volatile("wr %3,%%g0,%%asi; sllx %2,32,%0; "
632 " or %0,%1,%0; ldswa [%0]%%asi,%0" : "=&r" (_lda_v) :
633 "r" ((unsigned long)(loc)),
634 "r" (_loc_hi), "r" (asi));
635 }
636 return (_lda_v);
637 }
638 #endif /* __arch64__ */
639
640 #ifdef __arch64__
641 /* load 64-bit int from alternate address space -- these should never be used */
642 static __inline__ u_int64_t
643 ldda(paddr_t loc, int asi)
644 {
645 register long long _lda_v;
646
647 if (PHYS_ASI(asi)) {
648 __asm __volatile("wr %3,%%g0,%%asi; "
649 " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; "
650 " ldda [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; "
651 " stxa %%g0,[%1] %4; membar #Sync" : "=&r" (_lda_v), "=&r" (loc) :
652 "r" ((unsigned long)(loc)),
653 "r" (asi), "n" (ASI_DCACHE_TAG));
654 } else {
655 __asm __volatile("wr %2,%%g0,%%asi; ldda [%1]%%asi,%0" :
656 "=r" (_lda_v) :
657 "r" ((unsigned long)(loc)), "r" (asi));
658 }
659 return (_lda_v);
660 }
661 #else
662 /* load 64-bit int from alternate address space */
663 static __inline__ u_int64_t
664 ldda(paddr_t loc, int asi)
665 {
666 register long long _lda_v, _loc_hi, _pstate;
667
668 _loc_hi = (((u_int64_t)loc)>>32);
669 if (PHYS_ASI(asi)) {
670 __asm __volatile("wr %4,%%g0,%%asi; rdpr %%pstate,%1;"
671 " andn %2,0x1f,%0; rdpr %%pstate,%1; stxa %%g0,[%0] %5; wrpr %1,8,%%pstate;"
672 " sllx %3,32,%0; or %0,%2,%0; membar #Sync; ldda [%0]%%asi,%0; wrpr %1,0,%%pstate; "
673 " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %5; membar #Sync" :
674 "=&r" (_lda_v), "=&r" (_pstate) :
675 "r" ((unsigned long)(loc)), "r" (_loc_hi),
676 "r" (asi), "n" (ASI_DCACHE_TAG));
677 } else {
678 __asm __volatile("wr %3,%%g0,%%asi; sllx %2,32,%0; "
679 " or %0,%1,%0; ldda [%0]%%asi,%0" : "=&r" (_lda_v) :
680 "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
681 }
682 return (_lda_v);
683 }
684 #endif
685
686
687 #ifdef __arch64__
688 /* native load 64-bit int from alternate address space w/64-bit compiler*/
689 static __inline__ u_int64_t
690 ldxa(paddr_t loc, int asi)
691 {
692 register unsigned long _lda_v;
693
694 if (PHYS_ASI(asi)) {
695 __asm __volatile("wr %3,%%g0,%%asi; "
696 " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; "
697 " ldxa [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; "
698 " stxa %%g0,[%1] %4; membar #Sync" : "=&r" (_lda_v), "=r" (loc) :
699 "r" ((unsigned long)(loc)),
700 "r" (asi), "n" (ASI_DCACHE_TAG));
701 } else {
702 __asm __volatile("wr %2,%%g0,%%asi; ldxa [%1]%%asi,%0" :
703 "=r" (_lda_v) :
704 "r" ((unsigned long)(loc)), "r" (asi));
705 }
706 return (_lda_v);
707 }
708 #else
709 /* native load 64-bit int from alternate address space w/32-bit compiler*/
710 static __inline__ u_int64_t
711 ldxa(paddr_t loc, int asi)
712 {
713 register unsigned long _ldxa_lo, _ldxa_hi, _loc_hi;
714
715 _loc_hi = (((u_int64_t)loc)>>32);
716 if (PHYS_ASI(asi)) {
717 __asm __volatile("wr %4,%%g0,%%asi; "
718 " andn %2,0x1f,%0; rdpr %%pstate,%1; stxa %%g0,[%0] %5; "
719 " sllx %3,32,%0; wrpr %1,8,%%pstate; or %0,%2,%0; membar #Sync; ldxa [%0]%%asi,%0; "
720 " wrpr %1,0,%%pstate; andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %5; membar #Sync; "
721 " srlx %0,32,%1; srl %0,0,%0" :
722 "=&r" (_ldxa_lo), "=&r" (_ldxa_hi) :
723 "r" ((unsigned long)(loc)), "r" (_loc_hi),
724 "r" (asi), "n" (ASI_DCACHE_TAG));
725 } else {
726 __asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; "
727 " or %0,%2,%0; ldxa [%0]%%asi,%0; srlx %0,32,%1; srl %0,0,%0;" :
728 "=&r" (_ldxa_lo), "=&r" (_ldxa_hi) :
729 "r" ((unsigned long)(loc)), "r" (_loc_hi),
730 "r" (asi));
731 }
732 return ((((int64_t)_ldxa_hi)<<32)|_ldxa_lo);
733 }
734 #endif
735
736 /* store byte to alternate address space */
737 #ifdef __arch64__
738 static __inline__ void
739 stba(paddr_t loc, int asi, u_char value)
740 {
741 if (PHYS_ASI(asi)) {
742 __asm __volatile("wr %3,%%g0,%%asi; stba %1,[%2]%%asi;"
743 " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %4; membar #Sync" : "=&r" (loc) :
744 "r" ((int)(value)), "r" ((unsigned long)(loc)),
745 "r" (asi), "n" (ASI_DCACHE_TAG));
746 } else {
747 __asm __volatile("wr %2,%%g0,%%asi; stba %0,[%1]%%asi" : :
748 "r" ((int)(value)), "r" ((unsigned long)(loc)),
749 "r" (asi));
750 }
751 }
752 #else
753 static __inline__ void
754 stba(paddr_t loc, int asi, u_char value)
755 {
756 register int _loc_hi, _pstate;
757
758 _loc_hi = (((u_int64_t)loc)>>32);
759 if (PHYS_ASI(asi)) {
760 __asm __volatile("wr %5,%%g0,%%asi; sllx %4,32,%0; rdpr %%pstate,%1;"
761 " or %3,%0,%0; wrpr %1,8,%%pstate; stba %2,[%0]%%asi; wrpr %1,0,%%pstate; "
762 " andn %0,0x1f,%1; membar #Sync; stxa %%g0,[%1] %6; membar #Sync" :
763 "=&r" (_loc_hi), "=&r" (_pstate) :
764 "r" ((int)(value)), "r" ((unsigned long)(loc)),
765 "r" (_loc_hi), "r" (asi), "n" (ASI_DCACHE_TAG));
766 } else {
767 __asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; "
768 " or %2,%0,%0; stba %1,[%0]%%asi" : "=&r" (_loc_hi) :
769 "r" ((int)(value)), "r" ((unsigned long)(loc)),
770 "r" (_loc_hi), "r" (asi));
771 }
772 }
773 #endif
774
775 /* store half-word to alternate address space */
776 #ifdef __arch64__
777 static __inline__ void
778 stha(paddr_t loc, int asi, u_short value)
779 {
780 if (PHYS_ASI(asi)) {
781 __asm __volatile("wr %3,%%g0,%%asi; stha %1,[%2]%%asi;"
782 " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %4; membar #Sync" : "=&r" (loc) :
783 "r" ((int)(value)), "r" ((unsigned long)(loc)),
784 "r" (asi), "n" (ASI_DCACHE_TAG));
785 } else {
786 __asm __volatile("wr %2,%%g0,%%asi; stha %0,[%1]%%asi" : :
787 "r" ((int)(value)), "r" ((unsigned long)(loc)),
788 "r" (asi));
789 }
790 }
791 #else
792 static __inline__ void
793 stha(paddr_t loc, int asi, u_short value)
794 {
795 register int _loc_hi, _pstate;
796
797 _loc_hi = (((u_int64_t)loc)>>32);
798 if (PHYS_ASI(asi)) {
799 __asm __volatile("wr %5,%%g0,%%asi; sllx %4,32,%0; rdpr %%pstate,%1;"
800 " or %3,%0,%0; wrpr %1,8,%%pstate; stha %2,[%0]%%asi; wrpr %1,0,%%pstate; "
801 " andn %0,0x1f,%1; membar #Sync; stxa %%g0,[%1] %6; membar #Sync" :
802 "=&r" (_loc_hi), "=&r" (_pstate) :
803 "r" ((int)(value)), "r" ((unsigned long)(loc)),
804 "r" (_loc_hi), "r" (asi), "n" (ASI_DCACHE_TAG));
805 } else {
806 __asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; "
807 " or %2,%0,%0; stha %1,[%0]%%asi" : "=&r" (_loc_hi) :
808 "r" ((int)(value)), "r" ((unsigned long)(loc)),
809 "r" (_loc_hi), "r" (asi));
810 }
811 }
812 #endif
813
814
815 /* store int to alternate address space */
816 #ifdef __arch64__
817 static __inline__ void
818 sta(paddr_t loc, int asi, u_int value)
819 {
820 if (PHYS_ASI(asi)) {
821 __asm __volatile("wr %3,%%g0,%%asi; sta %1,[%2]%%asi;"
822 " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %4; membar #Sync" : "=&r" (loc) :
823 "r" ((int)(value)), "r" ((unsigned long)(loc)),
824 "r" (asi), "n" (ASI_DCACHE_TAG));
825 } else {
826 __asm __volatile("wr %2,%%g0,%%asi; sta %0,[%1]%%asi" : :
827 "r" ((int)(value)), "r" ((unsigned long)(loc)),
828 "r" (asi));
829 }
830 }
831 #else
832 static __inline__ void
833 sta(paddr_t loc, int asi, u_int value)
834 {
835 register int _loc_hi, _pstate;
836
837 _loc_hi = (((u_int64_t)loc)>>32);
838 if (PHYS_ASI(asi)) {
839 __asm __volatile("wr %5,%%g0,%%asi; sllx %4,32,%0; rdpr %%pstate,%1;"
840 " or %3,%0,%0; wrpr %1,8,%%pstate; sta %2,[%0]%%asi; wrpr %1,0,%%pstate; "
841 " andn %0,0x1f,%1; membar #Sync; stxa %%g0,[%1] %6; membar #Sync" :
842 "=&r" (_loc_hi), "=&r" (_pstate) :
843 "r" ((int)(value)), "r" ((unsigned long)(loc)),
844 "r" (_loc_hi), "r" (asi), "n" (ASI_DCACHE_TAG));
845 } else {
846 __asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; "
847 " or %2,%0,%0; sta %1,[%0]%%asi" : "=&r" (_loc_hi) :
848 "r" ((int)(value)), "r" ((unsigned long)(loc)),
849 "r" (_loc_hi), "r" (asi));
850 }
851 }
852 #endif
853
854 /* store 64-bit int to alternate address space */
855 #ifdef __arch64__
856 static __inline__ void
857 stda(paddr_t loc, int asi, u_int64_t value)
858 {
859 if (PHYS_ASI(asi)) {
860 __asm __volatile("wr %3,%%g0,%%asi; stda %1,[%2]%%asi;"
861 " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %4; membar #Sync" : "=&r" (loc) :
862 "r" ((int)(value)), "r" ((unsigned long)(loc)),
863 "r" (asi), "n" (ASI_DCACHE_TAG));
864 } else {
865 __asm __volatile("wr %2,%%g0,%%asi; stda %0,[%1]%%asi" : :
866 "r" ((long long)(value)), "r" ((unsigned long)(loc)),
867 "r" (asi));
868 }
869 }
870 #else
871 static __inline__ void
872 stda(paddr_t loc, int asi, u_int64_t value)
873 {
874 register int _loc_hi, _pstate;
875
876 _loc_hi = (((u_int64_t)loc)>>32);
877 if (PHYS_ASI(asi)) {
878 __asm __volatile("wr %5,%%g0,%%asi; sllx %4,32,%0; rdpr %%pstate,%1; "
879 " or %3,%0,%0; wrpr %1,8,%%pstate; stda %2,[%0]%%asi; wrpr %1,0,%%pstate;"
880 " andn %0,0x1f,%1; membar #Sync; stxa %%g0,[%1] %6; membar #Sync" :
881 "=&r" (_loc_hi), "=&r" (_pstate) :
882 "r" ((long long)(value)), "r" ((unsigned long)(loc)),
883 "r" (_loc_hi), "r" (asi), "n" (ASI_DCACHE_TAG));
884 } else {
885 __asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; "
886 " or %2,%0,%0; stda %1,[%0]%%asi" : "=&r" (_loc_hi) :
887 "r" ((long long)(value)), "r" ((unsigned long)(loc)),
888 "r" (_loc_hi), "r" (asi));
889 }
890 }
891 #endif
892
893 #ifdef __arch64__
894 /* native store 64-bit int to alternate address space w/64-bit compiler*/
895 static __inline__ void
896 stxa(paddr_t loc, int asi, u_int64_t value)
897 {
898 if (PHYS_ASI(asi)) {
899 __asm __volatile("wr %3,%%g0,%%asi; stxa %1,[%2]%%asi;"
900 " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %4; membar #Sync" : "=&r" (loc) :
901 "r" ((int)(value)), "r" ((unsigned long)(loc)),
902 "r" (asi), "n" (ASI_DCACHE_TAG));
903 } else {
904 __asm __volatile("wr %2,%%g0,%%asi; stxa %0,[%1]%%asi" : :
905 "r" ((unsigned long)(value)),
906 "r" ((unsigned long)(loc)), "r" (asi));
907 }
908 }
909 #else
910 /* native store 64-bit int to alternate address space w/32-bit compiler*/
911 static __inline__ void
912 stxa(paddr_t loc, int asi, u_int64_t value)
913 {
914 int _stxa_lo, _stxa_hi, _loc_hi;
915
916 _stxa_lo = value;
917 _stxa_hi = ((u_int64_t)value)>>32;
918 _loc_hi = (((u_int64_t)(u_long)loc)>>32);
919
920 if (PHYS_ASI(asi)) {
921 __asm __volatile("wr %7,%%g0,%%asi; sllx %4,32,%1; sllx %6,32,%0; "
922 " or %1,%3,%1; rdpr %%pstate,%3; or %0,%5,%0; wrpr %3,8,%%pstate; "
923 " stxa %1,[%0]%%asi; wrpr %3,0,%%pstate; "
924 " andn %0,0x1f,%1; membar #Sync; stxa %%g0,[%1] %8; membar #Sync" :
925 "=&r" (_loc_hi), "=&r" (_stxa_hi),
926 "=&r" ((int)(_stxa_lo)) :
927 "r" ((int)(_stxa_lo)), "r" ((int)(_stxa_hi)),
928 "r" ((unsigned long)(loc)), "r" (_loc_hi),
929 "r" (asi), "n" (ASI_DCACHE_TAG));
930 } else {
931 __asm __volatile("wr %6,%%g0,%%asi; sllx %3,32,%1; sllx %5,32,%0; "
932 " or %1,%2,%1; or %0,%4,%0; stxa %1,[%0]%%asi" :
933 "=&r" (_loc_hi), "=&r" (_stxa_hi) :
934 "r" ((int)(_stxa_lo)), "r" ((int)(_stxa_hi)),
935 "r" ((unsigned long)(loc)), "r" (_loc_hi),
936 "r" (asi));
937 }
938 }
939 #endif
940
941 #if 0
942 #ifdef __arch64__
943 /* load byte from alternate address space */
944 #define lduba(loc, asi) ({ \
945 register unsigned int _lduba_v; \
946 if (PHYS_ASI(asi)) { \
947 __asm __volatile("wr %3,%%g0,%%asi; " \
948 " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; " \
949 " lduba [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; " \
950 " stxa %%g0,[%1] %4; membar #Sync" : \
951 "=&r" (_lduba_v), "=r" (loc): \
952 "r" ((unsigned long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
953 } else { \
954 __asm __volatile("wr %2,%%g0,%%asi; lduba [%1]%%asi,%0" : \
955 "=r" (_lduba_v) : \
956 "r" ((unsigned long)(loc)), "r" (asi)); \
957 } \
958 _lduba_v; \
959 })
960 #else
961 /* load byte from alternate address space */
962 #define lduba(loc, asi) ({ \
963 register unsigned int _lduba_v, _loc_hi, _pstate; \
964 _loc_hi = (((u_int64_t)loc)>>32); \
965 if (PHYS_ASI(asi)) { \
966 __asm __volatile("wr %4,%%g0,%%asi; " \
967 " andn %2,0x1f,%0; stxa %%g0,[%0] %5; rdpr %%pstate,%1; " \
968 " sllx %3,32,%0; or %0,%2,%0; wrpr %1,8,%%pstate; " \
969 " membar #Sync; lduba [%0]%%asi,%0; wrpr %1,0,%%pstate; " \
970 " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %5; membar #Sync" : \
971 "=&r" (_lduba_v), "=&r" (_pstate) : \
972 "r" ((unsigned long)(loc)), "r" (_loc_hi), \
973 "r" (asi), "n" (ASI_DCACHE_TAG)); \
974 } else { \
975 __asm __volatile("wr %3,%%g0,%%asi; sllx %2,32,%0; " \
976 " or %0,%1,%0; lduba [%0]%%asi,%0" : "=&r" (_lduba_v) : \
977 "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
978 } \
979 _lduba_v; \
980 })
981 #endif
982
983 #ifdef __arch64__
984 /* load half-word from alternate address space */
985 #define lduha(loc, asi) ({ \
986 register unsigned int _lduha_v; \
987 if (PHYS_ASI(asi)) { \
988 __asm __volatile("wr %3,%%g0,%%asi; " \
989 " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; " \
990 " lduha [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; " \
991 " stxa %%g0,[%1] %4; membar #Sync" : "=&r" (_lduha_v), "=r" (loc) : \
992 "r" ((unsigned long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
993 } else { \
994 __asm __volatile("wr %2,%%g0,%%asi; lduha [%1]%%asi,%0" : "=r" (_lduha_v) : \
995 "r" ((unsigned long)(loc)), "r" (asi)); \
996 } \
997 _lduha_v; \
998 })
999 #else
1000 /* load half-word from alternate address space */
1001 #define lduha(loc, asi) ({ \
1002 register unsigned int _lduha_v, _loc_hi, _pstate; \
1003 _loc_hi = (((u_int64_t)loc)>>32); \
1004 if (PHYS_ASI(asi)) { \
1005 __asm __volatile("wr %4,%%g0,%%asi; rdpr %%pstate,%1; " \
1006 " andn %2,0x1f,%0; stxa %%g0,[%0] %5; wrpr %1,8,%%pstate; sllx %3,32,%0; " \
1007 " or %0,%2,%0; membar #Sync; lduha [%0]%%asi,%0; wrpr %1,0,%%pstate; " \
1008 " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %5; membar #Sync" : \
1009 "=&r" (_lduha_v), "=&r" (_pstate) : \
1010 "r" ((unsigned long)(loc)), "r" (_loc_hi), \
1011 "r" (asi), "n" (ASI_DCACHE_TAG)); \
1012 } else { \
1013 __asm __volatile("wr %3,%%g0,%%asi; sllx %2,32,%0; " \
1014 " or %0,%1,%0; lduha [%0]%%asi,%0" : "=&r" (_lduha_v) : \
1015 "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
1016 } \
1017 _lduha_v; \
1018 })
1019 #endif
1020
1021 #ifdef __arch64__
1022 /* load unsigned int from alternate address space */
1023 #define lda(loc, asi) ({ \
1024 register unsigned int _lda_v; \
1025 if (PHYS_ASI(asi)) { \
1026 __asm __volatile("wr %3,%%g0,%%asi; " \
1027 " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; " \
1028 " lda [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; " \
1029 " stxa %%g0,[%1] %4; membar #Sync" : "=&r" (_lda_v), "=r" (loc) : \
1030 "r" ((unsigned long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
1031 } else { \
1032 __asm __volatile("wr %2,%%g0,%%asi; lda [%1]%%asi,%0" : "=r" (_lda_v) : \
1033 "r" ((unsigned long)(loc)), "r" (asi)); \
1034 } \
1035 _lda_v; \
1036 })
1037
1038 /* load signed int from alternate address space */
1039 #define ldswa(loc, asi) ({ \
1040 register int _lda_v; \
1041 if (PHYS_ASI(asi)) { \
1042 __asm __volatile("wr %3,%%g0,%%asi; " \
1043 " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; " \
1044 " ldswa [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; " \
1045 " stxa %%g0,[%1] %4; membar #Sync" : "=&r" (_lda_v), "=r" (loc) : \
1046 "r" ((unsigned long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
1047 } else { \
1048 __asm __volatile("wr %2,%%g0,%%asi; ldswa [%1]%%asi,%0" : "=r" (_lda_v) : \
1049 "r" ((unsigned long)(loc)), "r" (asi)); \
1050 } \
1051 _lda_v; \
1052 })
1053 #else /* __arch64__ */
1054 /* load unsigned int from alternate address space */
1055 #define lda(loc, asi) ({ \
1056 register unsigned int _lda_v, _loc_hi, _pstate; \
1057 _loc_hi = (((u_int64_t)loc)>>32); \
1058 if (PHYS_ASI(asi)) { \
1059 __asm __volatile("wr %4,%%g0,%%asi; rdpr %%pstate,%1;" \
1060 " andn %2,0x1f,%0; stxa %%g0,[%0] %5; wrpr %1,8,%%pstate; " \
1061 " sllx %3,32,%0; or %0,%2,%0; membar #Sync;lda [%0]%%asi,%0; " \
1062 " wrpr %1,0,%%pstate; andn %2,0x1f,%1; membar #Sync; " \
1063 " stxa %%g0,[%1] %5; membar #Sync" : "=&r" (_lda_v), "=&r" (_pstate) : \
1064 "r" ((unsigned long)(loc)), "r" (_loc_hi), \
1065 "r" (asi), "n" (ASI_DCACHE_TAG)); \
1066 } else { \
1067 __asm __volatile("wr %3,%%g0,%%asi; sllx %2,32,%0; " \
1068 " or %0,%1,%0; lda [%0]%%asi,%0" : "=&r" (_lda_v) : \
1069 "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
1070 } \
1071 _lda_v; \
1072 })
1073
1074 /* load signed int from alternate address space */
1075 #define ldswa(loc, asi) ({ \
1076 register int _lda_v, _loc_hi, _pstate; \
1077 _loc_hi = (((u_int64_t)loc)>>32); \
1078 if (PHYS_ASI(asi)) { \
1079 __asm __volatile("wr %4,%%g0,%%asi; rdpr %%pstate,%1;" \
1080 " andn %2,0x1f,%0; stxa %%g0,[%0] %5; wrpr %1,8,%%pstate; sllx %3,32,%0;" \
1081 " or %0,%2,%0; membar #Sync; ldswa [%0]%%asi,%0; wrpr %1,0,%%pstate; " \
1082 " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %5; membar #Sync" : \
1083 "=&r" (_lda_v), "=&r" (_pstate) : \
1084 "r" ((unsigned long)(loc)), "r" (_loc_hi), \
1085 "r" (asi), "n" (ASI_DCACHE_TAG)); \
1086 } else { \
1087 __asm __volatile("wr %3,%%g0,%%asi; sllx %2,32,%0; " \
1088 " or %0,%1,%0; ldswa [%0]%%asi,%0" : "=&r" (_lda_v) : \
1089 "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
1090 } \
1091 _lda_v; \
1092 })
1093 #endif /* __arch64__ */
1094
1095 #ifdef __arch64__
1096 /* load 64-bit int from alternate address space -- these should never be used */
1097 #define ldda(loc, asi) ({ \
1098 register long long _lda_v; \
1099 if (PHYS_ASI(asi)) { \
1100 __asm __volatile("wr %3,%%g0,%%asi; " \
1101 " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; " \
1102 " ldda [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; " \
1103 " stxa %%g0,[%1] %4; membar #Sync" : "=&r" (_lda_v), "=&r" (loc) : \
1104 "r" ((unsigned long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
1105 } else { \
1106 __asm __volatile("wr %2,%%g0,%%asi; ldda [%1]%%asi,%0" : "=r" (_lda_v) : \
1107 "r" ((unsigned long)(loc)), "r" (asi)); \
1108 } \
1109 _lda_v; \
1110 })
1111 #else
1112 /* load 64-bit int from alternate address space */
1113 #define ldda(loc, asi) ({ \
1114 register long long _lda_v, _loc_hi, _pstate; \
1115 _loc_hi = (((u_int64_t)loc)>>32); \
1116 if (PHYS_ASI(asi)) { \
1117 __asm __volatile("wr %4,%%g0,%%asi; rdpr %%pstate,%1;" \
1118 " andn %2,0x1f,%0; rdpr %%pstate,%1; stxa %%g0,[%0] %5; wrpr %1,8,%%pstate;" \
1119 " sllx %3,32,%0; or %0,%2,%0; membar #Sync; ldda [%0]%%asi,%0; wrpr %1,0,%%pstate; " \
1120 " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %5; membar #Sync" : \
1121 "=&r" (_lda_v), "=&r" (_pstate) : \
1122 "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi), "n" (ASI_DCACHE_TAG)); \
1123 } else { \
1124 __asm __volatile("wr %3,%%g0,%%asi; sllx %2,32,%0; " \
1125 " or %0,%1,%0; ldda [%0]%%asi,%0" : "=&r" (_lda_v) : \
1126 "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
1127 } \
1128 _lda_v; \
1129 })
1130 #endif
1131
1132 #ifdef __arch64__
1133 /* native load 64-bit int from alternate address space w/64-bit compiler*/
1134 #define ldxa(loc, asi) ({ \
1135 register unsigned long _lda_v; \
1136 if (PHYS_ASI(asi)) { \
1137 __asm __volatile("wr %3,%%g0,%%asi; "\
1138 " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; " \
1139 " ldxa [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; " \
1140 " stxa %%g0,[%1] %4; membar #Sync" : "=&r" (_lda_v), "=r" (loc) : \
1141 "r" ((unsigned long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
1142 } else { \
1143 __asm __volatile("wr %2,%%g0,%%asi; ldxa [%1]%%asi,%0" : "=r" (_lda_v) : \
1144 "r" ((unsigned long)(loc)), "r" (asi)); \
1145 } \
1146 _lda_v; \
1147 })
1148 #else
1149 /* native load 64-bit int from alternate address space w/32-bit compiler*/
1150 #define ldxa(loc, asi) ({ \
1151 register unsigned long _ldxa_lo, _ldxa_hi, _loc_hi; \
1152 _loc_hi = (((u_int64_t)loc)>>32); \
1153 if (PHYS_ASI(asi)) { \
1154 __asm __volatile("wr %4,%%g0,%%asi; " \
1155 " andn %2,0x1f,%0; rdpr %%pstate,%1; stxa %%g0,[%0] %5; " \
1156 " sllx %3,32,%0; wrpr %1,8,%%pstate; or %0,%2,%0; membar #Sync; ldxa [%0]%%asi,%0; " \
1157 " wrpr %1,0,%%pstate; andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %5; membar #Sync; " \
1158 " srlx %0,32,%1; srl %0,0,%0" : \
1159 "=&r" (_ldxa_lo), "=&r" (_ldxa_hi) : \
1160 "r" ((unsigned long)(loc)), "r" (_loc_hi), \
1161 "r" (asi), "n" (ASI_DCACHE_TAG)); \
1162 } else { \
1163 __asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; " \
1164 " or %0,%2,%0; ldxa [%0]%%asi,%0; srlx %0,32,%1; srl %0,0,%0;" : \
1165 "=&r" (_ldxa_lo), "=&r" (_ldxa_hi) : \
1166 "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
1167 } \
1168 ((((int64_t)_ldxa_hi)<<32)|_ldxa_lo); \
1169 })
1170 #endif
1171
1172
1173 /* store byte to alternate address space */
1174 #ifdef __arch64__
1175 #define stba(loc, asi, value) ({ \
1176 if (PHYS_ASI(asi)) { \
1177 __asm __volatile("wr %3,%%g0,%%asi; stba %1,[%2]%%asi;" \
1178 " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %4; membar #Sync" : "=&r" (loc) : \
1179 "r" ((int)(value)), "r" ((unsigned long)(loc)), \
1180 "r" (asi), "n" (ASI_DCACHE_TAG)); \
1181 } else { \
1182 __asm __volatile("wr %2,%%g0,%%asi; stba %0,[%1]%%asi" : : \
1183 "r" ((int)(value)), "r" ((unsigned long)(loc)), "r" (asi)); \
1184 } \
1185 })
1186 #else
1187 #define stba(loc, asi, value) ({ \
1188 register int _loc_hi, _pstate; \
1189 _loc_hi = (((u_int64_t)loc)>>32); \
1190 if (PHYS_ASI(asi)) { \
1191 __asm __volatile("wr %5,%%g0,%%asi; sllx %4,32,%0; rdpr %%pstate,%1;" \
1192 " or %3,%0,%0; wrpr %1,8,%%pstate; stba %2,[%0]%%asi; wrpr %1,0,%%pstate; " \
1193 " andn %0,0x1f,%1; membar #Sync; stxa %%g0,[%1] %6; membar #Sync" : \
1194 "=&r" (_loc_hi), "=&r" (_pstate) : \
1195 "r" ((int)(value)), "r" ((unsigned long)(loc)), \
1196 "r" (_loc_hi), "r" (asi), "n" (ASI_DCACHE_TAG)); \
1197 } else { \
1198 __asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; " \
1199 " or %2,%0,%0; stba %1,[%0]%%asi" : "=&r" (_loc_hi) : \
1200 "r" ((int)(value)), "r" ((unsigned long)(loc)), \
1201 "r" (_loc_hi), "r" (asi)); \
1202 } \
1203 })
1204 #endif
1205
1206 /* store half-word to alternate address space */
1207 #ifdef __arch64__
1208 #define stha(loc, asi, value) ({ \
1209 if (PHYS_ASI(asi)) { \
1210 __asm __volatile("wr %3,%%g0,%%asi; stha %1,[%2]%%asi;" \
1211 " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %4; membar #Sync" : "=&r" (loc) : \
1212 "r" ((int)(value)), "r" ((unsigned long)(loc)), \
1213 "r" (asi), "n" (ASI_DCACHE_TAG)); \
1214 } else { \
1215 __asm __volatile("wr %2,%%g0,%%asi; stha %0,[%1]%%asi" : : \
1216 "r" ((int)(value)), "r" ((unsigned long)(loc)), "r" (asi)); \
1217 } \
1218 })
1219 #else
1220 #define stha(loc, asi, value) ({ \
1221 register int _loc_hi, _pstate; \
1222 _loc_hi = (((u_int64_t)loc)>>32); \
1223 if (PHYS_ASI(asi)) { \
1224 __asm __volatile("wr %5,%%g0,%%asi; sllx %4,32,%0; rdpr %%pstate,%1;" \
1225 " or %3,%0,%0; wrpr %1,8,%%pstate; stha %2,[%0]%%asi; wrpr %1,0,%%pstate; " \
1226 " andn %0,0x1f,%1; membar #Sync; stxa %%g0,[%1] %6; membar #Sync" : \
1227 "=&r" (_loc_hi), "=&r" (_pstate) : \
1228 "r" ((int)(value)), "r" ((unsigned long)(loc)), \
1229 "r" (_loc_hi), "r" (asi), "n" (ASI_DCACHE_TAG)); \
1230 } else { \
1231 __asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; " \
1232 " or %2,%0,%0; stha %1,[%0]%%asi" : "=&r" (_loc_hi) : \
1233 "r" ((int)(value)), "r" ((unsigned long)(loc)), \
1234 "r" (_loc_hi), "r" (asi)); \
1235 } \
1236 })
1237 #endif
1238
1239 /* store int to alternate address space */
1240 #ifdef __arch64__
1241 #define sta(loc, asi, value) ({ \
1242 if (PHYS_ASI(asi)) { \
1243 __asm __volatile("wr %3,%%g0,%%asi; sta %1,[%2]%%asi;" \
1244 " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %4; membar #Sync" : "=&r" (loc) : \
1245 "r" ((int)(value)), "r" ((unsigned long)(loc)), \
1246 "r" (asi), "n" (ASI_DCACHE_TAG)); \
1247 } else { \
1248 __asm __volatile("wr %2,%%g0,%%asi; sta %0,[%1]%%asi" : : \
1249 "r" ((int)(value)), "r" ((unsigned long)(loc)), "r" (asi)); \
1250 } \
1251 })
1252 #else
1253 #define sta(loc, asi, value) ({ \
1254 register int _loc_hi, _pstate; \
1255 _loc_hi = (((u_int64_t)loc)>>32); \
1256 if (PHYS_ASI(asi)) { \
1257 __asm __volatile("wr %5,%%g0,%%asi; sllx %4,32,%0; rdpr %%pstate,%1;" \
1258 " or %3,%0,%0; wrpr %1,8,%%pstate; sta %2,[%0]%%asi; wrpr %1,0,%%pstate; " \
1259 " andn %0,0x1f,%1; membar #Sync; stxa %%g0,[%1] %6; membar #Sync" : \
1260 "=&r" (_loc_hi), "=&r" (_pstate) : \
1261 "r" ((int)(value)), "r" ((unsigned long)(loc)), \
1262 "r" (_loc_hi), "r" (asi), "n" (ASI_DCACHE_TAG)); \
1263 } else { \
1264 __asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; " \
1265 " or %2,%0,%0; sta %1,[%0]%%asi" : "=&r" (_loc_hi) : \
1266 "r" ((int)(value)), "r" ((unsigned long)(loc)), \
1267 "r" (_loc_hi), "r" (asi)); \
1268 } \
1269 })
1270 #endif
1271
1272 /* store 64-bit int to alternate address space */
1273 #ifdef __arch64__
1274 #define stda(loc, asi, value) ({ \
1275 if (PHYS_ASI(asi)) { \
1276 __asm __volatile("wr %3,%%g0,%%asi; stda %1,[%2]%%asi;" \
1277 " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %4; membar #Sync" : "=&r" (loc) : \
1278 "r" ((int)(value)), "r" ((unsigned long)(loc)), \
1279 "r" (asi), "n" (ASI_DCACHE_TAG)); \
1280 } else { \
1281 __asm __volatile("wr %2,%%g0,%%asi; stda %0,[%1]%%asi" : : \
1282 "r" ((long long)(value)), "r" ((unsigned long)(loc)), "r" (asi)); \
1283 } \
1284 })
1285 #else
1286 #define stda(loc, asi, value) ({ \
1287 register int _loc_hi, _pstate; \
1288 _loc_hi = (((u_int64_t)loc)>>32); \
1289 if (PHYS_ASI(asi)) { \
1290 __asm __volatile("wr %5,%%g0,%%asi; sllx %4,32,%0; rdpr %%pstate,%1; " \
1291 " or %3,%0,%0; wrpr %1,8,%%pstate; stda %2,[%0]%%asi; wrpr %1,0,%%pstate;" \
1292 " andn %0,0x1f,%1; membar #Sync; stxa %%g0,[%1] %6; membar #Sync" : \
1293 "=&r" (_loc_hi), "=&r" (_pstate) : \
1294 "r" ((long long)(value)), "r" ((unsigned long)(loc)), \
1295 "r" (_loc_hi), "r" (asi), "n" (ASI_DCACHE_TAG)); \
1296 } else { \
1297 __asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; " \
1298 " or %2,%0,%0; stda %1,[%0]%%asi" : "=&r" (_loc_hi) : \
1299 "r" ((long long)(value)), "r" ((unsigned long)(loc)), \
1300 "r" (_loc_hi), "r" (asi)); \
1301 } \
1302 })
1303 #endif
1304
1305 #ifdef __arch64__
1306 /* native store 64-bit int to alternate address space w/64-bit compiler*/
1307 #define stxa(loc, asi, value) ({ \
1308 if (PHYS_ASI(asi)) { \
1309 __asm __volatile("wr %3,%%g0,%%asi; stxa %1,[%2]%%asi;" \
1310 " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %4; membar #Sync" : "=&r" (loc) : \
1311 "r" ((int)(value)), "r" ((unsigned long)(loc)), \
1312 "r" (asi), "n" (ASI_DCACHE_TAG)); \
1313 } else { \
1314 __asm __volatile("wr %2,%%g0,%%asi; stxa %0,[%1]%%asi" : : \
1315 "r" ((unsigned long)(value)), "r" ((unsigned long)(loc)), "r" (asi)); \
} \
1316 })
1317 #else
1318 /* native store 64-bit int to alternate address space w/32-bit compiler*/
1319 #define stxa(loc, asi, value) ({ \
1320 int _stxa_lo, _stxa_hi, _loc_hi; \
1321 _stxa_lo = value; _stxa_hi = ((u_int64_t)value)>>32; \
1322 _loc_hi = (((u_int64_t)(u_long)loc)>>32); \
1323 if (PHYS_ASI(asi)) { \
1324 __asm __volatile("wr %7,%%g0,%%asi; sllx %4,32,%1; sllx %6,32,%0; " \
1325 " or %1,%3,%1; rdpr %%pstate,%3; or %0,%5,%0; wrpr %3,8,%%pstate; " \
1326 " stxa %1,[%0]%%asi; wrpr %3,0,%%pstate; " \
1327 " andn %0,0x1f,%1; membar #Sync; stxa %%g0,[%1] %8; membar #Sync": \
1328 "=&r" (_loc_hi), "=&r" (_stxa_hi), "=&r" ((int)(_stxa_lo)): \
1329 "r" ((int)(_stxa_lo)), "r" ((int)(_stxa_hi)), \
1330 "r" ((unsigned long)(loc)), "r" (_loc_hi), \
1331 "r" (asi), "n" (ASI_DCACHE_TAG)); \
1332 } else { \
1333 __asm __volatile("wr %6,%%g0,%%asi; sllx %3,32,%1; sllx %5,32,%0; " \
1334 " or %1,%2,%1; or %0,%4,%0; stxa %1,[%0]%%asi" : \
1335 "=&r" (_loc_hi), "=&r" (_stxa_hi) : \
1336 "r" ((int)(_stxa_lo)), "r" ((int)(_stxa_hi)), \
1337 "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
1338 } \
1339 })
1340 #endif
1341 #endif
1342
1343 /* flush address from data cache */
1344 #define flush(loc) ({ \
1345 __asm __volatile("flush %0" : : \
1346 "r" ((unsigned long)(loc))); \
1347 })
1348
1349 /* Flush a D$ line */
1350 #if 0
1351 #define flushline(loc) ({ \
1352 stxa(((paddr_t)loc)&(~0x1f), (ASI_DCACHE_TAG), 0); \
1353 membar_sync(); \
1354 })
1355 #else
1356 #define flushline(loc)
1357 #endif
1358
1359 /* The following two enable or disable the dcache in the LSU control register */
1360 #define dcenable() ({ \
1361 int res; \
1362 __asm __volatile("ldxa [%%g0] %1,%0; or %0,%2,%0; stxa %0,[%%g0] %1; membar #Sync" \
1363 : "r" (res) : "n" (ASI_MCCR), "n" (MCCR_DCACHE_EN)); \
1364 })
1365 #define dcdisable() ({ \
1366 int res; \
1367 __asm __volatile("ldxa [%%g0] %1,%0; andn %0,%2,%0; stxa %0,[%%g0] %1; membar #Sync" \
1368 : "r" (res) : "n" (ASI_MCCR), "n" (MCCR_DCACHE_EN)); \
1369 })
1370
1371 /*
1372 * SPARC V9 memory barrier instructions.
1373 */
1374 /* Make all stores complete before next store */
1375 #define membar_storestore() __asm __volatile("membar #StoreStore" : :)
1376 /* Make all loads complete before next store */
1377 #define membar_loadstore() __asm __volatile("membar #LoadStore" : :)
1378 /* Make all stores complete before next load */
1379 #define membar_storeload() __asm __volatile("membar #StoreLoad" : :)
1380 /* Make all loads complete before next load */
1381 #define membar_loadload() __asm __volatile("membar #LoadLoad" : :)
1382 /* Complete all outstanding memory operations and exceptions */
1383 #define membar_sync() __asm __volatile("membar #Sync" : :)
1384 /* Complete all outstanding memory operations */
1385 #define membar_memissue() __asm __volatile("membar #MemIssue" : :)
1386 /* Complete all outstanding stores before any new loads */
1387 #define membar_lookaside() __asm __volatile("membar #Lookaside" : :)
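
/*
 * Illustrative sketch (not part of the original header): e.g. when handing
 * a hypothetical buffer to another CPU, order the data stores before the
 * flag store:
 *
 *	buf->data = v;
 *	membar_storestore();
 *	buf->ready = 1;
 */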
1388
1389 #ifdef __arch64__
1390 /* read 64-bit %tick register */
1391 #define tick() ({ \
1392 register u_long _tick_tmp; \
1393 __asm __volatile("rdpr %%tick, %0" : "=r" (_tick_tmp) :); \
1394 _tick_tmp; \
1395 })
1396 #else
1397 /* read 64-bit %tick register on 32-bit system */
1398 #define tick() ({ \
1399 register int _tick_hi = 0, _tick_lo = 0; \
1400 __asm __volatile("rdpr %%tick, %1; srlx %1,32,%0; srl %1,0,%1 " \
1401 : "=&r" (_tick_hi), "=&r" (_tick_lo) : ); \
1402 (((u_int64_t)_tick_hi)<<32)|((u_int64_t)_tick_lo); \
1403 })
1404 #endif
1405
1406 extern void next_tick __P((long));
1407 #endif
1408