      1  1.18.2.1  mrg /*	$NetBSD: ctlreg.h,v 1.18.2.1 2000/07/18 16:23:22 mrg Exp $ */
      2       1.1  eeh 
      3       1.1  eeh /*
      4      1.11  eeh  * Copyright (c) 1996-1999 Eduardo Horvath
      5       1.1  eeh  *
      6       1.1  eeh  * Redistribution and use in source and binary forms, with or without
      7       1.1  eeh  * modification, are permitted provided that the following conditions
      8       1.1  eeh  * are met:
      9       1.1  eeh  * 1. Redistributions of source code must retain the above copyright
     10       1.1  eeh  *    notice, this list of conditions and the following disclaimer.
     11      1.11  eeh  *
     12      1.11  eeh  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR  ``AS IS'' AND
     13       1.1  eeh  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
     14       1.1  eeh  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
     15      1.11  eeh  * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR  BE LIABLE
     16       1.1  eeh  * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
     17       1.1  eeh  * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
     18       1.1  eeh  * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
     19       1.1  eeh  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
     20       1.1  eeh  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
     21       1.1  eeh  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
     22       1.1  eeh  * SUCH DAMAGE.
     23       1.1  eeh  *
     24       1.1  eeh  */
     25       1.1  eeh 
     26       1.1  eeh /*
     27       1.1  eeh  * Sun 4u control registers. (includes address space definitions
     28       1.1  eeh  * and some registers in control space).
     29       1.1  eeh  */
     30       1.1  eeh 
     31       1.1  eeh /*
     32       1.1  eeh  * The Alternate address spaces.
     33       1.1  eeh  *
     34       1.1  eeh  * 0x00-0x7f are privileged
     35       1.1  eeh  * 0x80-0xff can be used by users
     36       1.1  eeh  */
     37       1.1  eeh 
     38       1.1  eeh #define ASI_LITTLE	0x08		/* This bit should make an ASI little endian */
     39       1.1  eeh 
     40       1.1  eeh #define ASI_NUCLEUS			0x04	/* [4u] kernel address space */
     41       1.1  eeh #define ASI_NUCLEUS_LITTLE		0x0c	/* [4u] kernel address space, little endian */
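/*
 * A quick illustration of the ASI_LITTLE convention: each little-endian
 * ASI is just the corresponding big-endian ASI with the ASI_LITTLE bit
 * ORed in, e.g.
 *
 *	ASI_NUCLEUS | ASI_LITTLE  ==  0x04 | 0x08  ==  0x0c  ==  ASI_NUCLEUS_LITTLE
 */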
     42       1.1  eeh 
     43       1.1  eeh #define ASI_AS_IF_USER_PRIMARY		0x10	/* [4u] primary user address space */
     44       1.1  eeh #define ASI_AS_IF_USER_SECONDARY	0x11	/* [4u] secondary user address space */
     45       1.1  eeh 
     46       1.1  eeh #define ASI_PHYS_CACHED			0x14	/* [4u] MMU bypass to main memory */
     47       1.1  eeh #define ASI_PHYS_NON_CACHED		0x15	/* [4u] MMU bypass to I/O location */
     48       1.1  eeh 
     49       1.1  eeh #define ASI_AS_IF_USER_PRIMARY_LITTLE	0x18	/* [4u] primary user address space, little endian  */
      50       1.1  eeh #define ASI_AS_IF_USER_SECONDARY_LITTLE	0x19	/* [4u] secondary user address space, little endian  */
     51       1.1  eeh 
     52       1.1  eeh #define ASI_PHYS_CACHED_LITTLE		0x1c	/* [4u] MMU bypass to main memory, little endian */
     53       1.1  eeh #define ASI_PHYS_NON_CACHED_LITTLE	0x1d	/* [4u] MMU bypass to I/O location, little endian */
     54       1.1  eeh 
     55       1.1  eeh #define ASI_NUCLEUS_QUAD_LDD		0x24	/* [4u] use w/LDDA to load 128-bit item */
     56       1.1  eeh #define ASI_NUCLEUS_QUAD_LDD_LITTLE	0x2c	/* [4u] use w/LDDA to load 128-bit item, little endian */
     57       1.1  eeh 
     58       1.1  eeh #define ASI_FLUSH_D_PAGE_PRIMARY	0x38	/* [4u] flush D-cache page using primary context */
     59       1.1  eeh #define ASI_FLUSH_D_PAGE_SECONDARY	0x39	/* [4u] flush D-cache page using secondary context */
     60       1.1  eeh #define ASI_FLUSH_D_CTX_PRIMARY		0x3a	/* [4u] flush D-cache context using primary context */
     61       1.1  eeh #define ASI_FLUSH_D_CTX_SECONDARY	0x3b	/* [4u] flush D-cache context using secondary context */
     62       1.6  eeh 
     63       1.6  eeh #define ASI_LSU_CONTROL_REGISTER	0x45	/* [4u] load/store unit control register */
     64       1.6  eeh 
     65       1.1  eeh #define ASI_DCACHE_DATA			0x46	/* [4u] diagnostic access to D-cache data RAM */
     66       1.1  eeh #define ASI_DCACHE_TAG			0x47	/* [4u] diagnostic access to D-cache tag RAM */
     67       1.1  eeh 
     68       1.1  eeh #define ASI_INTR_DISPATCH_STATUS	0x48	/* [4u] interrupt dispatch status register */
     69       1.1  eeh #define ASI_INTR_RECEIVE		0x49	/* [4u] interrupt receive status register */
     70       1.1  eeh #define ASI_MID_REG			0x4a	/* [4u] hardware config and MID */
     71       1.1  eeh #define ASI_ERROR_EN_REG		0x4b	/* [4u] asynchronous error enables */
     72       1.1  eeh #define ASI_AFSR			0x4c	/* [4u] asynchronous fault status register */
     73       1.1  eeh #define ASI_AFAR			0x4d	/* [4u] asynchronous fault address register */
     74       1.1  eeh 
      75       1.1  eeh #define ASI_ICACHE_DATA			0x66	/* [4u] diagnostic access to I-cache data RAM */
      76       1.1  eeh #define ASI_ICACHE_TAG			0x67	/* [4u] diagnostic access to I-cache tag RAM */
      77       1.1  eeh #define ASI_FLUSH_I_PAGE_PRIMARY	0x68	/* [4u] flush I-cache page using primary context */
      78       1.1  eeh #define ASI_FLUSH_I_PAGE_SECONDARY	0x69	/* [4u] flush I-cache page using secondary context */
      79       1.1  eeh #define ASI_FLUSH_I_CTX_PRIMARY		0x6a	/* [4u] flush I-cache context using primary context */
      80       1.1  eeh #define ASI_FLUSH_I_CTX_SECONDARY	0x6b	/* [4u] flush I-cache context using secondary context */
     81       1.1  eeh 
     82       1.1  eeh #define ASI_BLOCK_AS_IF_USER_PRIMARY	0x70	/* [4u] primary user address space, block loads/stores */
     83       1.1  eeh #define ASI_BLOCK_AS_IF_USER_SECONDARY	0x71	/* [4u] secondary user address space, block loads/stores */
     84       1.1  eeh 
     85       1.1  eeh #define ASI_ECACHE_DIAG			0x76	/* [4u] diag access to E-cache tag and data */
      86       1.1  eeh #define ASI_DATAPATH_ERR_REG_WRITE	0x77	/* [4u] write access to datapath error registers (ASI reused) */
     87       1.1  eeh 
     88       1.1  eeh #define ASI_BLOCK_AS_IF_USER_PRIMARY_LITTLE	0x78	/* [4u] primary user address space, block loads/stores */
     89       1.1  eeh #define ASI_BLOCK_AS_IF_USER_SECONDARY_LITTLE	0x79	/* [4u] secondary user address space, block loads/stores */
     90       1.1  eeh 
     91       1.1  eeh #define ASI_INTERRUPT_RECEIVE_DATA	0x7f	/* [4u] interrupt receive data registers {0,1,2} */
     92       1.1  eeh #define ASI_DATAPATH_ERR_REG_READ	0x7f	/* [4u] read access to datapath error registers (ASI reused) */
     93       1.1  eeh 
     94       1.1  eeh #define ASI_PRIMARY			0x80	/* [4u] primary address space */
     95       1.1  eeh #define ASI_SECONDARY			0x81	/* [4u] secondary address space */
     96       1.1  eeh #define ASI_PRIMARY_NO_FAULT		0x82	/* [4u] primary address space, no fault */
     97       1.1  eeh #define ASI_SECONDARY_NO_FAULT		0x83	/* [4u] secondary address space, no fault */
     98       1.1  eeh 
     99       1.1  eeh #define ASI_PRIMARY_LITTLE		0x88	/* [4u] primary address space, little endian */
    100       1.1  eeh #define ASI_SECONDARY_LITTLE		0x89	/* [4u] secondary address space, little endian */
    101       1.1  eeh #define ASI_PRIMARY_NO_FAULT_LITTLE	0x8a	/* [4u] primary address space, no fault, little endian */
    102       1.1  eeh #define ASI_SECONDARY_NO_FAULT_LITTLE	0x8b	/* [4u] secondary address space, no fault, little endian */
    103       1.1  eeh 
    104       1.1  eeh #define ASI_PST8_PRIMARY		0xc0	/* [VIS] Eight 8-bit partial store, primary */
    105       1.1  eeh #define ASI_PST8_SECONDARY		0xc1	/* [VIS] Eight 8-bit partial store, secondary */
    106       1.1  eeh #define ASI_PST16_PRIMARY		0xc2	/* [VIS] Four 16-bit partial store, primary */
     107       1.1  eeh #define ASI_PST16_SECONDARY		0xc3	/* [VIS] Four 16-bit partial store, secondary */
    108       1.1  eeh #define ASI_PST32_PRIMARY		0xc4	/* [VIS] Two 32-bit partial store, primary */
    109       1.1  eeh #define ASI_PST32_SECONDARY		0xc5	/* [VIS] Two 32-bit partial store, secondary */
    110       1.1  eeh 
    111       1.1  eeh #define ASI_PST8_PRIMARY_LITTLE		0xc8	/* [VIS] Eight 8-bit partial store, primary, little endian */
    112       1.1  eeh #define ASI_PST8_SECONDARY_LITTLE	0xc9	/* [VIS] Eight 8-bit partial store, secondary, little endian */
    113       1.1  eeh #define ASI_PST16_PRIMARY_LITTLE	0xca	/* [VIS] Four 16-bit partial store, primary, little endian */
     114       1.1  eeh #define ASI_PST16_SECONDARY_LITTLE	0xcb	/* [VIS] Four 16-bit partial store, secondary, little endian */
    115       1.1  eeh #define ASI_PST32_PRIMARY_LITTLE	0xcc	/* [VIS] Two 32-bit partial store, primary, little endian */
    116       1.1  eeh #define ASI_PST32_SECONDARY_LITTLE	0xcd	/* [VIS] Two 32-bit partial store, secondary, little endian */
    117       1.1  eeh 
    118       1.1  eeh #define ASI_FL8_PRIMARY			0xd0	/* [VIS] One 8-bit load/store floating, primary */
    119       1.1  eeh #define ASI_FL8_SECONDARY		0xd1	/* [VIS] One 8-bit load/store floating, secondary */
    120       1.1  eeh #define ASI_FL16_PRIMARY		0xd2	/* [VIS] One 16-bit load/store floating, primary */
    121       1.1  eeh #define ASI_FL16_SECONDARY		0xd3	/* [VIS] One 16-bit load/store floating, secondary */
    122       1.1  eeh 
    123       1.1  eeh #define ASI_FL8_PRIMARY_LITTLE		0xd8	/* [VIS] One 8-bit load/store floating, primary, little endian */
    124       1.1  eeh #define ASI_FL8_SECONDARY_LITTLE	0xd9	/* [VIS] One 8-bit load/store floating, secondary, little endian */
    125       1.1  eeh #define ASI_FL16_PRIMARY_LITTLE		0xda	/* [VIS] One 16-bit load/store floating, primary, little endian */
    126       1.1  eeh #define ASI_FL16_SECONDARY_LITTLE	0xdb	/* [VIS] One 16-bit load/store floating, secondary, little endian */
    127       1.1  eeh 
    128       1.1  eeh #define ASI_BLOCK_COMMIT_PRIMARY	0xe0	/* [4u] block store with commit, primary */
    129       1.1  eeh #define ASI_BLOCK_COMMIT_SECONDARY	0xe1	/* [4u] block store with commit, secondary */
    130       1.1  eeh #define ASI_BLOCK_PRIMARY		0xf0	/* [4u] block load/store, primary */
    131       1.1  eeh #define ASI_BLOCK_SECONDARY		0xf1	/* [4u] block load/store, secondary */
    132       1.1  eeh #define ASI_BLOCK_PRIMARY_LITTLE	0xf8	/* [4u] block load/store, primary, little endian */
    133       1.1  eeh #define ASI_BLOCK_SECONDARY_LITTLE	0xf9	/* [4u] block load/store, secondary, little endian */
    134       1.1  eeh 
    135       1.1  eeh 
    136       1.1  eeh /*
    137       1.1  eeh  * These are the shorter names used by Solaris
    138       1.1  eeh  */
    139       1.1  eeh 
    140       1.1  eeh #define ASI_N		ASI_NUCLEUS
    141       1.1  eeh #define ASI_NL		ASI_NUCLEUS_LITTLE
    142       1.1  eeh #define ASI_AIUP	ASI_AS_IF_USER_PRIMARY
    143       1.1  eeh #define ASI_AIUS	ASI_AS_IF_USER_SECONDARY
    144       1.1  eeh #define ASI_AIUPL	ASI_AS_IF_USER_PRIMARY_LITTLE
    145       1.1  eeh #define ASI_AIUSL	ASI_AS_IF_USER_SECONDARY_LITTLE
    146       1.1  eeh #define ASI_P		ASI_PRIMARY
    147       1.1  eeh #define ASI_S		ASI_SECONDARY
    148       1.1  eeh #define ASI_PNF		ASI_PRIMARY_NO_FAULT
    149       1.1  eeh #define ASI_SNF		ASI_SECONDARY_NO_FAULT
    150       1.1  eeh #define ASI_PL		ASI_PRIMARY_LITTLE
    151       1.1  eeh #define ASI_SL		ASI_SECONDARY_LITTLE
    152       1.1  eeh #define ASI_PNFL	ASI_PRIMARY_NO_FAULT_LITTLE
    153       1.1  eeh #define ASI_SNFL	ASI_SECONDARY_NO_FAULT_LITTLE
    154       1.1  eeh #define ASI_BLK_AIUP	ASI_BLOCK_AS_IF_USER_PRIMARY
    155       1.1  eeh #define ASI_BLK_AIUPL	ASI_BLOCK_AS_IF_USER_PRIMARY_LITTLE
    156       1.1  eeh #define ASI_BLK_AIUS	ASI_BLOCK_AS_IF_USER_SECONDARY
    157       1.1  eeh #define ASI_BLK_AIUSL	ASI_BLOCK_AS_IF_USER_SECONDARY_LITTLE
    158       1.1  eeh #define ASI_BLK_COMMIT_P		ASI_BLOCK_COMMIT_PRIMARY
    159       1.1  eeh #define ASI_BLK_COMMIT_PRIMARY		ASI_BLOCK_COMMIT_PRIMARY
    160       1.1  eeh #define ASI_BLK_COMMIT_S		ASI_BLOCK_COMMIT_SECONDARY
    161       1.1  eeh #define ASI_BLK_COMMIT_SECONDARY	ASI_BLOCK_COMMIT_SECONDARY
    162       1.1  eeh #define ASI_BLK_P			ASI_BLOCK_PRIMARY
    163       1.1  eeh #define ASI_BLK_PL			ASI_BLOCK_PRIMARY_LITTLE
    164       1.1  eeh #define ASI_BLK_S			ASI_BLOCK_SECONDARY
    165       1.1  eeh #define ASI_BLK_SL			ASI_BLOCK_SECONDARY_LITTLE
    166       1.1  eeh 
    167      1.14  eeh #define PHYS_ASI(x)	(((x) | 0x09) == 0x1d)
    168      1.14  eeh #define LITTLE_ASI(x)	((x) & ASI_LITTLE)
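/*
 * Worked examples of the two predicates above:
 *
 *	PHYS_ASI(ASI_PHYS_CACHED)		(0x14 | 0x09) == 0x1d	-> true
 *	PHYS_ASI(ASI_PHYS_NON_CACHED_LITTLE)	(0x1d | 0x09) == 0x1d	-> true
 *	PHYS_ASI(ASI_PRIMARY)			(0x80 | 0x09) == 0x89	-> false
 *	LITTLE_ASI(ASI_NUCLEUS_LITTLE)		(0x0c & 0x08)		-> true
 */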
    169      1.14  eeh 
    170       1.1  eeh /*
    171       1.1  eeh  * The following are 4u control registers
    172       1.1  eeh  */
    173      1.18  eeh 
    174      1.18  eeh 
    175      1.18  eeh /* Get the CPU's UPAID */
    176      1.18  eeh #define	UPA_CR_MID(x)	(((x)>>17)&0x1f)
    177      1.18  eeh #define	CPU_UPAID	UPA_CR_MID(ldxa(0, ASI_MID_REG))
    178       1.1  eeh 
    179       1.1  eeh /*
    180       1.1  eeh  * [4u] MMU and Cache Control Register (MCCR)
    181       1.1  eeh  * use ASI = 0x45
    182       1.1  eeh  */
    183       1.6  eeh #define ASI_MCCR	ASI_LSU_CONTROL_REGISTER
    184       1.1  eeh #define MCCR		0x00
    185       1.1  eeh 
    186       1.1  eeh /* MCCR Bits and their meanings */
    187       1.1  eeh #define MCCR_DMMU_EN	0x08
    188       1.1  eeh #define MCCR_IMMU_EN	0x04
    189       1.1  eeh #define MCCR_DCACHE_EN	0x02
    190       1.1  eeh #define MCCR_ICACHE_EN	0x01
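/*
 * A minimal usage sketch (relying on the ldxa()/stxa() helpers defined
 * later in this file): read the LSU control register and enable both
 * caches.
 *
 *	u_int64_t mccr = ldxa(MCCR, ASI_MCCR);
 *	stxa(MCCR, ASI_MCCR, mccr | MCCR_DCACHE_EN | MCCR_ICACHE_EN);
 */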
    191       1.1  eeh 
    192       1.1  eeh 
    193       1.1  eeh /*
    194       1.1  eeh  * MMU control registers
    195       1.1  eeh  */
    196       1.1  eeh 
    197       1.1  eeh /* Choose an MMU */
    198       1.1  eeh #define ASI_DMMU		0x58
    199       1.1  eeh #define ASI_IMMU		0x50
    200       1.1  eeh 
    201       1.1  eeh /* Other assorted MMU ASIs */
    202       1.1  eeh #define ASI_IMMU_8KPTR		0x51
    203       1.1  eeh #define ASI_IMMU_64KPTR		0x52
    204       1.1  eeh #define ASI_IMMU_DATA_IN	0x54
    205       1.1  eeh #define ASI_IMMU_TLB_DATA	0x55
    206       1.1  eeh #define ASI_IMMU_TLB_TAG	0x56
    207       1.1  eeh #define ASI_DMMU_8KPTR		0x59
    208       1.1  eeh #define ASI_DMMU_64KPTR		0x5a
    209       1.1  eeh #define ASI_DMMU_DATA_IN	0x5c
    210       1.1  eeh #define ASI_DMMU_TLB_DATA	0x5d
    211       1.1  eeh #define ASI_DMMU_TLB_TAG	0x5e
    212       1.1  eeh 
    213       1.1  eeh /*
    214       1.1  eeh  * The following are the control registers
    215       1.1  eeh  * They work on both MMUs unless noted.
    216       1.1  eeh  *
    217       1.1  eeh  * Register contents are defined later on individual registers.
    218       1.1  eeh  */
    219       1.1  eeh #define TSB_TAG_TARGET		0x0
    220       1.1  eeh #define TLB_DATA_IN		0x0
    221       1.1  eeh #define CTX_PRIMARY		0x08	/* primary context -- DMMU only */
    222       1.1  eeh #define CTX_SECONDARY		0x10	/* secondary context -- DMMU only */
    223       1.1  eeh #define SFSR			0x18
    224       1.1  eeh #define SFAR			0x20	/* fault address -- DMMU only */
    225       1.1  eeh #define TSB			0x28
    226       1.1  eeh #define TLB_TAG_ACCESS		0x30
    227       1.1  eeh #define VIRTUAL_WATCHPOINT	0x38
    228       1.1  eeh #define PHYSICAL_WATCHPOINT	0x40
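/*
 * Usage sketch: these offsets are addresses within ASI_DMMU/ASI_IMMU, so
 * with the ldxa() helper defined later in this file one can do e.g.
 *
 *	int ctx      = ldxa(CTX_PRIMARY, ASI_DMMU);	(current primary context)
 *	u_int64_t va = ldxa(SFAR, ASI_DMMU);		(VA of the last data fault)
 */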
    229       1.1  eeh 
    230       1.1  eeh /* Tag Target bits */
    231       1.1  eeh #define TAG_TARGET_VA_MASK	0x03ffffffffffffffffLL
    232       1.1  eeh #define TAG_TARGET_VA(x)	(((x)<<22)&TAG_TARGET_VA_MASK)
    233       1.1  eeh #define TAG_TARGET_CONTEXT(x)	((x)>>48)
    234       1.1  eeh #define TAG_TARGET(c,v)		((((uint64_t)c)<<48)|(((uint64_t)v)&TAG_TARGET_VA_MASK))
    235       1.1  eeh 
    236       1.1  eeh /* SFSR bits for both D_SFSR and I_SFSR */
    237       1.1  eeh #define SFSR_ASI(x)		((x)>>16)
     238       1.1  eeh #define SFSR_FT_VA_OOR_2	0x02000 /* IMMU: jumpl or return to unsupported VA */
    239       1.1  eeh #define SFSR_FT_VA_OOR_1	0x01000 /* fault at unsupported VA */
    240       1.1  eeh #define SFSR_FT_NFO		0x00800	/* DMMU: Access to page marked NFO */
    241       1.1  eeh #define SFSR_ILL_ASI		0x00400	/* DMMU: Illegal (unsupported) ASI */
    242       1.1  eeh #define SFSR_FT_IO_ATOMIC	0x00200	/* DMMU: Atomic access to noncacheable page */
    243       1.1  eeh #define SFSR_FT_ILL_NF		0x00100	/* DMMU: NF load or flush to page marked E (has side effects) */
    244       1.1  eeh #define SFSR_FT_PRIV		0x00080	/* Privilege violation */
     245       1.1  eeh #define SFSR_FT_E		0x00040	/* DMMU: value of E bit associated with the faulting address */
    246       1.1  eeh #define SFSR_CTXT(x)		(((x)>>4)&0x3)
    247       1.1  eeh #define SFSR_CTXT_IS_PRIM(x)	(SFSR_CTXT(x)==0x00)
    248       1.1  eeh #define SFSR_CTXT_IS_SECOND(x)	(SFSR_CTXT(x)==0x01)
    249       1.1  eeh #define SFSR_CTXT_IS_NUCLEUS(x)	(SFSR_CTXT(x)==0x02)
    250       1.1  eeh #define SFSR_PRIV		0x00008	/* value of PSTATE.PRIV for faulting access */
    251       1.1  eeh #define SFSR_W			0x00004 /* DMMU: attempted write */
     252       1.1  eeh #define SFSR_OW			0x00002 /* Overwrite; previous fault was still valid */
    253       1.1  eeh #define SFSR_FV			0x00001	/* Fault is valid */
    254       1.1  eeh #define SFSR_FT	(SFSR_FT_VA_OOR_2|SFSR_FT_VA_OOR_1|SFSR_FT_NFO|SFSR_ILL_ASI|SFSR_FT_IO_ATOMIC|SFSR_FT_ILL_NF|SFSR_FT_PRIV)
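/*
 * Sketch of decoding a D-SFSR value (read with the ldxa() helper defined
 * later in this file):
 *
 *	u_int64_t sfsr = ldxa(SFSR, ASI_DMMU);
 *	if (sfsr & SFSR_FV) {
 *		int ft       = sfsr & SFSR_FT;		(fault type bits)
 *		int asi      = SFSR_ASI(sfsr);		(ASI of the faulting access)
 *		int waswrite = sfsr & SFSR_W;		(non-zero on a store)
 *	}
 */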
    255       1.1  eeh 
    256       1.3  eeh #if 0
    257       1.3  eeh /* Old bits */
    258       1.1  eeh #define SFSR_BITS "\40\16VAT\15VAD\14NFO\13ASI\12A\11NF\10PRIV\7E\6NUCLEUS\5SECONDCTX\4PRIV\3W\2OW\1FV"
    259       1.3  eeh #else
    260       1.3  eeh /* New bits */
    261       1.3  eeh #define SFSR_BITS "\177\20" \
    262       1.3  eeh 	"f\20\30ASI\0" "b\16VAT\0" "b\15VAD\0" "b\14NFO\0" "b\13ASI\0" "b\12A\0" "b\11NF\0" "b\10PRIV\0" \
    263       1.3  eeh 	 "b\7E\0" "b\6NUCLEUS\0" "b\5SECONDCTX\0" "b\4PRIV\0" "b\3W\0" "b\2OW\0" "b\1FV\0"
    264       1.3  eeh #endif
    265       1.3  eeh 
    266       1.3  eeh /* ASFR bits */
    267       1.3  eeh #define ASFR_ME			0x100000000LL
    268       1.3  eeh #define ASFR_PRIV		0x080000000LL
    269       1.3  eeh #define ASFR_ISAP		0x040000000LL
    270       1.3  eeh #define ASFR_ETP		0x020000000LL
    271       1.3  eeh #define ASFR_IVUE		0x010000000LL
    272       1.3  eeh #define ASFR_TO			0x008000000LL
    273       1.3  eeh #define ASFR_BERR		0x004000000LL
    274       1.3  eeh #define ASFR_LDP		0x002000000LL
    275       1.3  eeh #define ASFR_CP			0x001000000LL
    276       1.3  eeh #define ASFR_WP			0x000800000LL
    277       1.3  eeh #define ASFR_EDP		0x000400000LL
    278       1.3  eeh #define ASFR_UE			0x000200000LL
    279       1.3  eeh #define ASFR_CE			0x000100000LL
    280       1.3  eeh #define ASFR_ETS		0x0000f0000LL
    281       1.3  eeh #define ASFT_P_SYND		0x00000ffffLL
    282       1.3  eeh 
    283       1.3  eeh #define AFSR_BITS "\177\20" \
    284       1.3  eeh         "b\40ME\0"      "b\37PRIV\0"    "b\36ISAP\0"    "b\35ETP\0" \
    285       1.3  eeh         "b\34IVUE\0"    "b\33TO\0"      "b\32BERR\0"    "b\31LDP\0" \
    286       1.3  eeh         "b\30CP\0"      "b\27WP\0"      "b\26EDP\0"     "b\25UE\0" \
    287       1.3  eeh         "b\24CE\0"      "f\20\4ETS\0"   "f\0\20P_SYND\0"
    288       1.3  eeh 
    289       1.1  eeh /*
    290       1.1  eeh  * Here's the spitfire TSB control register bits.
    291       1.1  eeh  *
    292       1.1  eeh  * Each TSB entry is 16-bytes wide.  The TSB must be size aligned
    293       1.1  eeh  */
    294       1.1  eeh #define TSB_SIZE_512		0x0	/* 8kB, etc. */
    295       1.1  eeh #define TSB_SIZE_1K		0x01
    296       1.1  eeh #define TSB_SIZE_2K		0x02
    297       1.1  eeh #define TSB_SIZE_4K		0x03
    298       1.1  eeh #define	TSB_SIZE_8K		0x04
    299       1.1  eeh #define TSB_SIZE_16K		0x05
    300       1.1  eeh #define TSB_SIZE_32K		0x06
    301       1.1  eeh #define TSB_SIZE_64K		0x07
    302       1.1  eeh #define TSB_SPLIT		0x1000
    303       1.1  eeh #define TSB_BASE		0xffffffffffffe000
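/*
 * Example: TSB_SIZE_512 means 512 entries * 16 bytes = 8kB (each step up
 * doubles that), and the base must be aligned to the TSB size.  A register
 * value might be assembled as (tsb_va being a suitably aligned address):
 *
 *	u_int64_t tsb_reg = ((u_int64_t)tsb_va & TSB_BASE) | TSB_SIZE_512;
 */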
    304       1.1  eeh 
    305       1.1  eeh /*  TLB Tag Access bits */
    306       1.1  eeh #define TLB_TAG_ACCESS_VA	0xffffffffffffe000
    307       1.1  eeh #define TLB_TAG_ACCESS_CTX	0x0000000000001fff
    308       1.1  eeh 
     309       1.1  eeh /*
     310       1.1  eeh  * TLB demap registers.  TTEs are defined in v9pte.h
     311       1.1  eeh  *
     312       1.1  eeh  * Use the address space to select between IMMU and DMMU.
     313       1.1  eeh  * A demap is triggered by the store itself: the store address
     314       1.1  eeh  * supplies the VA to demap together with the type and
     315       1.1  eeh  * context-select bits defined below; the store data is ignored.
     316       1.1  eeh  *
     317       1.1  eeh  * The DEMAP_CTX_<> operations ignore the VA portion of the
     318       1.1  eeh  * address and demap the entire context.
     319       1.1  eeh  *
     320       1.1  eeh  */
    321       1.1  eeh #define ASI_IMMU_DEMAP			0x57	/* [4u] IMMU TLB demap */
     322       1.1  eeh #define ASI_DMMU_DEMAP			0x5f	/* [4u] DMMU TLB demap */
    323       1.1  eeh 
    324       1.1  eeh #define DEMAP_PAGE_NUCLEUS		((0x02)<<4)	/* Demap page from kernel AS */
    325       1.1  eeh #define DEMAP_PAGE_PRIMARY		((0x00)<<4)	/* Demap a page from primary CTXT */
    326       1.1  eeh #define DEMAP_PAGE_SECONDARY		((0x01)<<4)	/* Demap page from secondary CTXT (DMMU only) */
    327       1.1  eeh #define DEMAP_CTX_NUCLEUS		((0x06)<<4)	/* Demap all of kernel CTXT */
    328       1.1  eeh #define DEMAP_CTX_PRIMARY		((0x04)<<4)	/* Demap all of primary CTXT */
    329       1.1  eeh #define DEMAP_CTX_SECONDARY		((0x05)<<4)	/* Demap all of secondary CTXT */
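/*
 * Sketch of issuing a demap with the stxa() helper defined later in this
 * file; `va' here is the virtual address being flushed, and the store
 * data is ignored:
 *
 *	stxa((va & ~0x1fffULL) | DEMAP_PAGE_PRIMARY, ASI_DMMU_DEMAP, 0);
 *	__asm __volatile("membar #Sync" : : : "memory");
 */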
    330       1.1  eeh 
    331       1.1  eeh /*
    332       1.1  eeh  * Interrupt registers.  This really gets hairy.
    333       1.1  eeh  */
    334       1.1  eeh 
     335       1.1  eeh /* IRSR -- Interrupt Receive Status Register */
    336       1.1  eeh #define ASI_IRSR	0x49
    337       1.1  eeh #define IRSR		0x00
    338      1.13  mrg #define IRSR_BUSY	0x020
     339      1.13  mrg #define IRSR_MID(x)	((x)&0x1f)
    340       1.1  eeh 
    341       1.1  eeh /* IRDR -- Interrupt Receive Data Registers */
    342       1.1  eeh #define ASI_IRDR	0x7f
    343       1.1  eeh #define IRDR_0H		0x40
    344       1.1  eeh #define IRDR_0L		0x48	/* unimplemented */
    345       1.1  eeh #define IRDR_1H		0x50
    346       1.1  eeh #define IRDR_1L		0x58	/* unimplemented */
    347       1.1  eeh #define IRDR_2H		0x60
    348       1.1  eeh #define IRDR_2L		0x68	/* unimplemented */
    349       1.1  eeh #define IRDR_3H		0x70	/* unimplemented */
    350       1.1  eeh #define IRDR_3L		0x78	/* unimplemented */
    351       1.1  eeh 
    352       1.1  eeh /* SOFTINT ASRs */
    353       1.1  eeh #define SET_SOFTINT	%asr20	/* Sets these bits */
    354       1.1  eeh #define CLEAR_SOFTINT	%asr21	/* Clears these bits */
    355       1.1  eeh #define SOFTINT		%asr22	/* Reads the register */
    356       1.9  eeh #define TICK_CMPR	%asr23
    357       1.1  eeh 
    358       1.1  eeh #define	TICK_INT	0x01	/* level-14 clock tick */
    359       1.1  eeh #define SOFTINT1	(0x1<<1)
    360       1.1  eeh #define SOFTINT2	(0x1<<2)
    361       1.1  eeh #define SOFTINT3	(0x1<<3)
    362       1.1  eeh #define SOFTINT4	(0x1<<4)
    363       1.1  eeh #define SOFTINT5	(0x1<<5)
    364       1.1  eeh #define SOFTINT6	(0x1<<6)
    365       1.1  eeh #define SOFTINT7	(0x1<<7)
    366       1.1  eeh #define SOFTINT8	(0x1<<8)
    367       1.1  eeh #define SOFTINT9	(0x1<<9)
    368       1.1  eeh #define SOFTINT10	(0x1<<10)
    369       1.1  eeh #define SOFTINT11	(0x1<<11)
    370       1.1  eeh #define SOFTINT12	(0x1<<12)
    371       1.1  eeh #define SOFTINT13	(0x1<<13)
    372       1.1  eeh #define SOFTINT14	(0x1<<14)
    373       1.1  eeh #define SOFTINT15	(0x1<<15)
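/*
 * Sketch of posting and clearing a soft interrupt from C; SET_SOFTINT and
 * CLEAR_SOFTINT are %asr20/%asr21, so from inline assembly this looks like:
 *
 *	__asm __volatile("wr %0, 0, %%asr20" : : "r" (SOFTINT4));	(post level 4)
 *	__asm __volatile("wr %0, 0, %%asr21" : : "r" (SOFTINT4));	(clear it again)
 */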
    374       1.1  eeh 
    375       1.1  eeh /* Interrupt Dispatch -- usually reserved for cross-calls */
    376       1.1  eeh #define ASR_IDSR	0x48 /* Interrupt dispatch status reg */
    377       1.1  eeh #define IDSR		0x00
    378       1.1  eeh #define IDSR_NACK	0x02
    379       1.1  eeh #define IDSR_BUSY	0x01
    380       1.1  eeh 
    381       1.1  eeh #define ASI_INTERRUPT_DISPATCH		0x77	/* [4u] spitfire interrupt dispatch regs */
    382       1.1  eeh #define IDCR(x)		(((x)<<14)&0x70)	/* Store anything to this address to dispatch crosscall to CPU (x) */
    383       1.1  eeh #define IDDR_0H		0x40			/* Store data to send in these regs */
    384       1.1  eeh #define IDDR_0L		0x48	/* unimplemented */
    385       1.1  eeh #define IDDR_1H		0x50
    386       1.1  eeh #define IDDR_1L		0x58	/* unimplemented */
    387       1.1  eeh #define IDDR_2H		0x60
    388       1.1  eeh #define IDDR_2L		0x68	/* unimplemented */
    389       1.1  eeh #define IDDR_3H		0x70	/* unimplemented */
    390       1.1  eeh #define IDDR_3L		0x78	/* unimplemented */
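/*
 * Rough sketch of a cross-call dispatch (the exact protocol is up to the
 * interrupt code; target_upaid/arg0/arg1/arg2 are placeholders): load the
 * three data words, store anything to IDCR(target) to launch, then poll
 * the dispatch status register.
 *
 *	stxa(IDDR_0H, ASI_INTERRUPT_DISPATCH, arg0);
 *	stxa(IDDR_1H, ASI_INTERRUPT_DISPATCH, arg1);
 *	stxa(IDDR_2H, ASI_INTERRUPT_DISPATCH, arg2);
 *	stxa(IDCR(target_upaid), ASI_INTERRUPT_DISPATCH, 0);
 *	__asm __volatile("membar #Sync" : : : "memory");
 *	while (ldxa(IDSR, ASR_IDSR) & IDSR_BUSY)
 *		;
 *	if (ldxa(IDSR, ASR_IDSR) & IDSR_NACK)
 *		... retry ...
 */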
    391       1.1  eeh 
    392       1.1  eeh /*
    393       1.1  eeh  * Error registers
    394       1.1  eeh  */
    395       1.1  eeh 
    396       1.1  eeh /* Since we won't try to fix async errs, we don't care about the bits in the regs */
    397       1.1  eeh #define ASI_AFAR	0x4d	/* Asynchronous fault address register */
    398       1.1  eeh #define AFAR		0x00
    399       1.1  eeh #define ASI_AFSR	0x4c	/* Asynchronous fault status register */
    400       1.1  eeh #define AFSR		0x00
    401       1.1  eeh 
    402       1.1  eeh #define ASI_P_EER	0x4b	/* Error enable register */
    403       1.1  eeh #define P_EER		0x00
    404       1.1  eeh #define P_EER_ISAPEN	0x04	/* Enable fatal on ISAP */
    405       1.1  eeh #define P_EER_NCEEN	0x02	/* Enable trap on uncorrectable errs */
    406       1.1  eeh #define P_EER_CEEN	0x01	/* Enable trap on correctable errs */
    407       1.1  eeh 
    408       1.1  eeh #define ASI_DATAPATH_READ	0x7f /* Read the regs */
    409       1.1  eeh #define ASI_DATAPATH_WRITE	0x77 /* Write to the regs */
    410       1.1  eeh #define P_DPER_0	0x00	/* Datapath err reg 0 */
    411       1.1  eeh #define P_DPER_1	0x18	/* Datapath err reg 1 */
    412       1.1  eeh #define P_DCR_0		0x20	/* Datapath control reg 0 */
     413       1.1  eeh #define P_DCR_1		0x38	/* Datapath control reg 1 */
    414       1.1  eeh 
    415       1.2  eeh 
    416       1.2  eeh /* From sparc64/asm.h which I think I'll deprecate since it makes bus.h a pain. */
    417       1.2  eeh 
    418  1.18.2.1  mrg #ifndef _LOCORE
    419       1.1  eeh /*
    420       1.2  eeh  * GCC __asm constructs for doing assembly stuff.
    421       1.1  eeh  */
    422       1.2  eeh 
    423       1.2  eeh /*
    424       1.2  eeh  * ``Routines'' to load and store from/to alternate address space.
    425       1.2  eeh  * The location can be a variable, the asi value (address space indicator)
    426       1.2  eeh  * must be a constant.
    427       1.1  eeh  *
    428       1.2  eeh  * N.B.: You can put as many special functions here as you like, since
    429       1.2  eeh  * they cost no kernel space or time if they are not used.
    430       1.1  eeh  *
     431       1.2  eeh  * These are now static inline functions; the older valued-macro versions
     432       1.2  eeh  * (which worked around gcc mishandling the "n"umeric ASI constraints when
     433       1.2  eeh  * it inlined too late) are retained below under #if 0.
    434       1.2  eeh  */
    435       1.6  eeh 
    436  1.18.2.1  mrg /*
    437  1.18.2.1  mrg  * Apparently the definition of bypass ASIs is that they all use the
    438  1.18.2.1  mrg  * D$ so we need to flush the D$ to make sure we don't get data pollution.
    439  1.18.2.1  mrg  */
    440  1.18.2.1  mrg 
    441  1.18.2.1  mrg static __inline__ u_char lduba __P((paddr_t loc, int asi));
    442  1.18.2.1  mrg static __inline__ u_short lduha __P((paddr_t loc, int asi));
    443  1.18.2.1  mrg static __inline__ u_int lda __P((paddr_t loc, int asi));
    444  1.18.2.1  mrg static __inline__ int ldswa __P((paddr_t loc, int asi));
    445  1.18.2.1  mrg static __inline__ u_int64_t ldxa __P((paddr_t loc, int asi));
    446  1.18.2.1  mrg static __inline__ u_int64_t ldda __P((paddr_t loc, int asi));
    447  1.18.2.1  mrg 
    448  1.18.2.1  mrg static __inline__ void stba __P((paddr_t loc, int asi, u_char value));
    449  1.18.2.1  mrg static __inline__ void stha __P((paddr_t loc, int asi, u_short value));
    450  1.18.2.1  mrg static __inline__ void sta __P((paddr_t loc, int asi, u_int value));
    451  1.18.2.1  mrg static __inline__ void stxa __P((paddr_t loc, int asi, u_int64_t value));
    452  1.18.2.1  mrg static __inline__ void stda __P((paddr_t loc, int asi, u_int64_t value));
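/*
 * Typical usage sketch: together with the bypass ASIs these let the kernel
 * access physical addresses directly, e.g. (pa being a paddr_t):
 *
 *	u_int64_t word = ldxa(pa, ASI_PHYS_CACHED);	(read physical memory)
 *	stba(pa, ASI_PHYS_NON_CACHED, 0xff);		(poke a device register)
 *
 * PHYS_ASI() makes these take the D$-flushing path described above.
 */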
    453  1.18.2.1  mrg 
    454  1.18.2.1  mrg #ifdef __arch64__
    455  1.18.2.1  mrg static __inline__ u_char
    456  1.18.2.1  mrg lduba(paddr_t loc, int asi)
    457  1.18.2.1  mrg {
    458  1.18.2.1  mrg 	register unsigned int _lduba_v;
    459  1.18.2.1  mrg 
    460  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    461  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; "
    462  1.18.2.1  mrg " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; "
    463  1.18.2.1  mrg " lduba [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; "
    464  1.18.2.1  mrg " stxa %%g0,[%1] %4; membar #Sync" :
    465  1.18.2.1  mrg 				 "=&r" (_lduba_v), "=r" (loc):
    466  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)),
    467  1.18.2.1  mrg 				 "r" (asi), "n" (ASI_DCACHE_TAG));
    468  1.18.2.1  mrg 	} else {
    469  1.18.2.1  mrg 		__asm __volatile("wr %2,%%g0,%%asi; lduba [%1]%%asi,%0" :
    470  1.18.2.1  mrg 				 "=r" (_lduba_v) :
    471  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)), "r" (asi));
    472  1.18.2.1  mrg 	}
    473  1.18.2.1  mrg 	return (_lduba_v);
    474  1.18.2.1  mrg }
    475  1.18.2.1  mrg #else
    476  1.18.2.1  mrg static __inline__ u_char
    477  1.18.2.1  mrg lduba(paddr_t loc, int asi)
    478  1.18.2.1  mrg {
    479  1.18.2.1  mrg 	register unsigned int _lduba_v, _loc_hi, _pstate;
    480  1.18.2.1  mrg 
    481  1.18.2.1  mrg 	_loc_hi = (((u_int64_t)loc)>>32);
    482  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    483  1.18.2.1  mrg 		__asm __volatile("wr %4,%%g0,%%asi; "
    484  1.18.2.1  mrg " andn %2,0x1f,%0; stxa %%g0,[%0] %5; rdpr %%pstate,%1; "
    485  1.18.2.1  mrg " sllx %3,32,%0; or %0,%2,%0; wrpr %1,8,%%pstate; "
    486  1.18.2.1  mrg " membar #Sync; lduba [%0]%%asi,%0; wrpr %1,0,%%pstate; "
    487  1.18.2.1  mrg " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %5; membar #Sync" :
    488  1.18.2.1  mrg 				 "=&r" (_lduba_v),  "=&r" (_pstate) :
    489  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)), "r" (_loc_hi),
    490  1.18.2.1  mrg 				 "r" (asi), "n" (ASI_DCACHE_TAG));
    491  1.18.2.1  mrg 	} else {
    492  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; sllx %2,32,%0; "
    493  1.18.2.1  mrg " or %0,%1,%0; lduba [%0]%%asi,%0" : "=&r" (_lduba_v) :
    494  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)),
    495  1.18.2.1  mrg 				 "r" (_loc_hi), "r" (asi));
    496  1.18.2.1  mrg 	}
    497  1.18.2.1  mrg 	return (_lduba_v);
    498  1.18.2.1  mrg }
    499  1.18.2.1  mrg #endif
    500  1.18.2.1  mrg 
    501  1.18.2.1  mrg #ifdef __arch64__
    502  1.18.2.1  mrg /* load half-word from alternate address space */
    503  1.18.2.1  mrg static __inline__ u_short
    504  1.18.2.1  mrg lduha(paddr_t loc, int asi)
    505  1.18.2.1  mrg {
    506  1.18.2.1  mrg 	register unsigned int _lduha_v;
    507  1.18.2.1  mrg 
    508  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    509  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; "
    510  1.18.2.1  mrg " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; "
    511  1.18.2.1  mrg " lduha [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; "
    512  1.18.2.1  mrg " stxa %%g0,[%1] %4; membar #Sync" : "=&r" (_lduha_v), "=r" (loc) :
    513  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)),
    514  1.18.2.1  mrg 				 "r" (asi), "n" (ASI_DCACHE_TAG));
    515  1.18.2.1  mrg 	} else {
    516  1.18.2.1  mrg 		__asm __volatile("wr %2,%%g0,%%asi; lduha [%1]%%asi,%0" :
    517  1.18.2.1  mrg 				 "=r" (_lduha_v) :
    518  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)), "r" (asi));
    519  1.18.2.1  mrg 	}
    520  1.18.2.1  mrg 	return (_lduha_v);
    521  1.18.2.1  mrg }
    522  1.18.2.1  mrg #else
    523  1.18.2.1  mrg /* load half-word from alternate address space */
    524  1.18.2.1  mrg static __inline__ u_short
    525  1.18.2.1  mrg lduha(paddr_t loc, int asi) {
    526  1.18.2.1  mrg 	register unsigned int _lduha_v, _loc_hi, _pstate;
    527  1.18.2.1  mrg 
    528  1.18.2.1  mrg 	_loc_hi = (((u_int64_t)loc)>>32);
    529  1.18.2.1  mrg 
    530  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    531  1.18.2.1  mrg 		__asm __volatile("wr %4,%%g0,%%asi; rdpr %%pstate,%1; "
    532  1.18.2.1  mrg " andn %2,0x1f,%0; stxa %%g0,[%0] %5; wrpr %1,8,%%pstate; sllx %3,32,%0; "
    533  1.18.2.1  mrg " or %0,%2,%0; membar #Sync; lduha [%0]%%asi,%0; wrpr %1,0,%%pstate; "
    534  1.18.2.1  mrg " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %5; membar #Sync" :
    535  1.18.2.1  mrg 				 "=&r" (_lduha_v), "=&r" (_pstate) :
    536  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)), "r" (_loc_hi),
    537  1.18.2.1  mrg 				 "r" (asi), "n" (ASI_DCACHE_TAG));
    538  1.18.2.1  mrg 	} else {
    539  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; sllx %2,32,%0; "
    540  1.18.2.1  mrg " or %0,%1,%0; lduha [%0]%%asi,%0" : "=&r" (_lduha_v) :
    541  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
    542  1.18.2.1  mrg 	}
    543  1.18.2.1  mrg 	return (_lduha_v);
    544  1.18.2.1  mrg }
    545  1.18.2.1  mrg #endif
    546  1.18.2.1  mrg 
    547  1.18.2.1  mrg 
    548  1.18.2.1  mrg #ifdef __arch64__
    549  1.18.2.1  mrg /* load unsigned int from alternate address space */
    550  1.18.2.1  mrg static __inline__ u_int
    551  1.18.2.1  mrg lda(paddr_t loc, int asi)
    552  1.18.2.1  mrg {
    553  1.18.2.1  mrg 	register unsigned int _lda_v;
    554  1.18.2.1  mrg 
    555  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    556  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; "
    557  1.18.2.1  mrg " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; "
    558  1.18.2.1  mrg " lda [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; "
    559  1.18.2.1  mrg " stxa %%g0,[%1] %4; membar #Sync" : "=&r" (_lda_v), "=r" (loc) :
    560  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)),
    561  1.18.2.1  mrg 				 "r" (asi), "n" (ASI_DCACHE_TAG));
    562  1.18.2.1  mrg 	} else {
    563  1.18.2.1  mrg 		__asm __volatile("wr %2,%%g0,%%asi; lda [%1]%%asi,%0" :
    564  1.18.2.1  mrg 				 "=r" (_lda_v) :
    565  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)), "r" (asi));
    566  1.18.2.1  mrg 	}
    567  1.18.2.1  mrg 	return (_lda_v);
    568  1.18.2.1  mrg }
    569  1.18.2.1  mrg 
    570  1.18.2.1  mrg /* load signed int from alternate address space */
    571  1.18.2.1  mrg static __inline__ int
    572  1.18.2.1  mrg ldswa(paddr_t loc, int asi)
    573  1.18.2.1  mrg {
    574  1.18.2.1  mrg 	register int _lda_v;
    575  1.18.2.1  mrg 
    576  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    577  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; "
    578  1.18.2.1  mrg " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; "
    579  1.18.2.1  mrg " ldswa [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; "
    580  1.18.2.1  mrg " stxa %%g0,[%1] %4; membar #Sync" : "=&r" (_lda_v), "=r" (loc) :
    581  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)),
    582  1.18.2.1  mrg 				 "r" (asi), "n" (ASI_DCACHE_TAG));
    583  1.18.2.1  mrg 	} else {
    584  1.18.2.1  mrg 		__asm __volatile("wr %2,%%g0,%%asi; ldswa [%1]%%asi,%0" :
    585  1.18.2.1  mrg 				 "=r" (_lda_v) :
    586  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)), "r" (asi));
    587  1.18.2.1  mrg 	}
    588  1.18.2.1  mrg 	return (_lda_v);
    589  1.18.2.1  mrg }
    590  1.18.2.1  mrg #else	/* __arch64__ */
    591  1.18.2.1  mrg /* load unsigned int from alternate address space */
    592  1.18.2.1  mrg static __inline__ u_int
    593  1.18.2.1  mrg lda(paddr_t loc, int asi)
    594  1.18.2.1  mrg {
    595  1.18.2.1  mrg 	register unsigned int _lda_v, _loc_hi, _pstate;
    596  1.18.2.1  mrg 
    597  1.18.2.1  mrg 	_loc_hi = (((u_int64_t)loc)>>32);
    598  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    599  1.18.2.1  mrg 		__asm __volatile("wr %4,%%g0,%%asi; rdpr %%pstate,%1;"
    600  1.18.2.1  mrg " andn %2,0x1f,%0; stxa %%g0,[%0] %5; wrpr %1,8,%%pstate; "
    601  1.18.2.1  mrg " sllx %3,32,%0; or %0,%2,%0; membar #Sync;lda [%0]%%asi,%0; "
    602  1.18.2.1  mrg " wrpr %1,0,%%pstate; andn %2,0x1f,%1; membar #Sync; "
    603  1.18.2.1  mrg " stxa %%g0,[%1] %5; membar #Sync" : "=&r" (_lda_v), "=&r" (_pstate) :
    604  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)), "r" (_loc_hi),
    605  1.18.2.1  mrg 				 "r" (asi), "n" (ASI_DCACHE_TAG));
    606  1.18.2.1  mrg 	} else {
    607  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; sllx %2,32,%0; "
    608  1.18.2.1  mrg " or %0,%1,%0; lda [%0]%%asi,%0" : "=&r" (_lda_v) :
    609  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)),
    610  1.18.2.1  mrg 				 "r" (_loc_hi), "r" (asi));
    611  1.18.2.1  mrg 	}
    612  1.18.2.1  mrg 	return (_lda_v);
    613  1.18.2.1  mrg }
    614  1.18.2.1  mrg 
    615  1.18.2.1  mrg /* load signed int from alternate address space */
    616  1.18.2.1  mrg static __inline__ int
    617  1.18.2.1  mrg ldswa(paddr_t loc, int asi)
    618  1.18.2.1  mrg {
    619  1.18.2.1  mrg 	register int _lda_v, _loc_hi, _pstate;
    620  1.18.2.1  mrg 
    621  1.18.2.1  mrg 	_loc_hi = (((u_int64_t)loc)>>32);
    622  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    623  1.18.2.1  mrg 		__asm __volatile("wr %4,%%g0,%%asi; rdpr %%pstate,%1;"
    624  1.18.2.1  mrg " andn %2,0x1f,%0; stxa %%g0,[%0] %5; wrpr %1,8,%%pstate; sllx %3,32,%0;"
    625  1.18.2.1  mrg " or %0,%2,%0; membar #Sync; ldswa [%0]%%asi,%0; wrpr %1,0,%%pstate; "
    626  1.18.2.1  mrg " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %5; membar #Sync" :
    627  1.18.2.1  mrg 				 "=&r" (_lda_v), "=&r" (_pstate) :
    628  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)), "r" (_loc_hi),
    629  1.18.2.1  mrg 				 "r" (asi), "n" (ASI_DCACHE_TAG));
    630  1.18.2.1  mrg 	} else {
    631  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; sllx %2,32,%0; "
    632  1.18.2.1  mrg " or %0,%1,%0; ldswa [%0]%%asi,%0" : "=&r" (_lda_v) :
    633  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)),
    634  1.18.2.1  mrg 				 "r" (_loc_hi), "r" (asi));
    635  1.18.2.1  mrg 	}
    636  1.18.2.1  mrg 	return (_lda_v);
    637  1.18.2.1  mrg }
    638  1.18.2.1  mrg #endif /* __arch64__ */
    639  1.18.2.1  mrg 
    640  1.18.2.1  mrg #ifdef	__arch64__
    641  1.18.2.1  mrg /* load 64-bit int from alternate address space -- these should never be used */
    642  1.18.2.1  mrg static __inline__ u_int64_t
    643  1.18.2.1  mrg ldda(paddr_t loc, int asi)
    644  1.18.2.1  mrg {
    645  1.18.2.1  mrg 	register long long _lda_v;
    646  1.18.2.1  mrg 
    647  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    648  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; "
    649  1.18.2.1  mrg " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; "
    650  1.18.2.1  mrg " ldda [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; "
    651  1.18.2.1  mrg " stxa %%g0,[%1] %4; membar #Sync" : "=&r" (_lda_v), "=&r" (loc) :
    652  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)),
    653  1.18.2.1  mrg 				 "r" (asi), "n" (ASI_DCACHE_TAG));
    654  1.18.2.1  mrg 	} else {
    655  1.18.2.1  mrg 		__asm __volatile("wr %2,%%g0,%%asi; ldda [%1]%%asi,%0" :
    656  1.18.2.1  mrg 				 "=r" (_lda_v) :
    657  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)), "r" (asi));
    658  1.18.2.1  mrg 	}
    659  1.18.2.1  mrg 	return (_lda_v);
    660  1.18.2.1  mrg }
    661  1.18.2.1  mrg #else
    662  1.18.2.1  mrg /* load 64-bit int from alternate address space */
    663  1.18.2.1  mrg static __inline__ u_int64_t
    664  1.18.2.1  mrg ldda(paddr_t loc, int asi)
    665  1.18.2.1  mrg {
    666  1.18.2.1  mrg 	register long long _lda_v, _loc_hi, _pstate;
    667  1.18.2.1  mrg 
    668  1.18.2.1  mrg 	_loc_hi = (((u_int64_t)loc)>>32);
    669  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    670  1.18.2.1  mrg 		__asm __volatile("wr %4,%%g0,%%asi; rdpr %%pstate,%1;"
    671  1.18.2.1  mrg " andn %2,0x1f,%0; rdpr %%pstate,%1; stxa %%g0,[%0] %5; wrpr %1,8,%%pstate;"
    672  1.18.2.1  mrg " sllx %3,32,%0; or %0,%2,%0; membar #Sync; ldda [%0]%%asi,%0; wrpr %1,0,%%pstate; "
    673  1.18.2.1  mrg " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %5; membar #Sync" :
    674  1.18.2.1  mrg 				 "=&r" (_lda_v), "=&r" (_pstate) :
    675  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)), "r" (_loc_hi),
    676  1.18.2.1  mrg 				 "r" (asi), "n" (ASI_DCACHE_TAG));
    677  1.18.2.1  mrg 	} else {
    678  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; sllx %2,32,%0; "
    679  1.18.2.1  mrg " or %0,%1,%0; ldda [%0]%%asi,%0" : "=&r" (_lda_v) :
    680  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi));
    681  1.18.2.1  mrg 	}
    682  1.18.2.1  mrg 	return (_lda_v);
    683  1.18.2.1  mrg }
    684  1.18.2.1  mrg #endif
    685  1.18.2.1  mrg 
    686       1.6  eeh 
    687       1.9  eeh #ifdef __arch64__
    688  1.18.2.1  mrg /* native load 64-bit int from alternate address space w/64-bit compiler*/
    689  1.18.2.1  mrg static __inline__ u_int64_t
    690  1.18.2.1  mrg ldxa(paddr_t loc, int asi)
    691  1.18.2.1  mrg {
    692  1.18.2.1  mrg 	register unsigned long _lda_v;
    693  1.18.2.1  mrg 
    694  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    695  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; "
    696  1.18.2.1  mrg " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; "
    697  1.18.2.1  mrg " ldxa [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; "
    698  1.18.2.1  mrg " stxa %%g0,[%1] %4; membar #Sync" : "=&r" (_lda_v), "=r" (loc) :
    699  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)),
    700  1.18.2.1  mrg 				 "r" (asi), "n" (ASI_DCACHE_TAG));
    701  1.18.2.1  mrg 	} else {
    702  1.18.2.1  mrg 		__asm __volatile("wr %2,%%g0,%%asi; ldxa [%1]%%asi,%0" :
    703  1.18.2.1  mrg 				 "=r" (_lda_v) :
    704  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)), "r" (asi));
    705  1.18.2.1  mrg 	}
    706  1.18.2.1  mrg 	return (_lda_v);
    707  1.18.2.1  mrg }
    708  1.18.2.1  mrg #else
    709  1.18.2.1  mrg /* native load 64-bit int from alternate address space w/32-bit compiler*/
    710  1.18.2.1  mrg static __inline__ u_int64_t
    711  1.18.2.1  mrg ldxa(paddr_t loc, int asi)
    712  1.18.2.1  mrg {
    713  1.18.2.1  mrg 	register unsigned long _ldxa_lo, _ldxa_hi, _loc_hi;
    714  1.18.2.1  mrg 
    715  1.18.2.1  mrg 	_loc_hi = (((u_int64_t)loc)>>32);
    716  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    717  1.18.2.1  mrg 		__asm __volatile("wr %4,%%g0,%%asi; "
    718  1.18.2.1  mrg " andn %2,0x1f,%0; rdpr %%pstate,%1; stxa %%g0,[%0] %5; "
    719  1.18.2.1  mrg " sllx %3,32,%0; wrpr %1,8,%%pstate; or %0,%2,%0; membar #Sync; ldxa [%0]%%asi,%0; "
    720  1.18.2.1  mrg " wrpr %1,0,%%pstate; andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %5; membar #Sync; "
    721  1.18.2.1  mrg " srlx %0,32,%1; srl %0,0,%0" :
    722  1.18.2.1  mrg 				 "=&r" (_ldxa_lo), "=&r" (_ldxa_hi) :
    723  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)), "r" (_loc_hi),
    724  1.18.2.1  mrg 				 "r" (asi), "n" (ASI_DCACHE_TAG));
    725  1.18.2.1  mrg 	} else {
    726  1.18.2.1  mrg 		__asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; "
    727  1.18.2.1  mrg " or %0,%2,%0; ldxa [%0]%%asi,%0; srlx %0,32,%1; srl %0,0,%0;" :
    728  1.18.2.1  mrg 				 "=&r" (_ldxa_lo), "=&r" (_ldxa_hi) :
    729  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)), "r" (_loc_hi),
    730  1.18.2.1  mrg 				 "r" (asi));
    731  1.18.2.1  mrg 	}
    732  1.18.2.1  mrg 	return ((((int64_t)_ldxa_hi)<<32)|_ldxa_lo);
    733  1.18.2.1  mrg }
    734  1.18.2.1  mrg #endif
    735  1.18.2.1  mrg 
    736  1.18.2.1  mrg /* store byte to alternate address space */
    737  1.18.2.1  mrg #ifdef __arch64__
    738  1.18.2.1  mrg static __inline__ void
    739  1.18.2.1  mrg stba(paddr_t loc, int asi, u_char value)
    740  1.18.2.1  mrg {
    741  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    742  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; stba %1,[%2]%%asi;"
    743  1.18.2.1  mrg " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %4; membar #Sync" : "=&r" (loc) :
    744  1.18.2.1  mrg 				 "r" ((int)(value)), "r" ((unsigned long)(loc)),
     745  1.18.2.1  mrg 				 "r" (asi), "n" (ASI_DCACHE_TAG) : "memory");
    746  1.18.2.1  mrg 	} else {
    747  1.18.2.1  mrg 		__asm __volatile("wr %2,%%g0,%%asi; stba %0,[%1]%%asi" : :
    748  1.18.2.1  mrg 				 "r" ((int)(value)), "r" ((unsigned long)(loc)),
     749  1.18.2.1  mrg 				 "r" (asi) : "memory");
    750  1.18.2.1  mrg 	}
    751  1.18.2.1  mrg }
    752  1.18.2.1  mrg #else
    753  1.18.2.1  mrg static __inline__ void
    754  1.18.2.1  mrg stba(paddr_t loc, int asi, u_char value)
    755  1.18.2.1  mrg {
    756  1.18.2.1  mrg 	register int _loc_hi, _pstate;
    757  1.18.2.1  mrg 
    758  1.18.2.1  mrg 	_loc_hi = (((u_int64_t)loc)>>32);
    759  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    760  1.18.2.1  mrg 		__asm __volatile("wr %5,%%g0,%%asi; sllx %4,32,%0; rdpr %%pstate,%1;"
    761  1.18.2.1  mrg " or %3,%0,%0; wrpr %1,8,%%pstate; stba %2,[%0]%%asi; wrpr %1,0,%%pstate; "
    762  1.18.2.1  mrg " andn %0,0x1f,%1;  membar #Sync; stxa %%g0,[%1] %6; membar #Sync" :
    763  1.18.2.1  mrg 				 "=&r" (_loc_hi), "=&r" (_pstate) :
    764  1.18.2.1  mrg 				 "r" ((int)(value)), "r" ((unsigned long)(loc)),
    765  1.18.2.1  mrg 				 "r" (_loc_hi), "r" (asi), "n" (ASI_DCACHE_TAG));
    766  1.18.2.1  mrg 	} else {
    767  1.18.2.1  mrg 		__asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; "
    768  1.18.2.1  mrg " or %2,%0,%0; stba %1,[%0]%%asi" : "=&r" (_loc_hi) :
    769  1.18.2.1  mrg 				 "r" ((int)(value)), "r" ((unsigned long)(loc)),
    770  1.18.2.1  mrg 				 "r" (_loc_hi), "r" (asi));
    771  1.18.2.1  mrg 	}
    772  1.18.2.1  mrg }
    773  1.18.2.1  mrg #endif
    774  1.18.2.1  mrg 
    775  1.18.2.1  mrg /* store half-word to alternate address space */
    776  1.18.2.1  mrg #ifdef __arch64__
    777  1.18.2.1  mrg static __inline__ void
    778  1.18.2.1  mrg stha(paddr_t loc, int asi, u_short value)
    779  1.18.2.1  mrg {
    780  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    781  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; stha %1,[%2]%%asi;"
    782  1.18.2.1  mrg " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %4; membar #Sync" : "=&r" (loc) :
    783  1.18.2.1  mrg 			"r" ((int)(value)), "r" ((unsigned long)(loc)),
    784  1.18.2.1  mrg 			"r" (asi), "n" (ASI_DCACHE_TAG) : "memory");
    785  1.18.2.1  mrg 	} else {
    786  1.18.2.1  mrg 		__asm __volatile("wr %2,%%g0,%%asi; stha %0,[%1]%%asi" : :
    787  1.18.2.1  mrg 				 "r" ((int)(value)), "r" ((unsigned long)(loc)),
    788  1.18.2.1  mrg 				 "r" (asi) : "memory");
    789  1.18.2.1  mrg 	}
    790  1.18.2.1  mrg }
    791  1.18.2.1  mrg #else
    792  1.18.2.1  mrg static __inline__ void
    793  1.18.2.1  mrg stha(paddr_t loc, int asi, u_short value)
    794  1.18.2.1  mrg {
    795  1.18.2.1  mrg 	register int _loc_hi, _pstate;
    796  1.18.2.1  mrg 
    797  1.18.2.1  mrg 	_loc_hi = (((u_int64_t)loc)>>32);
    798  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    799  1.18.2.1  mrg 		__asm __volatile("wr %5,%%g0,%%asi; sllx %4,32,%0; rdpr %%pstate,%1;"
    800  1.18.2.1  mrg " or %3,%0,%0; wrpr %1,8,%%pstate; stha %2,[%0]%%asi; wrpr %1,0,%%pstate; "
    801  1.18.2.1  mrg " andn %0,0x1f,%1;  membar #Sync; stxa %%g0,[%1] %6; membar #Sync" :
    802  1.18.2.1  mrg 				 "=&r" (_loc_hi), "=&r" (_pstate) :
    803  1.18.2.1  mrg 				 "r" ((int)(value)), "r" ((unsigned long)(loc)),
    804  1.18.2.1  mrg 				 "r" (_loc_hi), "r" (asi),
    805  1.18.2.1  mrg 				 "n" (ASI_DCACHE_TAG) : "memory");
    806  1.18.2.1  mrg 	} else {
    807  1.18.2.1  mrg 		__asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; "
    808  1.18.2.1  mrg " or %2,%0,%0; stha %1,[%0]%%asi" : "=&r" (_loc_hi) :
    809  1.18.2.1  mrg 				 "r" ((int)(value)), "r" ((unsigned long)(loc)),
    810  1.18.2.1  mrg 				 "r" (_loc_hi), "r" (asi) : "memory");
    811  1.18.2.1  mrg 	}
    812  1.18.2.1  mrg }
    813  1.18.2.1  mrg #endif
    814  1.18.2.1  mrg 
    815  1.18.2.1  mrg 
    816  1.18.2.1  mrg /* store int to alternate address space */
    817  1.18.2.1  mrg #ifdef __arch64__
    818  1.18.2.1  mrg static __inline__ void
    819  1.18.2.1  mrg sta(paddr_t loc, int asi, u_int value)
    820  1.18.2.1  mrg {
    821  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    822  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; sta %1,[%2]%%asi;"
    823  1.18.2.1  mrg " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %4; membar #Sync" : "=&r" (loc) :
    824  1.18.2.1  mrg 				 "r" ((int)(value)), "r" ((unsigned long)(loc)),
    825  1.18.2.1  mrg 				 "r" (asi), "n" (ASI_DCACHE_TAG) : "memory");
    826  1.18.2.1  mrg 	} else {
    827  1.18.2.1  mrg 		__asm __volatile("wr %2,%%g0,%%asi; sta %0,[%1]%%asi" : :
    828  1.18.2.1  mrg 				 "r" ((int)(value)), "r" ((unsigned long)(loc)),
    829  1.18.2.1  mrg 				 "r" (asi) : "memory");
    830  1.18.2.1  mrg 	}
    831  1.18.2.1  mrg }
    832  1.18.2.1  mrg #else
    833  1.18.2.1  mrg static __inline__ void
    834  1.18.2.1  mrg sta(paddr_t loc, int asi, u_int value)
    835  1.18.2.1  mrg {
    836  1.18.2.1  mrg 	register int _loc_hi, _pstate;
    837  1.18.2.1  mrg 
    838  1.18.2.1  mrg 	_loc_hi = (((u_int64_t)loc)>>32);
    839  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    840  1.18.2.1  mrg 		__asm __volatile("wr %5,%%g0,%%asi; sllx %4,32,%0; rdpr %%pstate,%1;"
    841  1.18.2.1  mrg " or %3,%0,%0; wrpr %1,8,%%pstate; sta %2,[%0]%%asi; wrpr %1,0,%%pstate; "
    842  1.18.2.1  mrg " andn %0,0x1f,%1;  membar #Sync; stxa %%g0,[%1] %6; membar #Sync" :
    843  1.18.2.1  mrg 				 "=&r" (_loc_hi), "=&r" (_pstate) :
    844  1.18.2.1  mrg 				 "r" ((int)(value)), "r" ((unsigned long)(loc)),
    845  1.18.2.1  mrg 				 "r" (_loc_hi), "r" (asi),
    846  1.18.2.1  mrg 				 "n" (ASI_DCACHE_TAG) : "memory");
    847  1.18.2.1  mrg 	} else {
    848  1.18.2.1  mrg 		__asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; "
    849  1.18.2.1  mrg " or %2,%0,%0; sta %1,[%0]%%asi" : "=&r" (_loc_hi) :
    850  1.18.2.1  mrg 				 "r" ((int)(value)), "r" ((unsigned long)(loc)),
    851  1.18.2.1  mrg 				 "r" (_loc_hi), "r" (asi) : "memory");
    852  1.18.2.1  mrg 	}
    853  1.18.2.1  mrg }
    854  1.18.2.1  mrg #endif
    855  1.18.2.1  mrg 
    856  1.18.2.1  mrg /* store 64-bit int to alternate address space */
    857  1.18.2.1  mrg #ifdef __arch64__
    858  1.18.2.1  mrg static __inline__ void
    859  1.18.2.1  mrg stda(paddr_t loc, int asi, u_int64_t value)
    860  1.18.2.1  mrg {
    861  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    862  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; stda %1,[%2]%%asi;"
    863  1.18.2.1  mrg " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %4; membar #Sync" : "=&r" (loc) :
     864  1.18.2.1  mrg 				 "r" ((long long)(value)), "r" ((unsigned long)(loc)),
    865  1.18.2.1  mrg 				 "r" (asi), "n" (ASI_DCACHE_TAG) : "memory");
    866  1.18.2.1  mrg 	} else {
    867  1.18.2.1  mrg 		__asm __volatile("wr %2,%%g0,%%asi; stda %0,[%1]%%asi" : :
    868  1.18.2.1  mrg 				 "r" ((long long)(value)), "r" ((unsigned long)(loc)),
    869  1.18.2.1  mrg 				 "r" (asi) : "memory");
    870  1.18.2.1  mrg 	}
    871  1.18.2.1  mrg }
    872  1.18.2.1  mrg #else
    873  1.18.2.1  mrg static __inline__ void
    874  1.18.2.1  mrg stda(paddr_t loc, int asi, u_int64_t value)
    875  1.18.2.1  mrg {
    876  1.18.2.1  mrg 	register int _loc_hi, _pstate;
    877  1.18.2.1  mrg 
    878  1.18.2.1  mrg 	_loc_hi = (((u_int64_t)loc)>>32);
    879  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    880  1.18.2.1  mrg 		__asm __volatile("wr %5,%%g0,%%asi; sllx %4,32,%0; rdpr %%pstate,%1; "
    881  1.18.2.1  mrg " or %3,%0,%0; wrpr %1,8,%%pstate; stda %2,[%0]%%asi; wrpr %1,0,%%pstate;"
    882  1.18.2.1  mrg " andn %0,0x1f,%1;  membar #Sync; stxa %%g0,[%1] %6; membar #Sync" :
    883  1.18.2.1  mrg 				 "=&r" (_loc_hi), "=&r" (_pstate) :
    884  1.18.2.1  mrg 				 "r" ((long long)(value)), "r" ((unsigned long)(loc)),
    885  1.18.2.1  mrg 				 "r" (_loc_hi), "r" (asi),
    886  1.18.2.1  mrg 				 "n" (ASI_DCACHE_TAG) : "memory");
    887  1.18.2.1  mrg 	} else {
    888  1.18.2.1  mrg 		__asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; "
    889  1.18.2.1  mrg " or %2,%0,%0; stda %1,[%0]%%asi" : "=&r" (_loc_hi) :
    890  1.18.2.1  mrg 				 "r" ((long long)(value)), "r" ((unsigned long)(loc)),
    891  1.18.2.1  mrg 				 "r" (_loc_hi), "r" (asi) : "memory");
    892  1.18.2.1  mrg 	}
    893  1.18.2.1  mrg }
    894  1.18.2.1  mrg #endif
    895  1.18.2.1  mrg 
    896  1.18.2.1  mrg #ifdef __arch64__
    897  1.18.2.1  mrg /* native store 64-bit int to alternate address space w/64-bit compiler*/
    898  1.18.2.1  mrg static __inline__ void
    899  1.18.2.1  mrg stxa(paddr_t loc, int asi, u_int64_t value)
    900  1.18.2.1  mrg {
    901  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    902  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; stxa %1,[%2]%%asi;"
    903  1.18.2.1  mrg " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %4; membar #Sync" : "=&r" (loc) :
     904  1.18.2.1  mrg 				 "r" ((unsigned long)(value)), "r" ((unsigned long)(loc)),
    905  1.18.2.1  mrg 				 "r" (asi), "n" (ASI_DCACHE_TAG) : "memory");
    906  1.18.2.1  mrg 	} else {
    907  1.18.2.1  mrg 		__asm __volatile("wr %2,%%g0,%%asi; stxa %0,[%1]%%asi" : :
    908  1.18.2.1  mrg 				 "r" ((unsigned long)(value)),
    909  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)), "r" (asi) : "memory");
    910  1.18.2.1  mrg 	}
    911  1.18.2.1  mrg }
    912  1.18.2.1  mrg #else
    913  1.18.2.1  mrg /* native store 64-bit int to alternate address space w/32-bit compiler*/
    914  1.18.2.1  mrg static __inline__ void
    915  1.18.2.1  mrg stxa(paddr_t loc, int asi, u_int64_t value)
    916  1.18.2.1  mrg {
    917  1.18.2.1  mrg 	int _stxa_lo, _stxa_hi, _loc_hi;
    918  1.18.2.1  mrg 
    919  1.18.2.1  mrg 	_stxa_lo = value;
    920  1.18.2.1  mrg 	_stxa_hi = ((u_int64_t)value)>>32;
    921  1.18.2.1  mrg 	_loc_hi = (((u_int64_t)(u_long)loc)>>32);
    922  1.18.2.1  mrg 
    923  1.18.2.1  mrg 	if (PHYS_ASI(asi)) {
    924  1.18.2.1  mrg 		__asm __volatile("wr %7,%%g0,%%asi; sllx %4,32,%1; sllx %6,32,%0; "
    925  1.18.2.1  mrg " or %1,%3,%1; rdpr %%pstate,%3; or %0,%5,%0; wrpr %3,8,%%pstate; "
    926  1.18.2.1  mrg " stxa %1,[%0]%%asi; wrpr %3,0,%%pstate; "
    927  1.18.2.1  mrg " andn %0,0x1f,%1;  membar #Sync; stxa %%g0,[%1] %8; membar #Sync" :
    928  1.18.2.1  mrg 				 "=&r" (_loc_hi), "=&r" (_stxa_hi),
    929  1.18.2.1  mrg 				 "=&r" ((int)(_stxa_lo)) :
    930  1.18.2.1  mrg 				 "r" ((int)(_stxa_lo)), "r" ((int)(_stxa_hi)),
    931  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)), "r" (_loc_hi),
    932  1.18.2.1  mrg 				 "r" (asi), "n" (ASI_DCACHE_TAG) : "memory");
    933  1.18.2.1  mrg 	} else {
    934  1.18.2.1  mrg 		__asm __volatile("wr %6,%%g0,%%asi; sllx %3,32,%1; sllx %5,32,%0; "
    935  1.18.2.1  mrg " or %1,%2,%1; or %0,%4,%0; stxa %1,[%0]%%asi" :
    936  1.18.2.1  mrg 				 "=&r" (_loc_hi), "=&r" (_stxa_hi) :
    937  1.18.2.1  mrg 				 "r" ((int)(_stxa_lo)), "r" ((int)(_stxa_hi)),
    938  1.18.2.1  mrg 				 "r" ((unsigned long)(loc)), "r" (_loc_hi),
    939  1.18.2.1  mrg 				 "r" (asi) : "memory");
    940  1.18.2.1  mrg 	}
    941  1.18.2.1  mrg }
    942  1.18.2.1  mrg #endif
    943  1.18.2.1  mrg 
    944  1.18.2.1  mrg #if 0
    945  1.18.2.1  mrg #ifdef __arch64__
    946       1.2  eeh /* load byte from alternate address space */
    947       1.6  eeh #define	lduba(loc, asi) ({ \
    948      1.16  eeh 	register unsigned int _lduba_v; \
    949      1.14  eeh 	if (PHYS_ASI(asi)) { \
    950  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; " \
    951  1.18.2.1  mrg " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; " \
    952  1.18.2.1  mrg " lduba [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; " \
    953  1.18.2.1  mrg " stxa %%g0,[%1] %4; membar #Sync" : \
    954  1.18.2.1  mrg 		"=&r" (_lduba_v), "=r" (loc): \
    955      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
    956       1.6  eeh 	} else { \
    957      1.16  eeh 		__asm __volatile("wr %2,%%g0,%%asi; lduba [%1]%%asi,%0" : \
    958      1.16  eeh 		"=r" (_lduba_v) : \
    959      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (asi)); \
    960       1.6  eeh 	} \
    961       1.6  eeh 	_lduba_v; \
    962       1.6  eeh })
    963       1.6  eeh #else
    964       1.9  eeh /* load byte from alternate address space */
    965       1.9  eeh #define	lduba(loc, asi) ({ \
    966      1.16  eeh 	register unsigned int _lduba_v, _loc_hi, _pstate; \
    967       1.9  eeh 	_loc_hi = (((u_int64_t)loc)>>32); \
    968      1.14  eeh 	if (PHYS_ASI(asi)) { \
    969      1.14  eeh 		__asm __volatile("wr %4,%%g0,%%asi; " \
    970      1.14  eeh " andn %2,0x1f,%0; stxa %%g0,[%0] %5; rdpr %%pstate,%1; " \
    971      1.14  eeh " sllx %3,32,%0; or %0,%2,%0; wrpr %1,8,%%pstate; " \
    972  1.18.2.1  mrg " membar #Sync; lduba [%0]%%asi,%0; wrpr %1,0,%%pstate; " \
    973  1.18.2.1  mrg " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %5; membar #Sync" : \
    974      1.14  eeh 		"=&r" (_lduba_v),  "=&r" (_pstate) : \
    975      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (_loc_hi), \
    976       1.9  eeh 		"r" (asi), "n" (ASI_DCACHE_TAG)); \
    977       1.9  eeh 	} else { \
    978      1.17  eeh 		__asm __volatile("wr %3,%%g0,%%asi; sllx %2,32,%0; " \
    979      1.17  eeh " or %0,%1,%0; lduba [%0]%%asi,%0" : "=&r" (_lduba_v) : \
    980      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
    981       1.9  eeh 	} \
    982       1.9  eeh 	_lduba_v; \
    983       1.9  eeh })
    984       1.9  eeh #endif
    985       1.2  eeh 
    986       1.9  eeh #ifdef __arch64__
    987       1.2  eeh /* load half-word from alternate address space */
    988       1.6  eeh #define	lduha(loc, asi) ({ \
    989      1.16  eeh 	register unsigned int _lduha_v; \
    990      1.14  eeh 	if (PHYS_ASI(asi)) { \
    991  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; " \
    992  1.18.2.1  mrg " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; " \
    993  1.18.2.1  mrg " lduha [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; " \
    994  1.18.2.1  mrg " stxa %%g0,[%1] %4; membar #Sync" : "=&r" (_lduha_v), "=r" (loc) : \
    995      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
    996       1.6  eeh 	} else { \
    997       1.6  eeh 		__asm __volatile("wr %2,%%g0,%%asi; lduha [%1]%%asi,%0" : "=r" (_lduha_v) : \
    998      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (asi)); \
    999       1.6  eeh 	} \
   1000       1.6  eeh 	_lduha_v; \
   1001       1.6  eeh })
   1002       1.6  eeh #else
   1003       1.9  eeh /* load half-word from alternate address space */
   1004       1.9  eeh #define	lduha(loc, asi) ({ \
   1005      1.16  eeh 	register unsigned int _lduha_v, _loc_hi, _pstate; \
   1006       1.9  eeh 	_loc_hi = (((u_int64_t)loc)>>32); \
   1007      1.14  eeh 	if (PHYS_ASI(asi)) { \
   1008      1.14  eeh 		__asm __volatile("wr %4,%%g0,%%asi; rdpr %%pstate,%1; " \
   1009      1.14  eeh " andn %2,0x1f,%0; stxa %%g0,[%0] %5; wrpr %1,8,%%pstate; sllx %3,32,%0; " \
   1010  1.18.2.1  mrg " or %0,%2,%0; membar #Sync; lduha [%0]%%asi,%0; wrpr %1,0,%%pstate; " \
   1011  1.18.2.1  mrg " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %5; membar #Sync" : \
   1012      1.14  eeh 		"=&r" (_lduha_v), "=&r" (_pstate) : \
   1013      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (_loc_hi), \
   1014       1.9  eeh 		"r" (asi), "n" (ASI_DCACHE_TAG)); \
   1015       1.9  eeh 	} else { \
   1016      1.17  eeh 		__asm __volatile("wr %3,%%g0,%%asi; sllx %2,32,%0; " \
   1017      1.17  eeh " or %0,%1,%0; lduha [%0]%%asi,%0" : "=&r" (_lduha_v) : \
   1018      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
   1019       1.9  eeh 	} \
   1020       1.9  eeh 	_lduha_v; \
   1021       1.9  eeh })
   1022       1.9  eeh #endif
   1023       1.2  eeh 
   1024       1.9  eeh #ifdef __arch64__
   1025       1.6  eeh /* load unsigned int from alternate address space */
   1026       1.6  eeh #define	lda(loc, asi) ({ \
   1027  1.18.2.1  mrg 	register unsigned int _lda_v; \
   1028      1.14  eeh 	if (PHYS_ASI(asi)) { \
   1029  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; " \
   1030  1.18.2.1  mrg " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; " \
   1031  1.18.2.1  mrg " lda [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; " \
   1032  1.18.2.1  mrg " stxa %%g0,[%1] %4; membar #Sync" : "=&r" (_lda_v), "=r" (loc) : \
   1033      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
   1034       1.6  eeh 	} else { \
   1035       1.6  eeh 		__asm __volatile("wr %2,%%g0,%%asi; lda [%1]%%asi,%0" : "=r" (_lda_v) : \
   1036      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (asi)); \
   1037       1.6  eeh 	} \
   1038       1.6  eeh 	_lda_v; \
   1039       1.6  eeh })
   1040       1.6  eeh 
   1041       1.6  eeh /* load signed int from alternate address space */
   1042       1.6  eeh #define	ldswa(loc, asi) ({ \
   1043       1.6  eeh 	register int _lda_v; \
   1044      1.14  eeh 	if (PHYS_ASI(asi)) { \
   1045  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; " \
   1046  1.18.2.1  mrg " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; " \
   1047  1.18.2.1  mrg " ldswa [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; " \
   1048  1.18.2.1  mrg " stxa %%g0,[%1] %4; membar #Sync" : "=&r" (_lda_v), "=r" (loc) : \
   1049      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
   1050       1.6  eeh 	} else { \
   1051       1.6  eeh 		__asm __volatile("wr %2,%%g0,%%asi; ldswa [%1]%%asi,%0" : "=r" (_lda_v) : \
   1052      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (asi)); \
   1053       1.6  eeh 	} \
   1054       1.6  eeh 	_lda_v; \
   1055       1.6  eeh })
   1056       1.9  eeh #else	/* __arch64__ */
   1057       1.9  eeh /* load unsigned int from alternate address space */
   1058       1.9  eeh #define	lda(loc, asi) ({ \
   1059  1.18.2.1  mrg 	register unsigned int _lda_v, _loc_hi, _pstate; \
   1060       1.9  eeh 	_loc_hi = (((u_int64_t)loc)>>32); \
   1061      1.14  eeh 	if (PHYS_ASI(asi)) { \
   1062      1.14  eeh 		__asm __volatile("wr %4,%%g0,%%asi; rdpr %%pstate,%1;" \
   1063      1.14  eeh " andn %2,0x1f,%0; stxa %%g0,[%0] %5; wrpr %1,8,%%pstate; " \
   1064      1.14  eeh " sllx %3,32,%0; or %0,%2,%0; membar #Sync;lda [%0]%%asi,%0; " \
   1065  1.18.2.1  mrg " wrpr %1,0,%%pstate; andn %2,0x1f,%1; membar #Sync; " \
   1066  1.18.2.1  mrg " stxa %%g0,[%1] %5; membar #Sync" : "=&r" (_lda_v), "=&r" (_pstate) : \
   1067      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (_loc_hi), \
   1068       1.9  eeh 		"r" (asi), "n" (ASI_DCACHE_TAG)); \
   1069       1.9  eeh 	} else { \
   1070      1.17  eeh 		__asm __volatile("wr %3,%%g0,%%asi; sllx %2,32,%0; " \
   1071      1.17  eeh " or %0,%1,%0; lda [%0]%%asi,%0" : "=&r" (_lda_v) : \
   1072      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
   1073       1.9  eeh 	} \
   1074       1.9  eeh 	_lda_v; \
   1075       1.9  eeh })
   1076       1.9  eeh 
   1077       1.9  eeh /* load signed int from alternate address space */
   1078       1.9  eeh #define	ldswa(loc, asi) ({ \
   1079      1.14  eeh 	register int _lda_v, _loc_hi, _pstate; \
   1080       1.9  eeh 	_loc_hi = (((u_int64_t)loc)>>32); \
   1081      1.14  eeh 	if (PHYS_ASI(asi)) { \
   1082      1.14  eeh 		__asm __volatile("wr %4,%%g0,%%asi; rdpr %%pstate,%1;" \
   1083      1.14  eeh " andn %2,0x1f,%0; stxa %%g0,[%0] %5; wrpr %1,8,%%pstate; sllx %3,32,%0;" \
   1084  1.18.2.1  mrg " or %0,%2,%0; membar #Sync; ldswa [%0]%%asi,%0; wrpr %1,0,%%pstate; " \
   1085  1.18.2.1  mrg " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %5; membar #Sync" : \
   1086      1.14  eeh 		"=&r" (_lda_v), "=&r" (_pstate) : \
   1087      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (_loc_hi), \
   1088       1.9  eeh 		"r" (asi), "n" (ASI_DCACHE_TAG)); \
   1089       1.9  eeh 	} else { \
   1090      1.17  eeh 		__asm __volatile("wr %3,%%g0,%%asi; sllx %2,32,%0; " \
   1091      1.17  eeh " or %0,%1,%0; ldswa [%0]%%asi,%0" : "=&r" (_lda_v) : \
   1092      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
   1093       1.9  eeh 	} \
   1094       1.9  eeh 	_lda_v; \
   1095       1.9  eeh })
   1096       1.9  eeh #endif /* __arch64__ */
   1097       1.6  eeh 
   1098       1.9  eeh #ifdef	__arch64__
   1099  1.18.2.1  mrg /* load 64-bit int from alternate address space -- these should never be used */
   1100       1.6  eeh #define	ldda(loc, asi) ({ \
   1101       1.6  eeh 	register long long _lda_v; \
   1102      1.14  eeh 	if (PHYS_ASI(asi)) { \
   1103  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; " \
   1104  1.18.2.1  mrg " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; " \
   1105  1.18.2.1  mrg " ldda [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; " \
   1106  1.18.2.1  mrg " stxa %%g0,[%1] %4; membar #Sync" : "=&r" (_lda_v), "=&r" (loc) : \
   1107      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
   1108       1.6  eeh 	} else { \
   1109       1.6  eeh 		__asm __volatile("wr %2,%%g0,%%asi; ldda [%1]%%asi,%0" : "=r" (_lda_v) : \
   1110      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (asi)); \
   1111       1.6  eeh 	} \
   1112       1.6  eeh 	_lda_v; \
   1113       1.2  eeh })
   1114       1.9  eeh #else
   1115       1.9  eeh /* load 64-bit int from alternate address space */
   1116       1.9  eeh #define	ldda(loc, asi) ({ \
   1117      1.14  eeh 	register long long _lda_v, _loc_hi, _pstate; \
   1118       1.9  eeh 	_loc_hi = (((u_int64_t)loc)>>32); \
   1119      1.14  eeh 	if (PHYS_ASI(asi)) { \
   1120      1.14  eeh 		__asm __volatile("wr %4,%%g0,%%asi; rdpr %%pstate,%1;" \
   1121      1.14  eeh " andn %2,0x1f,%0; stxa %%g0,[%0] %5; wrpr %1,8,%%pstate;" \
   1122  1.18.2.1  mrg " sllx %3,32,%0; or %0,%2,%0; membar #Sync; ldda [%0]%%asi,%0; wrpr %1,0,%%pstate; " \
   1123  1.18.2.1  mrg " andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %5; membar #Sync" : \
   1124      1.14  eeh 		 "=&r" (_lda_v), "=&r" (_pstate) : \
   1125      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi), "n" (ASI_DCACHE_TAG)); \
   1126       1.9  eeh 	} else { \
   1127      1.17  eeh 		__asm __volatile("wr %3,%%g0,%%asi; sllx %2,32,%0; " \
   1128      1.17  eeh " or %0,%1,%0; ldda [%0]%%asi,%0" : "=&r" (_lda_v) : \
   1129      1.17  eeh 			"r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
   1130       1.9  eeh 	} \
   1131       1.9  eeh 	_lda_v; \
   1132       1.9  eeh })
   1133       1.9  eeh #endif
   1134       1.2  eeh 
   1135       1.6  eeh #ifdef __arch64__
   1136       1.6  eeh /* native load 64-bit int from alternate address space w/64-bit compiler*/
   1137       1.6  eeh #define	ldxa(loc, asi) ({ \
   1138      1.16  eeh 	register unsigned long _lda_v; \
   1139      1.14  eeh 	if (PHYS_ASI(asi)) { \
   1140  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; "\
   1141  1.18.2.1  mrg " andn %2,0x1f,%0; stxa %%g0,[%0] %4; membar #Sync; " \
   1142  1.18.2.1  mrg " ldxa [%2]%%asi,%0; andn %2,0x1f,%1; membar #Sync; " \
   1143  1.18.2.1  mrg " stxa %%g0,[%1] %4; membar #Sync" : "=&r" (_lda_v), "=r" (loc) : \
   1144      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (asi), "n" (ASI_DCACHE_TAG)); \
   1145       1.6  eeh 	} else { \
   1146       1.6  eeh 		__asm __volatile("wr %2,%%g0,%%asi; ldxa [%1]%%asi,%0" : "=r" (_lda_v) : \
   1147      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (asi)); \
   1148       1.6  eeh 	} \
   1149       1.6  eeh 	_lda_v; \
   1150       1.6  eeh })
   1151       1.6  eeh #else
   1152       1.6  eeh /* native load 64-bit int from alternate address space w/32-bit compiler*/
   1153       1.6  eeh #define	ldxa(loc, asi) ({ \
   1154      1.16  eeh 	register unsigned long _ldxa_lo, _ldxa_hi, _loc_hi; \
   1155       1.9  eeh 	_loc_hi = (((u_int64_t)loc)>>32); \
   1156      1.14  eeh 	if (PHYS_ASI(asi)) { \
   1157       1.9  eeh 		__asm __volatile("wr %4,%%g0,%%asi; " \
   1158      1.14  eeh " andn %2,0x1f,%0; rdpr %%pstate,%1; stxa %%g0,[%0] %5; " \
   1159      1.14  eeh " sllx %3,32,%0; wrpr %1,8,%%pstate; or %0,%2,%0; membar #Sync; ldxa [%0]%%asi,%0; " \
   1160  1.18.2.1  mrg " wrpr %1,0,%%pstate; andn %2,0x1f,%1; membar #Sync; stxa %%g0,[%1] %5; membar #Sync; " \
   1161  1.18.2.1  mrg " srlx %0,32,%1; srl %0,0,%0" : \
   1162       1.9  eeh 		"=&r" (_ldxa_lo), "=&r" (_ldxa_hi) : \
   1163      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (_loc_hi), \
   1164       1.9  eeh 		"r" (asi), "n" (ASI_DCACHE_TAG)); \
   1165       1.6  eeh 	} else { \
   1166      1.10  eeh 		__asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; " \
   1167      1.17  eeh " or %0,%2,%0; ldxa [%0]%%asi,%0; srlx %0,32,%1; srl %0,0,%0;" : \
   1168       1.9  eeh 		"=&r" (_ldxa_lo), "=&r" (_ldxa_hi) : \
   1169      1.16  eeh 		"r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
   1170       1.6  eeh 	} \
   1171       1.6  eeh 	((((int64_t)_ldxa_hi)<<32)|_ldxa_lo); \
   1172       1.2  eeh })
   1173       1.6  eeh #endif
   1174       1.2  eeh 
   1175       1.2  eeh 
   1176  1.18.2.1  mrg /* store byte to alternate address space */
   1177       1.5  mrg #ifdef __arch64__
   1178  1.18.2.1  mrg #define	stba(loc, asi, value) ({ \
   1179      1.17  eeh 	if (PHYS_ASI(asi)) { \
   1180  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; stba %1,[%2]%%asi;" \
   1181  1.18.2.1  mrg " andn %2,0x1f,%0; membar #Sync; stxa %%g0,[%0] %4; membar #Sync" : "=&r" (loc) : \
   1182  1.18.2.1  mrg 			"r" ((int)(value)), "r" ((unsigned long)(loc)), \
   1183  1.18.2.1  mrg 			"r" (asi), "n" (ASI_DCACHE_TAG)); \
   1184      1.17  eeh 	} else { \
   1185  1.18.2.1  mrg 		__asm __volatile("wr %2,%%g0,%%asi; stba %0,[%1]%%asi" : : \
   1186  1.18.2.1  mrg 		"r" ((int)(value)), "r" ((unsigned long)(loc)), "r" (asi)); \
   1187      1.17  eeh 	} \
   1188       1.6  eeh })
   1189       1.9  eeh #else
   1190       1.9  eeh #define	stba(loc, asi, value) ({ \
   1191      1.14  eeh 	register int _loc_hi, _pstate; \
   1192       1.9  eeh 	_loc_hi = (((u_int64_t)loc)>>32); \
   1193      1.17  eeh 	if (PHYS_ASI(asi)) { \
   1194      1.17  eeh 		__asm __volatile("wr %5,%%g0,%%asi; sllx %4,32,%0; rdpr %%pstate,%1;" \
   1195  1.18.2.1  mrg " or %3,%0,%0; wrpr %1,8,%%pstate; stba %2,[%0]%%asi; wrpr %1,0,%%pstate; " \
   1196  1.18.2.1  mrg " andn %0,0x1f,%1;  membar #Sync; stxa %%g0,[%1] %6; membar #Sync" : \
   1197      1.14  eeh 		"=&r" (_loc_hi), "=&r" (_pstate) : \
   1198      1.17  eeh 		"r" ((int)(value)), "r" ((unsigned long)(loc)), \
   1199  1.18.2.1  mrg 		"r" (_loc_hi), "r" (asi), "n" (ASI_DCACHE_TAG)); \
   1200      1.17  eeh 	} else { \
   1201      1.17  eeh 		__asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; " \
   1202      1.17  eeh " or %2,%0,%0; stba %1,[%0]%%asi" : "=&r" (_loc_hi) : \
   1203      1.17  eeh 		"r" ((int)(value)), "r" ((unsigned long)(loc)), \
   1204      1.17  eeh 		"r" (_loc_hi), "r" (asi)); \
   1205      1.17  eeh 	} \
   1206       1.9  eeh })
   1207       1.9  eeh #endif
   1208       1.6  eeh 
   1209       1.6  eeh /* store half-word to alternate address space */
   1210       1.9  eeh #ifdef __arch64__
   1211       1.6  eeh #define	stha(loc, asi, value) ({ \
   1212  1.18.2.1  mrg 	if (PHYS_ASI(asi)) { \
   1213  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; stha %1,[%2]%%asi;" \
   1214  1.18.2.1  mrg " andn %2,0x1f,%0; membar #Sync; stxa %%g0,[%0] %4; membar #Sync" : "=&r" (loc) : \
   1215  1.18.2.1  mrg 			"r" ((int)(value)), "r" ((unsigned long)(loc)), \
   1216  1.18.2.1  mrg 			"r" (asi), "n" (ASI_DCACHE_TAG)); \
   1217  1.18.2.1  mrg 	} else { \
   1218       1.6  eeh 	__asm __volatile("wr %2,%%g0,%%asi; stha %0,[%1]%%asi" : : \
   1219      1.16  eeh 	    "r" ((int)(value)), "r" ((unsigned long)(loc)), "r" (asi)); \
   1220  1.18.2.1  mrg 	} \
   1221       1.6  eeh })
   1222       1.9  eeh #else
   1223       1.9  eeh #define	stha(loc, asi, value) ({ \
   1224      1.14  eeh 	register int _loc_hi, _pstate; \
   1225       1.9  eeh 	_loc_hi = (((u_int64_t)loc)>>32); \
   1226      1.17  eeh 	if (PHYS_ASI(asi)) { \
   1227      1.17  eeh 		__asm __volatile("wr %5,%%g0,%%asi; sllx %4,32,%0; rdpr %%pstate,%1;" \
   1228  1.18.2.1  mrg " or %3,%0,%0; wrpr %1,8,%%pstate; stha %2,[%0]%%asi; wrpr %1,0,%%pstate; " \
   1229  1.18.2.1  mrg " andn %0,0x1f,%1;  membar #Sync; stxa %%g0,[%1] %6; membar #Sync" : \
   1230      1.14  eeh 		"=&r" (_loc_hi), "=&r" (_pstate) : \
   1231      1.17  eeh 		"r" ((int)(value)), "r" ((unsigned long)(loc)), \
   1232  1.18.2.1  mrg 		"r" (_loc_hi), "r" (asi), "n" (ASI_DCACHE_TAG)); \
   1233      1.17  eeh 	} else { \
   1234      1.17  eeh 		__asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; " \
   1235      1.17  eeh " or %2,%0,%0; stha %1,[%0]%%asi" : "=&r" (_loc_hi) : \
   1236      1.17  eeh 	    "r" ((int)(value)), "r" ((unsigned long)(loc)), \
   1237      1.17  eeh 		"r" (_loc_hi), "r" (asi)); \
   1238      1.17  eeh 	} \
   1239       1.9  eeh })
   1240       1.9  eeh #endif
   1241       1.6  eeh 
   1242       1.6  eeh /* store int to alternate address space */
   1243       1.9  eeh #ifdef __arch64__
   1244       1.6  eeh #define	sta(loc, asi, value) ({ \
   1245  1.18.2.1  mrg 	if (PHYS_ASI(asi)) { \
   1246  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; sta %1,[%2]%%asi;" \
   1247  1.18.2.1  mrg " andn %2,0x1f,%0; membar #Sync; stxa %%g0,[%0] %4; membar #Sync" : "=&r" (loc) : \
   1248  1.18.2.1  mrg 			"r" ((int)(value)), "r" ((unsigned long)(loc)), \
   1249  1.18.2.1  mrg 			"r" (asi), "n" (ASI_DCACHE_TAG)); \
   1250  1.18.2.1  mrg 	} else { \
   1251       1.6  eeh 	__asm __volatile("wr %2,%%g0,%%asi; sta %0,[%1]%%asi" : : \
   1252      1.16  eeh 	    "r" ((int)(value)), "r" ((unsigned long)(loc)), "r" (asi)); \
   1253  1.18.2.1  mrg 	} \
   1254       1.6  eeh })
   1255       1.9  eeh #else
   1256       1.9  eeh #define	sta(loc, asi, value) ({ \
   1257      1.14  eeh 	register int _loc_hi, _pstate; \
   1258       1.9  eeh 	_loc_hi = (((u_int64_t)loc)>>32); \
   1259      1.17  eeh 	if (PHYS_ASI(asi)) { \
   1260      1.17  eeh 		__asm __volatile("wr %5,%%g0,%%asi; sllx %4,32,%0; rdpr %%pstate,%1;" \
   1261  1.18.2.1  mrg " or %3,%0,%0; wrpr %1,8,%%pstate; sta %2,[%0]%%asi; wrpr %1,0,%%pstate; " \
   1262  1.18.2.1  mrg " andn %0,0x1f,%1;  membar #Sync; stxa %%g0,[%1] %6; membar #Sync" : \
   1263      1.14  eeh 		"=&r" (_loc_hi), "=&r" (_pstate) : \
   1264      1.17  eeh 		"r" ((int)(value)), "r" ((unsigned long)(loc)), \
   1265  1.18.2.1  mrg 		"r" (_loc_hi), "r" (asi), "n" (ASI_DCACHE_TAG)); \
   1266      1.17  eeh 	} else { \
   1267      1.17  eeh 		__asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; " \
   1268      1.17  eeh " or %2,%0,%0; sta %1,[%0]%%asi" : "=&r" (_loc_hi) : \
   1269      1.17  eeh 		"r" ((int)(value)), "r" ((unsigned long)(loc)), \
   1270      1.17  eeh 		"r" (_loc_hi), "r" (asi)); \
   1271      1.17  eeh 	} \
   1272       1.9  eeh })
   1273       1.9  eeh #endif
   1274       1.6  eeh 
   1275       1.6  eeh /* store 64-bit int to alternate address space */
   1276       1.9  eeh #ifdef __arch64__
   1277       1.6  eeh #define	stda(loc, asi, value) ({ \
   1278  1.18.2.1  mrg 	if (PHYS_ASI(asi)) { \
   1279  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; stda %1,[%2]%%asi;" \
   1280  1.18.2.1  mrg " andn %2,0x1f,%0; membar #Sync; stxa %%g0,[%0] %4; membar #Sync" : "=&r" (loc) : \
   1281  1.18.2.1  mrg 			"r" ((int)(value)), "r" ((unsigned long)(loc)), \
   1282  1.18.2.1  mrg 			"r" (asi), "n" (ASI_DCACHE_TAG)); \
   1283  1.18.2.1  mrg 	} else { \
   1284       1.6  eeh 	__asm __volatile("wr %2,%%g0,%%asi; stda %0,[%1]%%asi" : : \
   1285      1.16  eeh 	    "r" ((long long)(value)), "r" ((unsigned long)(loc)), "r" (asi)); \
   1286  1.18.2.1  mrg 	} \
   1287       1.6  eeh })
   1288       1.9  eeh #else
   1289       1.9  eeh #define	stda(loc, asi, value) ({ \
   1290      1.14  eeh 	register int _loc_hi, _pstate; \
   1291       1.9  eeh 	_loc_hi = (((u_int64_t)loc)>>32); \
   1292      1.17  eeh 	if (PHYS_ASI(asi)) { \
   1293      1.14  eeh 	__asm __volatile("wr %5,%%g0,%%asi; sllx %4,32,%0; rdpr %%pstate,%1; " \
   1294  1.18.2.1  mrg " or %3,%0,%0; wrpr %1,8,%%pstate; stda %2,[%0]%%asi; wrpr %1,0,%%pstate;" \
   1295  1.18.2.1  mrg " andn %0,0x1f,%1;  membar #Sync; stxa %%g0,[%1] %6; membar #Sync" : \
   1296      1.14  eeh 		"=&r" (_loc_hi), "=&r" (_pstate) : \
   1297      1.17  eeh 		"r" ((long long)(value)), "r" ((unsigned long)(loc)), \
   1298  1.18.2.1  mrg 		"r" (_loc_hi), "r" (asi), "n" (ASI_DCACHE_TAG)); \
   1299      1.17  eeh 	} else { \
   1300      1.17  eeh 		__asm __volatile("wr %4,%%g0,%%asi; sllx %3,32,%0; " \
   1301      1.17  eeh " or %2,%0,%0; stda %1,[%0]%%asi" : "=&r" (_loc_hi) : \
   1302      1.17  eeh 		"r" ((long long)(value)), "r" ((unsigned long)(loc)), \
   1303      1.17  eeh 		"r" (_loc_hi), "r" (asi)); \
   1304      1.17  eeh 	} \
   1305       1.9  eeh })
   1306       1.9  eeh #endif
   1307       1.1  eeh 
   1308       1.5  mrg #ifdef __arch64__
   1309       1.2  eeh /* native store 64-bit int to alternate address space w/64-bit compiler*/
   1310       1.2  eeh #define	stxa(loc, asi, value) ({ \
   1311  1.18.2.1  mrg 	if (PHYS_ASI(asi)) { \
   1312  1.18.2.1  mrg 		__asm __volatile("wr %3,%%g0,%%asi; stxa %1,[%2]%%asi;" \
   1313  1.18.2.1  mrg " andn %2,0x1f,%0; membar #Sync; stxa %%g0,[%0] %4; membar #Sync" : "=&r" (loc) : \
   1314  1.18.2.1  mrg 			"r" ((u_int64_t)(value)), "r" ((unsigned long)(loc)), \
   1315  1.18.2.1  mrg 			"r" (asi), "n" (ASI_DCACHE_TAG)); \
   1316  1.18.2.1  mrg 	} else { \
   1317       1.6  eeh 	__asm __volatile("wr %2,%%g0,%%asi; stxa %0,[%1]%%asi" : : \
   1318      1.16  eeh 	    "r" ((unsigned long)(value)), "r" ((unsigned long)(loc)), "r" (asi)); \
   1318      1.16  eeh 	} \
   1319       1.2  eeh })
   1320       1.2  eeh #else
   1321       1.2  eeh /* native store 64-bit int to alternate address space w/32-bit compiler*/
   1322       1.2  eeh #define	stxa(loc, asi, value) ({ \
   1323       1.9  eeh 	int _stxa_lo, _stxa_hi, _loc_hi; \
   1324       1.9  eeh 	_stxa_lo = value; _stxa_hi = ((u_int64_t)value)>>32; \
   1325      1.14  eeh 	_loc_hi = (((u_int64_t)(u_long)loc)>>32); \
   1326      1.17  eeh 	if (PHYS_ASI(asi)) { \
   1327      1.17  eeh 		__asm __volatile("wr %7,%%g0,%%asi; sllx %4,32,%1; sllx %6,32,%0; " \
   1328      1.14  eeh " or %1,%3,%1; rdpr %%pstate,%2; or %0,%5,%0; wrpr %2,8,%%pstate; " \
   1329  1.18.2.1  mrg " stxa %1,[%0]%%asi; wrpr %2,0,%%pstate; "  \
   1330  1.18.2.1  mrg " andn %0,0x1f,%1;  membar #Sync; stxa %%g0,[%1] %8; membar #Sync": \
   1331      1.17  eeh 		"=&r" (_loc_hi), "=&r" (_stxa_hi), "=&r" (_stxa_lo): \
   1332      1.17  eeh 		"r" ((int)(_stxa_lo)), "r" ((int)(_stxa_hi)), \
   1333  1.18.2.1  mrg 		"r" ((unsigned long)(loc)), "r" (_loc_hi), \
   1334  1.18.2.1  mrg 		"r" (asi), "n" (ASI_DCACHE_TAG)); \
   1335      1.17  eeh 	} else { \
   1336      1.17  eeh 	__asm __volatile("wr %6,%%g0,%%asi; sllx %3,32,%1; sllx %5,32,%0; " \
   1337      1.17  eeh " or %1,%2,%1; or %0,%4,%0; stxa %1,[%0]%%asi" : \
   1338      1.17  eeh 	    "=&r" (_loc_hi), "=&r" (_stxa_hi) : \
   1339       1.9  eeh 	    "r" ((int)(_stxa_lo)), "r" ((int)(_stxa_hi)), \
   1340      1.16  eeh 	    "r" ((unsigned long)(loc)), "r" (_loc_hi), "r" (asi)); \
   1341      1.17  eeh 	} \
   1342       1.2  eeh })
   1343       1.1  eeh #endif
   1344  1.18.2.1  mrg #endif
   1345       1.1  eeh 
   1346       1.2  eeh /* flush address from instruction memory (V9 FLUSH: expose prior stores to I-fetch) */
   1347       1.2  eeh #define flush(loc) ({ \
   1348       1.2  eeh 	__asm __volatile("flush %0" : : \
   1349      1.16  eeh 	     "r" ((unsigned long)(loc))); \
   1350       1.2  eeh })
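
/*
 * Illustrative sketch (an assumption, not taken from the original
 * source): flush() would typically follow a store that modifies
 * instruction memory, so that later instruction fetches from that
 * doubleword see the new contents (insn_va and new_insn are
 * hypothetical):
 *
 *	*(u_int32_t *)insn_va = new_insn;
 *	flush(insn_va);
 */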
   1351       1.2  eeh 
   1352       1.6  eeh /* Flush a D$ line */
   1353       1.6  eeh #if 0
   1354       1.6  eeh #define flushline(loc) ({ \
   1355       1.6  eeh 	stxa(((paddr_t)loc)&(~0x1f), (ASI_DCACHE_TAG), 0); \
   1356       1.6  eeh         membar_sync(); \
   1357       1.6  eeh })
   1358       1.6  eeh #else
   1359       1.6  eeh #define flushline(loc)
   1360       1.6  eeh #endif
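
/*
 * Illustrative sketch (assumption, not from the original source): the
 * disabled flushline() above invalidates one 32-byte D$ line by
 * clearing its tag; flushing a whole buffer would step through it one
 * line at a time (start and len are hypothetical):
 *
 *	paddr_t pa;
 *
 *	for (pa = start & ~0x1f; pa < start + len; pa += 0x20) {
 *		stxa(pa, ASI_DCACHE_TAG, 0);
 *		membar_sync();
 *	}
 */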
   1361       1.6  eeh 
   1362       1.6  eeh /* The following two macros enable or disable the D$ via the MCCR (LSU control register) */
   1363       1.6  eeh #define dcenable() ({ \
   1364       1.6  eeh 	int res; \
   1365       1.6  eeh 	__asm __volatile("ldxa [%%g0] %1,%0; or %0,%2,%0; stxa %0,[%%g0] %1; membar #Sync" \
   1366       1.6  eeh 		: "=&r" (res) : "n" (ASI_MCCR), "n" (MCCR_DCACHE_EN)); \
   1367       1.6  eeh })
   1368       1.6  eeh #define dcdisable() ({ \
   1369       1.6  eeh 	int res; \
   1370       1.6  eeh 	__asm __volatile("ldxa [%%g0] %1,%0; andn %0,%2,%0; stxa %0,[%%g0] %1; membar #Sync" \
   1371       1.6  eeh 		: "=&r" (res) : "n" (ASI_MCCR), "n" (MCCR_DCACHE_EN)); \
   1372       1.6  eeh })
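
/*
 * Usage sketch (illustrative assumption): a code sequence that must run
 * with the D$ turned off could bracket itself with these macros:
 *
 *	dcdisable();
 *	(accesses that must not be satisfied from the D$)
 *	dcenable();
 *
 * Both macros read-modify-write the MCCR through ASI_MCCR and drain the
 * update with membar #Sync.
 */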
   1373       1.6  eeh 
   1374       1.6  eeh /*
   1375       1.6  eeh  * SPARC V9 memory barrier instructions.
   1376       1.6  eeh  */
   1377       1.6  eeh /* Make all stores complete before next store */
   1378       1.6  eeh #define membar_storestore() __asm __volatile("membar #StoreStore" : :)
   1379       1.6  eeh /* Make all loads complete before next store */
   1380       1.6  eeh #define membar_loadstore() __asm __volatile("membar #LoadStore" : :)
   1381       1.6  eeh /* Make all stores complete before next load */
   1382       1.6  eeh #define membar_storeload() __asm __volatile("membar #StoreLoad" : :)
   1383       1.6  eeh /* Make all loads complete before next load */
   1384       1.6  eeh #define membar_loadload() __asm __volatile("membar #LoadLoad" : :)
   1385       1.6  eeh /* Complete all outstanding memory operations and exceptions */
   1386       1.2  eeh #define membar_sync() __asm __volatile("membar #Sync" : :)
   1387       1.6  eeh /* Complete all outstanding memory operations */
   1388       1.6  eeh #define membar_memissue() __asm __volatile("membar #MemIssue" : :)
   1389       1.6  eeh /* Complete all outstanding stores before any new loads */
   1390       1.6  eeh #define membar_lookaside() __asm __volatile("membar #Lookaside" : :)
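
/*
 * Illustrative sketch (assumption, not from the original source): a
 * producer/consumer hand-off would pair the store-side and load-side
 * barriers (buf is a hypothetical shared structure):
 *
 *	producer:
 *		buf->data = v;
 *		membar_storestore();	order the payload before the flag
 *		buf->ready = 1;
 *
 *	consumer:
 *		while (buf->ready == 0)
 *			;
 *		membar_loadload();	order the flag check before the read
 *		v = buf->data;
 */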
   1391       1.2  eeh 
   1392       1.5  mrg #ifdef __arch64__
   1393       1.2  eeh /* read 64-bit %tick register */
   1394       1.2  eeh #define	tick() ({ \
   1395       1.3  eeh 	register u_long _tick_tmp; \
   1396       1.2  eeh 	__asm __volatile("rdpr %%tick, %0" : "=r" (_tick_tmp) :); \
   1397       1.2  eeh 	_tick_tmp; \
   1398       1.2  eeh })
   1399       1.2  eeh #else
   1400       1.9  eeh /* read 64-bit %tick register on 32-bit system */
   1401       1.2  eeh #define	tick() ({ \
   1402      1.10  eeh 	register unsigned int _tick_hi = 0, _tick_lo = 0; \
   1403      1.10  eeh 	__asm __volatile("rdpr %%tick, %0; srlx %0,32,%1; srl %0,0,%0" \
   1404      1.10  eeh 		: "=r" (_tick_lo), "=r" (_tick_hi) : ); \
   1405      1.10  eeh 	(((u_int64_t)_tick_hi)<<32)|((u_int64_t)_tick_lo); \
   1406       1.2  eeh })
   1407       1.1  eeh #endif
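
/*
 * Usage sketch (illustrative only): %tick increments once per CPU
 * clock, so a simple interval measurement is just two reads:
 *
 *	u_int64_t start, delta;
 *
 *	start = tick();
 *	(work being measured)
 *	delta = tick() - start;
 */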
   1408       1.2  eeh 
   1409      1.12  mrg extern void next_tick __P((long));
   1410       1.9  eeh #endif
   1411