/*	$NetBSD: copyout.c,v 1.7 2020/03/05 00:33:56 rin Exp $	*/

/*-
 * Copyright (c) 2010, 2011 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Raytheon BBN Technologies Corp and Defense Advanced Research Projects
 * Agency and which was developed by Matt Thomas of 3am Software Foundry.
 *
 * This material is based upon work supported by the Defense Advanced Research
 * Projects Agency and Space and Naval Warfare Systems Center, Pacific, under
 * Contract No. N66001-09-C-2073.
 * Approved for Public Release, Distribution Unlimited
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <sys/cdefs.h>
__KERNEL_RCSID(0, "$NetBSD: copyout.c,v 1.7 2020/03/05 00:33:56 rin Exp $");

#define	__UFETCHSTORE_PRIVATE

#include <sys/param.h>
#include <sys/lwp.h>
#include <sys/systm.h>

#include <powerpc/pcb.h>

#include <powerpc/booke/cpuvar.h>

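/*
 * Each helper below performs its user-space stores inside a small
 * MSR[DS] window: the current MSR is saved, MSR is switched to ds_msr
 * (the caller's MSR with PSL_DS added) so that data accesses are
 * translated in the user address space, the store(s) are issued, and
 * the original MSR is restored.  The sync/isync pairs order and
 * synchronize the MSR updates against the surrounding storage accesses.
 * Callers must have armed pcb_onfault via setfault() beforehand so that
 * a fault on the user address unwinds cleanly instead of being fatal.
 */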
static inline void
copyout_uint8(uint8_t *udaddr, uint8_t data, register_t ds_msr)
{
	register_t msr;
	__asm volatile(
		"mfmsr	%[msr]"				/* Save MSR */
	"\n\t"	"mtmsr	%[ds_msr]; sync; isync"		/* DS on */
	"\n\t"	"stb	%[data],0(%[udaddr])"		/* store user byte */
	"\n\t"	"mtmsr	%[msr]; sync; isync"		/* DS off */
	    : [msr] "=&r" (msr)
	    : [ds_msr] "r" (ds_msr), [data] "r" (data), [udaddr] "b" (udaddr));
}

static inline void
copyout_uint16(uint16_t *udaddr, uint16_t data, register_t ds_msr)
{
	register_t msr;
	__asm volatile(
		"mfmsr	%[msr]"				/* Save MSR */
	"\n\t"	"mtmsr	%[ds_msr]; sync; isync"		/* DS on */
	"\n\t"	"sth	%[data],0(%[udaddr])"		/* store user half */
	"\n\t"	"mtmsr	%[msr]; sync; isync"		/* DS off */
	    : [msr] "=&r" (msr)
	    : [ds_msr] "r" (ds_msr), [data] "r" (data), [udaddr] "b" (udaddr));
}

static inline void
copyout_uint32(uint32_t * const udaddr, uint32_t data, register_t ds_msr)
{
	register_t msr;
	__asm volatile(
		"mfmsr	%[msr]"				/* Save MSR */
	"\n\t"	"mtmsr	%[ds_msr]; sync; isync"		/* DS on */
	"\n\t"	"stw	%[data],0(%[udaddr])"		/* store user data */
	"\n\t"	"mtmsr	%[msr]; sync; isync"		/* DS off */
	    : [msr] "=&r" (msr)
	    : [ds_msr] "r" (ds_msr), [data] "r" (data), [udaddr] "b" (udaddr));
}

#if 0
static inline void
copyout_le32(uint32_t * const udaddr, uint32_t data, register_t ds_msr)
{
	register_t msr;
	__asm volatile(
		"mfmsr	%[msr]"				/* Save MSR */
	"\n\t"	"mtmsr	%[ds_msr]; sync; isync"		/* DS on */
	"\n\t"	"stwbrx	%[data],0,%[udaddr]"		/* store user data */
	"\n\t"	"mtmsr	%[msr]; sync; isync"		/* DS off */
	    : [msr] "=&r" (msr)
	    : [ds_msr] "r" (ds_msr), [data] "r" (data), [udaddr] "b" (udaddr));
}

static inline void
copyout_le32_with_mask(uint32_t * const udaddr, uint32_t data,
	uint32_t mask, register_t ds_msr)
{
	register_t msr;
	uint32_t tmp;
	KASSERT((data & ~mask) == 0);
	__asm volatile(
		"mfmsr	%[msr]"				/* Save MSR */
	"\n\t"	"mtmsr	%[ds_msr]; sync; isync"		/* DS on */
	"\n\t"	"lwbrx	%[tmp],0,%[udaddr]"		/* fetch user data */
	"\n\t"	"andc	%[tmp],%[tmp],%[mask]"		/* mask out new data */
	"\n\t"	"or	%[tmp],%[tmp],%[data]"		/* merge new data */
	"\n\t"	"stwbrx	%[tmp],0,%[udaddr]"		/* store user data */
	"\n\t"	"mtmsr	%[msr]; sync; isync"		/* DS off */
	    : [msr] "=&r" (msr), [tmp] "=&r" (tmp)
	    : [ds_msr] "r" (ds_msr), [data] "r" (data),
	      [mask] "r" (mask), [udaddr] "b" (udaddr));
}
#endif

static inline void
copyout_16uint8s(const uint8_t *ksaddr8, uint8_t *udaddr8, register_t ds_msr)
{
	register_t msr;
	__asm volatile(
		"mfmsr	%[msr]"				/* Save MSR */
	"\n\t"	"mtmsr	%[ds_msr]; sync; isync"		/* DS on */
	"\n\t"	"stb	%[data0],0(%[udaddr8])"		/* store user data */
	"\n\t"	"stb	%[data1],1(%[udaddr8])"		/* store user data */
	"\n\t"	"stb	%[data2],2(%[udaddr8])"		/* store user data */
	"\n\t"	"stb	%[data3],3(%[udaddr8])"		/* store user data */
	"\n\t"	"stb	%[data4],4(%[udaddr8])"		/* store user data */
	"\n\t"	"stb	%[data5],5(%[udaddr8])"		/* store user data */
	"\n\t"	"stb	%[data6],6(%[udaddr8])"		/* store user data */
	"\n\t"	"stb	%[data7],7(%[udaddr8])"		/* store user data */
	"\n\t"	"stb	%[data8],8(%[udaddr8])"		/* store user data */
	"\n\t"	"stb	%[data9],9(%[udaddr8])"		/* store user data */
	"\n\t"	"stb	%[data10],10(%[udaddr8])"	/* store user data */
	"\n\t"	"stb	%[data11],11(%[udaddr8])"	/* store user data */
	"\n\t"	"stb	%[data12],12(%[udaddr8])"	/* store user data */
	"\n\t"	"stb	%[data13],13(%[udaddr8])"	/* store user data */
	"\n\t"	"stb	%[data14],14(%[udaddr8])"	/* store user data */
	"\n\t"	"stb	%[data15],15(%[udaddr8])"	/* store user data */
	"\n\t"	"mtmsr	%[msr]; sync; isync"		/* DS off */
	    : [msr] "=&r" (msr)
	    : [ds_msr] "r" (ds_msr), [udaddr8] "b" (udaddr8),
	      [data0] "r" (ksaddr8[0]), [data1] "r" (ksaddr8[1]),
	      [data2] "r" (ksaddr8[2]), [data3] "r" (ksaddr8[3]),
	      [data4] "r" (ksaddr8[4]), [data5] "r" (ksaddr8[5]),
	      [data6] "r" (ksaddr8[6]), [data7] "r" (ksaddr8[7]),
	      [data8] "r" (ksaddr8[8]), [data9] "r" (ksaddr8[9]),
	      [data10] "r" (ksaddr8[10]), [data11] "r" (ksaddr8[11]),
	      [data12] "r" (ksaddr8[12]), [data13] "r" (ksaddr8[13]),
	      [data14] "r" (ksaddr8[14]), [data15] "r" (ksaddr8[15]));
}

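/*
 * Store eight 32-bit words in one MSR[DS] window.  The "and." against
 * line_mask checks whether the destination starts on a data cache line
 * boundary; if so, dcba is issued as a hint to establish the line in
 * the cache without first fetching it from memory.
 */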
static inline void
copyout_8uint32s(const uint32_t * const ksaddr32, uint32_t * const udaddr32,
	const register_t ds_msr, const size_t line_mask)
{
	register_t msr;
	register_t tmp;
	__asm volatile(
		"and.	%[tmp],%[line_mask],%[udaddr32]"
	"\n\t"	"mfmsr	%[msr]"				/* Save MSR */
	"\n\t"	"mtmsr	%[ds_msr]; sync; isync"		/* DS on */
	"\n\t"	"bne	0,1f"
	"\n\t"	"dcba	0,%[udaddr32]"
	"\n"	"1:"
	"\n\t"	"stw	%[data0],0(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data1],4(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data2],8(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data3],12(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data4],16(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data5],20(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data6],24(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data7],28(%[udaddr32])"	/* store user data */
	"\n\t"	"mtmsr	%[msr]; sync; isync"		/* DS off */
	    : [msr] "=&r" (msr), [tmp] "=&r" (tmp)
	    : [ds_msr] "r" (ds_msr), [udaddr32] "b" (udaddr32),
	      [line_mask] "r" (line_mask),
	      [data0] "r" (ksaddr32[0]), [data1] "r" (ksaddr32[1]),
	      [data2] "r" (ksaddr32[2]), [data3] "r" (ksaddr32[3]),
	      [data4] "r" (ksaddr32[4]), [data5] "r" (ksaddr32[5]),
	      [data6] "r" (ksaddr32[6]), [data7] "r" (ksaddr32[7])
	    : "cr0");
}

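/*
 * As above, but for sixteen words (64 bytes).  When the cache line size
 * is 32 bytes (checked into cr2), the 64-byte span covers two lines and
 * a second dcba is issued at offset line_size.
 */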
static inline void
copyout_16uint32s(const uint32_t * const ksaddr32, uint32_t * const udaddr32,
	const register_t ds_msr, const size_t line_mask)
{
	KASSERT(((uintptr_t)udaddr32 & line_mask) == 0);
	register_t msr;
	register_t tmp;
	__asm volatile(
		"and.	%[tmp],%[line_mask],%[udaddr32]"
	"\n\t"	"cmplwi	2,%[line_size],32"
	"\n\t"	"mfmsr	%[msr]"				/* Save MSR */
	"\n\t"	"mtmsr	%[ds_msr]; sync; isync"		/* DS on */
	"\n\t"	"bne	0,1f"
	"\n\t"	"dcba	0,%[udaddr32]"
	"\n\t"	"bne	2,1f"
	"\n\t"	"dcba	%[line_size],%[udaddr32]"
	"\n"	"1:"
	"\n\t"	"stw	%[data0],0(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data1],4(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data2],8(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data3],12(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data4],16(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data5],20(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data6],24(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data7],28(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data8],32(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data9],36(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data10],40(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data11],44(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data12],48(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data13],52(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data14],56(%[udaddr32])"	/* store user data */
	"\n\t"	"stw	%[data15],60(%[udaddr32])"	/* store user data */
	"\n\t"	"mtmsr	%[msr]; sync; isync"		/* DS off */
	    : [msr] "=&r" (msr), [tmp] "=&r" (tmp)
	    : [ds_msr] "r" (ds_msr), [udaddr32] "b" (udaddr32),
	      [line_size] "r" (line_mask + 1), [line_mask] "r" (line_mask),
	      [data0] "r" (ksaddr32[0]), [data1] "r" (ksaddr32[1]),
	      [data2] "r" (ksaddr32[2]), [data3] "r" (ksaddr32[3]),
	      [data4] "r" (ksaddr32[4]), [data5] "r" (ksaddr32[5]),
	      [data6] "r" (ksaddr32[6]), [data7] "r" (ksaddr32[7]),
	      [data8] "r" (ksaddr32[8]), [data9] "r" (ksaddr32[9]),
	      [data10] "r" (ksaddr32[10]), [data11] "r" (ksaddr32[11]),
	      [data12] "r" (ksaddr32[12]), [data13] "r" (ksaddr32[13]),
	      [data14] "r" (ksaddr32[14]), [data15] "r" (ksaddr32[15])
	    : "cr0", "cr2");
}

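/*
 * Byte-granularity copy, used for short copies and for unaligned heads
 * and tails: 16 bytes per MSR[DS] window while enough data remains,
 * then one byte at a time.
 */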
static inline void
copyout_uint8s(vaddr_t ksaddr, vaddr_t udaddr, size_t len, register_t ds_msr)
{
	const uint8_t *ksaddr8 = (void *)ksaddr;
	uint8_t *udaddr8 = (void *)udaddr;

	__builtin_prefetch(ksaddr8, 0, 1);

	for (; len >= 16; len -= 16, ksaddr8 += 16, udaddr8 += 16) {
		__builtin_prefetch(ksaddr8 + 16, 0, 1);
		copyout_16uint8s(ksaddr8, udaddr8, ds_msr);
	}

	while (len-- > 0) {
		copyout_uint8(udaddr8++, *ksaddr8++, ds_msr);
	}
}

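/*
 * Word-granularity bulk copy.  Both addresses must be 32-bit aligned
 * and len must be a multiple of 4.  If the destination is not cache
 * line aligned and the copy extends past its first line, words up to
 * the line boundary are copied first; the remainder is then copied in
 * 16-word chunks (letting copyout_16uint32s use dcba on line
 * boundaries), an 8-word chunk, and a word-at-a-time tail.
 */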
static inline void
copyout_uint32s(vaddr_t ksaddr, vaddr_t udaddr, size_t len, register_t ds_msr)
{
	const size_t line_size = curcpu()->ci_ci.dcache_line_size;
	const size_t line_mask = line_size - 1;
	const size_t udalignment = udaddr & line_mask;
	KASSERT((ksaddr & 3) == 0);
	KASSERT((udaddr & 3) == 0);
	const uint32_t *ksaddr32 = (void *)ksaddr;
	uint32_t *udaddr32 = (void *)udaddr;
	len >>= 2;
	__builtin_prefetch(ksaddr32, 0, 1);
	if (udalignment != 0 && udalignment + 4*len > line_size) {
		size_t slen = (line_size - udalignment) >> 2;
		len -= slen;
		for (; slen >= 8; ksaddr32 += 8, udaddr32 += 8, slen -= 8) {
			copyout_8uint32s(ksaddr32, udaddr32, ds_msr, line_mask);
		}
		while (slen-- > 0) {
			copyout_uint32(udaddr32++, *ksaddr32++, ds_msr);
		}
		if (len == 0)
			return;
	}
	__builtin_prefetch(ksaddr32, 0, 1);
	while (len >= 16) {
		__builtin_prefetch(ksaddr32 + 8, 0, 1);
		__builtin_prefetch(ksaddr32 + 16, 0, 1);
		copyout_16uint32s(ksaddr32, udaddr32, ds_msr, line_mask);
		ksaddr32 += 16, udaddr32 += 16, len -= 16;
	}
	KASSERT(len <= 16);
	if (len >= 8) {
		__builtin_prefetch(ksaddr32 + 8, 0, 1);
		copyout_8uint32s(ksaddr32, udaddr32, ds_msr, line_mask);
		ksaddr32 += 8, udaddr32 += 8, len -= 8;
	}
	while (len-- > 0) {
		copyout_uint32(udaddr32++, *ksaddr32++, ds_msr);
	}
}

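/*
 * Backends for the machine-independent ustore_8/16/32 primitives
 * (exposed to this file by __UFETCHSTORE_PRIVATE).  Each one arms
 * pcb_onfault with setfault(), performs a single store through an
 * MSR[DS] window, and disarms the fault handler, returning EFAULT if
 * the user address faults.
 */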
int
_ustore_8(uint8_t *vusaddr, uint8_t val)
{
	struct pcb * const pcb = lwp_getpcb(curlwp);
	struct faultbuf env;

	if (setfault(&env) != 0) {
		pcb->pcb_onfault = NULL;
		return EFAULT;
	}

	copyout_uint8(vusaddr, val, mfmsr() | PSL_DS);

	pcb->pcb_onfault = NULL;

	return 0;
}

int
_ustore_16(uint16_t *vusaddr, uint16_t val)
{
	struct pcb * const pcb = lwp_getpcb(curlwp);
	struct faultbuf env;

	if (setfault(&env) != 0) {
		pcb->pcb_onfault = NULL;
		return EFAULT;
	}

	copyout_uint16(vusaddr, val, mfmsr() | PSL_DS);

	pcb->pcb_onfault = NULL;

	return 0;
}

int
_ustore_32(uint32_t *vusaddr, uint32_t val)
{
	struct pcb * const pcb = lwp_getpcb(curlwp);
	struct faultbuf env;

	if (setfault(&env) != 0) {
		pcb->pcb_onfault = NULL;
		return EFAULT;
	}

	copyout_uint32(vusaddr, val, mfmsr() | PSL_DS);

	pcb->pcb_onfault = NULL;

	return 0;
}

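/*
 * copyout(9): copy len bytes from kernel address vksaddr to user
 * address vudaddr.  Copies shorter than a word go byte by byte.  When
 * source and destination are congruent modulo 4, any unaligned head is
 * copied as bytes, the aligned bulk as 32-bit words, and the tail as
 * bytes; mutually misaligned buffers fall back entirely to byte copies.
 * A fault on the user address is caught via setfault() and returned to
 * the caller.
 */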
int
copyout(const void *vksaddr, void *vudaddr, size_t len)
{
	struct pcb * const pcb = lwp_getpcb(curlwp);
	struct faultbuf env;
	vaddr_t udaddr = (vaddr_t) vudaddr;
	vaddr_t ksaddr = (vaddr_t) vksaddr;

	if (__predict_false(len == 0)) {
		return 0;
	}

	const register_t ds_msr = mfmsr() | PSL_DS;

	int rv = setfault(&env);
	if (rv != 0) {
		pcb->pcb_onfault = NULL;
		return rv;
	}

	if (__predict_false(len < 4)) {
		copyout_uint8s(ksaddr, udaddr, len, ds_msr);
		pcb->pcb_onfault = NULL;
		return 0;
	}

	const size_t alignment = (udaddr ^ ksaddr) & 3;
	if (__predict_true(alignment == 0)) {
		size_t slen;
		if (__predict_false(ksaddr & 3)) {
			slen = 4 - (ksaddr & 3);
			copyout_uint8s(ksaddr, udaddr, slen, ds_msr);
			udaddr += slen, ksaddr += slen, len -= slen;
		}
		slen = len & ~3;
		if (__predict_true(slen >= 4)) {
			copyout_uint32s(ksaddr, udaddr, slen, ds_msr);
			udaddr += slen, ksaddr += slen, len -= slen;
		}
	}

	if (len > 0) {
		copyout_uint8s(ksaddr, udaddr, len, ds_msr);
	}
	pcb->pcb_onfault = NULL;
	return 0;
}

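/*
 * Illustrative caller sketch (not part of this file; "foo_stats" and
 * "uap" are hypothetical names): a syscall handing a kernel structure
 * back through a user-supplied buffer checks the return value, which
 * is non-zero (typically EFAULT) when the user pointer is bad:
 *
 *	struct foo_stats st;
 *	int error;
 *
 *	memset(&st, 0, sizeof(st));
 *	... fill in st ...
 *	error = copyout(&st, SCARG(uap, buf), sizeof(st));
 *	if (error != 0)
 *		return error;
 */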
#if 1
int
copyoutstr(const void *ksaddr, void *udaddr, size_t len, size_t *done)
{
	struct pcb * const pcb = lwp_getpcb(curlwp);
	struct faultbuf env;
	int rv;

	if (__predict_false(len == 0)) {
		if (done)
			*done = 0;
		return 0;
	}

	rv = setfault(&env);
	if (rv != 0) {
		pcb->pcb_onfault = NULL;
		if (done)
			*done = 0;
		return rv;
	}

	const register_t ds_msr = mfmsr() | PSL_DS;
	const uint8_t *ksaddr8 = ksaddr;
	size_t copylen = 0;

	uint8_t *udaddr8 = (void *)udaddr;

	while (copylen++ < len) {
		const uint8_t data = *ksaddr8++;
		copyout_uint8(udaddr8++, data, ds_msr);
		if (data == 0)
			break;
	}

	pcb->pcb_onfault = NULL;
	if (done)
		*done = copylen;
	return 0;
}
#else
/* XXX This version of copyoutstr(9) has never been enabled so far. */
int
copyoutstr(const void *ksaddr, void *udaddr, size_t len, size_t *lenp)
{
	struct pcb * const pcb = lwp_getpcb(curlwp);
	struct faultbuf env;

	if (__predict_false(len == 0)) {
		if (lenp)
			*lenp = 0;
		return 0;
	}

	if (setfault(&env)) {
		pcb->pcb_onfault = NULL;
		if (lenp)
			*lenp = 0;
		return EFAULT;
	}

	const register_t ds_msr = mfmsr() | PSL_DS;
	const uint8_t *ksaddr8 = ksaddr;
	size_t copylen = 0;

	uint32_t *udaddr32 = (void *)((uintptr_t)udaddr & ~3);

	size_t boff = (uintptr_t)udaddr & 3;
	bool done = false;
	size_t wlen = 0;
	size_t data = 0;

	/*
	 * If the destination buffer doesn't start on a 32-bit boundary
	 * try to partially fill in the first word.  If we succeed we can
	 * finish writing it while preserving the bytes on front.
	 */
	if (boff > 0) {
		KASSERT(len > 0);
		do {
			data = (data << 8) | *ksaddr8++;
			wlen++;
			done = ((uint8_t)data == 0 || len == wlen);
		} while (!done && boff + wlen < 4);
		KASSERT(wlen > 0);
		data <<= 8 * boff;
		if (!done || boff + wlen == 4) {
			uint32_t mask = 0xffffffff << (8 * boff);
			copyout_le32_with_mask(udaddr32++, data, mask, ds_msr);
			boff = 0;
			copylen = wlen;
			wlen = 0;
			data = 0;
		}
	}

	/*
	 * Now we get to the heart of the routine.  Build up complete words
	 * if possible.  When we have one, write it to the user's address
	 * space and go for the next.  If we ran out of space or we found the
	 * end of the string, stop building.  If we managed to build a complete
	 * word, just write it and be happy.  Otherwise we have to deal with
	 * the trailing bytes.
	 */
	KASSERT(done || boff == 0);
	KASSERT(done || copylen < len);
	while (!done) {
		KASSERT(wlen == 0);
		KASSERT(copylen < len);
		do {
			data = (data << 8) | *ksaddr8++;
			wlen++;
			done = ((uint8_t)data == 0 || copylen + wlen == len);
		} while (!done && wlen < 4);
		KASSERT(done || wlen == 4);
		if (__predict_true(wlen == 4)) {
			copyout_le32(udaddr32++, data, ds_msr);
			data = 0;
			copylen += wlen;
			wlen = 0;
			KASSERT(copylen < len || done);
		}
	}
	KASSERT(wlen < 4);
	if (wlen) {
		/*
		 * Remember even though we are running big-endian we are using
		 * byte reversed load/stores so we need to deal with things as
		 * little endian.
		 *
		 * wlen=1 boff=0:
		 * (~(~0 <<  8) <<  0) -> (~(0xffffff00) <<  0) -> 0x000000ff
		 * wlen=1 boff=1:
		 * (~(~0 <<  8) <<  8) -> (~(0xffffff00) <<  8) -> 0x0000ff00
		 * wlen=1 boff=2:
		 * (~(~0 <<  8) << 16) -> (~(0xffffff00) << 16) -> 0x00ff0000
		 * wlen=1 boff=3:
		 * (~(~0 <<  8) << 24) -> (~(0xffffff00) << 24) -> 0xff000000
		 * wlen=2 boff=0:
		 * (~(~0 << 16) <<  0) -> (~(0xffff0000) <<  0) -> 0x0000ffff
		 * wlen=2 boff=1:
		 * (~(~0 << 16) <<  8) -> (~(0xffff0000) <<  8) -> 0x00ffff00
		 * wlen=2 boff=2:
		 * (~(~0 << 16) << 16) -> (~(0xffff0000) << 16) -> 0xffff0000
		 * wlen=3 boff=0:
		 * (~(~0 << 24) <<  0) -> (~(0xff000000) <<  0) -> 0x00ffffff
		 * wlen=3 boff=1:
		 * (~(~0 << 24) <<  8) -> (~(0xff000000) <<  8) -> 0xffffff00
		 */
		KASSERT(boff + wlen <= 4);
		uint32_t mask = (~(~0 << (8 * wlen))) << (8 * boff);
		KASSERT(mask != 0xffffffff);
		copyout_le32_with_mask(udaddr32, data, mask, ds_msr);
		copylen += wlen;
	}

	pcb->pcb_onfault = NULL;
	if (lenp)
		*lenp = copylen;
	return 0;
}
#endif