Home | History | Annotate | Line # | Download | only in string
memset2.c revision 1.1.2.2
      1  1.1.2.1  matt /*-
      2  1.1.2.1  matt  * Copyright (c) 2009 The NetBSD Foundation, Inc.
      3  1.1.2.1  matt  * All rights reserved.
      4  1.1.2.1  matt  *
      5  1.1.2.1  matt  * This code is derived from software contributed to The NetBSD Foundation
      6  1.1.2.1  matt  * by Matt Thomas <matt (at) 3am-software.com>.
      7  1.1.2.1  matt  *
      8  1.1.2.1  matt  * Redistribution and use in source and binary forms, with or without
      9  1.1.2.1  matt  * modification, are permitted provided that the following conditions
     10  1.1.2.1  matt  * are met:
     11  1.1.2.1  matt  * 1. Redistributions of source code must retain the above copyright
     12  1.1.2.1  matt  *    notice, this list of conditions and the following disclaimer.
     13  1.1.2.1  matt  * 2. Redistributions in binary form must reproduce the above copyright
     14  1.1.2.1  matt  *    notice, this list of conditions and the following disclaimer in the
     15  1.1.2.1  matt  *    documentation and/or other materials provided with the distribution.
     16  1.1.2.1  matt  *
     17  1.1.2.1  matt  * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
     18  1.1.2.1  matt  * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
     19  1.1.2.1  matt  * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
     20  1.1.2.1  matt  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
     21  1.1.2.1  matt  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
     22  1.1.2.1  matt  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
     23  1.1.2.1  matt  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
     24  1.1.2.1  matt  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
     25  1.1.2.1  matt  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
     26  1.1.2.1  matt  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
     27  1.1.2.1  matt  * POSSIBILITY OF SUCH DAMAGE.
     28  1.1.2.1  matt  */
     29  1.1.2.1  matt 
     30  1.1.2.1  matt #include <sys/types.h>
     31  1.1.2.1  matt 
     32  1.1.2.1  matt #if !defined(_KERNEL) && !defined(_STANDALONE)
     33  1.1.2.1  matt #include <assert.h>
     34  1.1.2.1  matt #include <limits.h>
     35  1.1.2.1  matt #include <string.h>
     36  1.1.2.1  matt #include <inttypes.h>
     37  1.1.2.1  matt #else
     38  1.1.2.1  matt #include <lib/libkern/libkern.h>
     39  1.1.2.1  matt #include <machine/limits.h>
     40  1.1.2.1  matt #endif
     41  1.1.2.1  matt 
     42  1.1.2.1  matt #include <sys/endian.h>
     43  1.1.2.1  matt #include <machine/types.h>
     44  1.1.2.1  matt 
     45  1.1.2.2  matt #ifdef TEST
     46  1.1.2.2  matt #include <assert.h>
     47  1.1.2.2  matt #define _DIAGASSERT(a)		assert(a)
     48  1.1.2.2  matt #endif
     49  1.1.2.2  matt 
     50  1.1.2.1  matt #ifdef _FORTIFY_SOURCE
     51  1.1.2.1  matt #undef bzero
     52  1.1.2.1  matt #undef memset
     53  1.1.2.1  matt #endif
     54  1.1.2.1  matt 
     55  1.1.2.1  matt #if defined(LIBC_SCCS) && !defined(lint)
     56  1.1.2.2  matt __RCSID("$NetBSD: memset2.c,v 1.1.2.2 2009/08/23 06:40:49 matt Exp $");
     57  1.1.2.1  matt #endif /* LIBC_SCCS and not lint */
     58  1.1.2.1  matt 
     59  1.1.2.1  matt /*
     60  1.1.2.2  matt  * Assume uregister_t is the widest non-synthetic unsigned type.
     61  1.1.2.1  matt  */
     62  1.1.2.2  matt typedef uregister_t memword_t;
     63  1.1.2.1  matt 
     64  1.1.2.1  matt #ifdef BZERO
     65  1.1.2.1  matt static inline
     66  1.1.2.1  matt #define	memset memset0
     67  1.1.2.1  matt #endif
     68  1.1.2.1  matt 
     69  1.1.2.2  matt #ifdef TEST
     70  1.1.2.2  matt static
     71  1.1.2.2  matt #define memset test_memset
     72  1.1.2.2  matt #endif
     73  1.1.2.2  matt 
     74  1.1.2.2  matt CTASSERT((~(memword_t)0U >> 1) != ~(memword_t)0U);
     75  1.1.2.2  matt 
     76  1.1.2.1  matt void *
     77  1.1.2.1  matt memset(void *addr, int c, size_t len)
     78  1.1.2.1  matt {
     79  1.1.2.1  matt 	memword_t *dstp = addr;
     80  1.1.2.1  matt 	memword_t *edstp;
     81  1.1.2.1  matt 	memword_t fill;
     82  1.1.2.1  matt #ifndef __OPTIMIZE_SIZE__
     83  1.1.2.1  matt 	memword_t keep_mask = 0;
     84  1.1.2.1  matt #endif
     85  1.1.2.1  matt 	size_t fill_count;
     86  1.1.2.1  matt 
     87  1.1.2.1  matt 	_DIAGASSERT(addr != 0);
     88  1.1.2.1  matt 
     89  1.1.2.1  matt 	if (__predict_false(len == 0))
     90  1.1.2.1  matt 		return addr;
     91  1.1.2.1  matt 
     92  1.1.2.1  matt 	/*
     93  1.1.2.1  matt 	 * Pad out the fill byte (v) across a memword_t.
     94  1.1.2.1  matt 	 * The conditional at the end prevents GCC from complaing about
     95  1.1.2.1  matt 	 * shift count >= width of type
     96  1.1.2.1  matt 	 */
     97  1.1.2.1  matt 	fill = c;
     98  1.1.2.1  matt 	fill |= fill << 8;
     99  1.1.2.1  matt 	fill |= fill << 16;
    100  1.1.2.1  matt 	fill |= fill << (sizeof(c) < sizeof(fill) ? 32 : 0);
    101  1.1.2.1  matt 
    102  1.1.2.1  matt 	/*
    103  1.1.2.1  matt 	 * Get the number of unaligned bytes to fill in the first word.
    104  1.1.2.1  matt 	 */
    105  1.1.2.1  matt 	fill_count = -(uintptr_t)addr & (sizeof(memword_t) - 1);
    106  1.1.2.1  matt 
    107  1.1.2.1  matt 	if (__predict_false(fill_count != 0)) {
    108  1.1.2.1  matt #ifndef __OPTIMIZE_SIZE__
    109  1.1.2.1  matt 		/*
    110  1.1.2.1  matt 		 * We want to clear <fill_count> trailing bytes in the word.
    111  1.1.2.1  matt 		 * On big/little endian, these are the least/most significant,
    112  1.1.2.1  matt 		 * bits respectively.  So as we shift, the keep_mask will only
    113  1.1.2.1  matt 		 * have bits set for the bytes we won't be filling.
    114  1.1.2.1  matt 		 */
    115  1.1.2.1  matt #if BYTE_ORDER == BIG_ENDIAN
    116  1.1.2.1  matt 		keep_mask = ~(memword_t)0U << (fill_count * 8);
    117  1.1.2.1  matt #endif
    118  1.1.2.1  matt #if BYTE_ORDER == LITTLE_ENDIAN
    119  1.1.2.2  matt 		keep_mask = ~(memword_t)0U >> (fill_count * 8);
    120  1.1.2.1  matt #endif
    121  1.1.2.1  matt 		/*
    122  1.1.2.1  matt 		 * Make sure dstp is aligned to a memword_t boundary.
    123  1.1.2.1  matt 		 */
    124  1.1.2.1  matt 		dstp = (memword_t *)((uintptr_t)addr & -sizeof(memword_t));
    125  1.1.2.1  matt 		if (len >= fill_count) {
    126  1.1.2.1  matt 			/*
    127  1.1.2.1  matt 			 * If we can fill the rest of this word, then we mask
    128  1.1.2.1  matt 			 * off the bytes we are filling and then fill in those
    129  1.1.2.1  matt 			 * bytes with the new fill value.
    130  1.1.2.1  matt 			 */
    131  1.1.2.1  matt 			*dstp = (*dstp & keep_mask) | (fill & ~keep_mask);
    132  1.1.2.1  matt 			len -= fill_count;
    133  1.1.2.1  matt 			if (__predict_false(len == 0))
    134  1.1.2.1  matt 				return addr;
    135  1.1.2.1  matt 			/*
    136  1.1.2.1  matt 			 * Since we were able to fill the rest of this word,
    137  1.1.2.1  matt 			 * we will advance to the next word and thus have no
    138  1.1.2.1  matt 			 * bytes to preserve.
    139  1.1.2.1  matt 			 *
    140  1.1.2.1  matt 			 * If we don't have enough to fill the rest of this
    141  1.1.2.1  matt 			 * word, we will fall through the following loop
    142  1.1.2.1  matt 			 * (since there are no full words to fill).  Then we
    143  1.1.2.1  matt 			 * use the keep_mask above to preserve the leading
    144  1.1.2.1  matt 			 * bytes of word.
    145  1.1.2.1  matt 			 */
    146  1.1.2.1  matt 			dstp++;
    147  1.1.2.1  matt 			keep_mask = 0;
    148  1.1.2.2  matt 		} else {
    149  1.1.2.2  matt 			len += (uintptr_t)addr & (sizeof(memword_t) - 1);
    150  1.1.2.1  matt 		}
    151  1.1.2.1  matt #else /* __OPTIMIZE_SIZE__ */
    152  1.1.2.1  matt 		uint8_t *dp, *ep;
    153  1.1.2.2  matt 		if (len < fill_count)
    154  1.1.2.2  matt 			fill_count = len;
    155  1.1.2.1  matt 		for (dp = (uint8_t *)dstp, ep = dp + fill_count;
    156  1.1.2.1  matt 		     dp != ep; dp++)
    157  1.1.2.1  matt 			*dp = fill;
    158  1.1.2.2  matt 		if ((len -= fill_count) == 0)
    159  1.1.2.2  matt 			return addr;
    160  1.1.2.1  matt 		dstp = (memword_t *)ep;
    161  1.1.2.1  matt #endif /* __OPTIMIZE_SIZE__ */
    162  1.1.2.1  matt 	}
    163  1.1.2.1  matt 
    164  1.1.2.1  matt 	/*
    165  1.1.2.1  matt 	 * Simply fill memory one word at time (for as many full words we have
    166  1.1.2.1  matt 	 * to write).
    167  1.1.2.1  matt 	 */
    168  1.1.2.1  matt 	for (edstp = dstp + len / sizeof(memword_t); dstp != edstp; dstp++)
    169  1.1.2.1  matt 		*dstp = fill;
    170  1.1.2.1  matt 
    171  1.1.2.1  matt 	/*
    172  1.1.2.1  matt 	 * We didn't subtract out the full words we just filled since we know
    173  1.1.2.1  matt 	 * by the time we get here we will have less than a words worth to
    174  1.1.2.1  matt 	 * write.  So we can concern ourselves with only the subword len bits.
    175  1.1.2.1  matt 	 */
    176  1.1.2.1  matt 	len &= sizeof(memword_t)-1;
    177  1.1.2.1  matt 	if (len > 0) {
    178  1.1.2.1  matt #ifndef __OPTIMIZE_SIZE__
    179  1.1.2.1  matt 		/*
    180  1.1.2.1  matt 		 * We want to clear <len> leading bytes in the word.
    181  1.1.2.1  matt 		 * On big/little endian, these are the most/least significant
    182  1.1.2.1  matt 		 * bits, respectively,  But as we want the mask of the bytes to
    183  1.1.2.1  matt 		 * keep, we have to complement the mask.  So after we shift,
    184  1.1.2.1  matt 		 * the keep_mask will only have bits set for the bytes we won't
    185  1.1.2.1  matt 		 * be filling.
    186  1.1.2.1  matt 		 *
    187  1.1.2.1  matt 		 * But the keep_mask could already have bytes to preserve
    188  1.1.2.1  matt 		 * if the amount to fill was less than the amount of traiing
    189  1.1.2.1  matt 		 * space in the first word.
    190  1.1.2.1  matt 		 */
    191  1.1.2.1  matt #if BYTE_ORDER == BIG_ENDIAN
    192  1.1.2.2  matt 		keep_mask |= ~(memword_t)0U >> (len * 8);
    193  1.1.2.1  matt #endif
    194  1.1.2.1  matt #if BYTE_ORDER == LITTLE_ENDIAN
    195  1.1.2.2  matt 		keep_mask |= ~(memword_t)0U << (len * 8);
    196  1.1.2.1  matt #endif
    197  1.1.2.1  matt 		/*
    198  1.1.2.1  matt 		 * Now we mask off the bytes we are filling and then fill in
    199  1.1.2.1  matt 		 * those bytes with the new fill value.
    200  1.1.2.1  matt 		 */
    201  1.1.2.1  matt 		*dstp = (*dstp & keep_mask) | (fill & ~keep_mask);
    202  1.1.2.1  matt #else /* __OPTIMIZE_SIZE__ */
    203  1.1.2.1  matt 		uint8_t *dp, *ep;
    204  1.1.2.1  matt 		for (dp = (uint8_t *)dstp, ep = dp + len;
    205  1.1.2.1  matt 		     dp != ep; dp++)
    206  1.1.2.1  matt 			*dp = fill;
    207  1.1.2.1  matt #endif /* __OPTIMIZE_SIZE__ */
    208  1.1.2.1  matt 	}
    209  1.1.2.1  matt 
    210  1.1.2.1  matt 	/*
    211  1.1.2.1  matt 	 * Return the initial addr
    212  1.1.2.1  matt 	 */
    213  1.1.2.1  matt 	return addr;
    214  1.1.2.1  matt }
    215  1.1.2.1  matt 
    216  1.1.2.1  matt #ifdef BZERO
    217  1.1.2.1  matt /*
    218  1.1.2.1  matt  * For bzero, simply inline memset and let the compiler optimize things away.
    219  1.1.2.1  matt  */
/*
 * bzero(addr, len): zero len bytes starting at addr.
 * Simply delegates to memset (renamed memset0 by the BZERO macro above)
 * with a constant zero fill, letting the compiler optimize things away.
 */
void
bzero(void *addr, size_t len)
{
	(void)memset(addr, 0, len);
}
    225  1.1.2.1  matt #endif
    226  1.1.2.2  matt 
    227  1.1.2.2  matt #ifdef TEST
    228  1.1.2.2  matt #include <stdbool.h>
    229  1.1.2.2  matt #include <stdio.h>
    230  1.1.2.2  matt 
    231  1.1.2.2  matt #undef memset
    232  1.1.2.2  matt 
/*
 * Scratch buffer for the TEST driver: four words' worth of bytes.
 * Declared as a union with memword_t so the byte array is aligned for
 * the word-at-a-time accesses made by the memset under test.
 */
static union {
	uint8_t bytes[sizeof(memword_t) * 4];
	memword_t words[4];
} testmem;
    237  1.1.2.2  matt 
    238  1.1.2.2  matt int
    239  1.1.2.2  matt main(int argc, char **argv)
    240  1.1.2.2  matt {
    241  1.1.2.2  matt 	size_t start;
    242  1.1.2.2  matt 	size_t len;
    243  1.1.2.2  matt 	bool failed = false;
    244  1.1.2.2  matt 
    245  1.1.2.2  matt 	for (start = 1; start < sizeof(testmem) - 1; start++) {
    246  1.1.2.2  matt 		for (len = 1; start + len < sizeof(testmem) - 1; len++) {
    247  1.1.2.2  matt 			bool ok = true;
    248  1.1.2.2  matt 			size_t i;
    249  1.1.2.2  matt 			uint8_t check_value;
    250  1.1.2.2  matt 			memset(testmem.bytes, 0xff, sizeof(testmem));
    251  1.1.2.2  matt 			test_memset(testmem.bytes + start, 0x00, len);
    252  1.1.2.2  matt 			for (i = 0; i < sizeof(testmem); i++) {
    253  1.1.2.2  matt 				if (i == 0 || i == start + len)
    254  1.1.2.2  matt 					check_value = 0xff;
    255  1.1.2.2  matt 				else if (i == start)
    256  1.1.2.2  matt 					check_value = 0x00;
    257  1.1.2.2  matt 				if (testmem.bytes[i] != check_value) {
    258  1.1.2.2  matt 					if (ok)
    259  1.1.2.2  matt 						printf("pass @ %zu .. %zu failed",
    260  1.1.2.2  matt 						    start, start + len - 1);
    261  1.1.2.2  matt 					ok = false;
    262  1.1.2.2  matt 					printf(" [%zu]=0x%02x(!0x%02x)",
    263  1.1.2.2  matt 					    i, testmem.bytes[i], check_value);
    264  1.1.2.2  matt 				}
    265  1.1.2.2  matt 			}
    266  1.1.2.2  matt 			if (!ok) {
    267  1.1.2.2  matt 				printf("\n");
    268  1.1.2.2  matt 				failed = 1;
    269  1.1.2.2  matt 			}
    270  1.1.2.2  matt 		}
    271  1.1.2.2  matt 	}
    272  1.1.2.2  matt 
    273  1.1.2.2  matt 	return failed ? 1 : 0;
    274  1.1.2.2  matt }
    275  1.1.2.2  matt #endif /* TEST */
    276