/*-
 * Copyright (c) 2009 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas <matt@3am-software.com>.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <sys/types.h>

#if !defined(_KERNEL) && !defined(_STANDALONE)
#include <assert.h>
#include <limits.h>
#include <string.h>
#include <inttypes.h>
#else
#include <lib/libkern/libkern.h>
#include <machine/limits.h>
#endif

#include <sys/endian.h>
#include <machine/types.h>

#ifdef TEST
#include <assert.h>
#define _DIAGASSERT(a)		assert(a)
#endif

#ifdef _FORTIFY_SOURCE
#undef bzero
#endif
#undef memset

#if defined(LIBC_SCCS) && !defined(lint)
__RCSID("$NetBSD: memset2.c,v 1.2.6.1 2011/11/10 14:31:10 yamt Exp $");
#endif /* LIBC_SCCS and not lint */

/*
 * Assume uregister_t is the widest non-synthetic unsigned type.
 */
typedef uregister_t memword_t;

#ifdef BZERO
static inline
#define	memset memset0
#endif

#ifdef TEST
static
#define memset test_memset
#endif

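/*
 * The assertion below only holds if right-shifting an all-ones memword_t
 * shifts in zero bits, i.e. memword_t really is an unsigned type with
 * logical (not arithmetic) right shifts.
 */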
#ifdef CTASSERT
CTASSERT((~(memword_t)0U >> 1) != ~(memword_t)0U);
#endif

void *
memset(void *addr, int c, size_t len)
{
	memword_t *dstp = addr;
	memword_t *edstp;
	memword_t fill;
#ifndef __OPTIMIZE_SIZE__
	memword_t keep_mask = 0;
#endif
	size_t fill_count;

	_DIAGASSERT(addr != 0);

	if (__predict_false(len == 0))
		return addr;

	/*
	 * Pad out the fill byte (c) across a memword_t.
	 * The conditional at the end prevents GCC from complaining about a
	 * shift count >= width of type.
	 */
	fill = c;
	fill |= fill << 8;
	fill |= fill << 16;
	fill |= fill << (sizeof(c) < sizeof(fill) ? 32 : 0);
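	/*
	 * For example, with c == 0x5a and an (assumed) 64-bit memword_t,
	 * fill ends up as 0x5a5a5a5a5a5a5a5a; with a 32-bit memword_t the
	 * last shift degenerates to a shift by 0 and fill is 0x5a5a5a5a.
	 */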

	/*
	 * Get the number of unaligned bytes to fill in the first word.
	 */
	fill_count = -(uintptr_t)addr & (sizeof(memword_t) - 1);
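	/*
	 * E.g. with 8-byte words, an addr ending in ...5 gives
	 * fill_count = -5 & 7 = 3: three bytes remain before the next word
	 * boundary, while an already aligned addr gives fill_count = 0.
	 */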

	if (__predict_false(fill_count != 0)) {
#ifndef __OPTIMIZE_SIZE__
		/*
		 * We want to fill <fill_count> trailing bytes of the word.
		 * On big/little endian, these are the least/most significant
		 * bits, respectively.  So as we shift, the keep_mask will only
		 * have bits set for the bytes we won't be filling.
		 */
#if BYTE_ORDER == BIG_ENDIAN
		keep_mask = ~(memword_t)0U << (fill_count * 8);
#endif
#if BYTE_ORDER == LITTLE_ENDIAN
		keep_mask = ~(memword_t)0U >> (fill_count * 8);
#endif
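		/*
		 * Continuing the example (64-bit words, fill_count == 3):
		 * big endian gives keep_mask = 0xffffffffff000000 and
		 * little endian gives keep_mask = 0x000000ffffffffff; either
		 * way the five bytes of the word below addr are preserved.
		 */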
		/*
		 * Make sure dstp is aligned to a memword_t boundary.
		 */
		dstp = (memword_t *)((uintptr_t)addr & -sizeof(memword_t));
		if (len >= fill_count) {
			/*
			 * If we can fill the rest of this word, then we mask
			 * off the bytes we are filling and then fill in those
			 * bytes with the new fill value.
			 */
			*dstp = (*dstp & keep_mask) | (fill & ~keep_mask);
			len -= fill_count;
			if (__predict_false(len == 0))
				return addr;
			/*
			 * Since we were able to fill the rest of this word,
			 * we will advance to the next word and thus have no
			 * bytes to preserve.
			 *
			 * If we don't have enough to fill the rest of this
			 * word, we will fall through the following loop
			 * (since there are no full words to fill).  Then we
			 * use the keep_mask above to preserve the leading
			 * bytes of the word.
			 */
			dstp++;
			keep_mask = 0;
		} else {
			len += (uintptr_t)addr & (sizeof(memword_t) - 1);
		}
#else /* __OPTIMIZE_SIZE__ */
		uint8_t *dp, *ep;
		if (len < fill_count)
			fill_count = len;
		for (dp = (uint8_t *)dstp, ep = dp + fill_count;
		     dp != ep; dp++)
			*dp = fill;
		if ((len -= fill_count) == 0)
			return addr;
		dstp = (memword_t *)ep;
#endif /* __OPTIMIZE_SIZE__ */
	}

	/*
	 * Simply fill memory one word at a time (for as many full words as
	 * we have to write).
	 */
	for (edstp = dstp + len / sizeof(memword_t); dstp != edstp; dstp++)
		*dstp = fill;

	/*
	 * We didn't subtract out the full words we just filled since we know
	 * by the time we get here we will have less than a word's worth left
	 * to write.  So we only need to concern ourselves with the sub-word
	 * bits of len.
	 */
	len &= sizeof(memword_t)-1;
	if (len > 0) {
#ifndef __OPTIMIZE_SIZE__
		/*
		 * We want to fill the <len> leading bytes of the word.
		 * On big/little endian, these are the most/least significant
		 * bits, respectively.  But as we want the mask of the bytes
		 * to keep, we have to complement the mask.  So after we shift,
		 * the keep_mask will only have bits set for the bytes we
		 * won't be filling.
		 *
		 * But the keep_mask could already have bytes to preserve
		 * if the amount to fill was less than the amount of trailing
		 * space in the first word.
		 */
#if BYTE_ORDER == BIG_ENDIAN
		keep_mask |= ~(memword_t)0U >> (len * 8);
#endif
#if BYTE_ORDER == LITTLE_ENDIAN
		keep_mask |= ~(memword_t)0U << (len * 8);
#endif
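		/*
		 * With 64-bit words and, say, len == 3 left over, big endian
		 * ORs in 0x000000ffffffffff and little endian ORs in
		 * 0xffffffffff000000; either way only the first three bytes
		 * of this final word receive the fill value.
		 */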
		/*
		 * Now we mask off the bytes we are filling and then fill in
		 * those bytes with the new fill value.
		 */
		*dstp = (*dstp & keep_mask) | (fill & ~keep_mask);
#else /* __OPTIMIZE_SIZE__ */
		uint8_t *dp, *ep;
		for (dp = (uint8_t *)dstp, ep = dp + len;
		     dp != ep; dp++)
			*dp = fill;
#endif /* __OPTIMIZE_SIZE__ */
	}

	/*
	 * Return the initial addr
	 */
	return addr;
}

#ifdef BZERO
/*
 * For bzero, simply inline memset and let the compiler optimize things away.
 */
void
bzero(void *addr, size_t len)
{
	memset(addr, 0, len);
}
#endif

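/*
 * The harness below exercises every (start, len) sub-range of a small
 * buffer: it resets the buffer to 0xff with the libc memset, zeroes the
 * sub-range with the renamed test_memset, and then checks every byte.
 * It can be built as a standalone program with something along the lines
 * of "cc -DTEST memset2.c"; the exact flags and include paths are
 * build-system specific.
 */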
#ifdef TEST
#include <stdbool.h>
#include <stdio.h>

#undef memset

static union {
	uint8_t bytes[sizeof(memword_t) * 4];
	memword_t words[4];
} testmem;

int
main(int argc, char **argv)
{
	size_t start;
	size_t len;
	bool failed = false;

	for (start = 1; start < sizeof(testmem) - 1; start++) {
		for (len = 1; start + len < sizeof(testmem) - 1; len++) {
			bool ok = true;
			size_t i;
			uint8_t check_value;
			memset(testmem.bytes, 0xff, sizeof(testmem));
			test_memset(testmem.bytes + start, 0x00, len);
			for (i = 0; i < sizeof(testmem); i++) {
				if (i == 0 || i == start + len)
					check_value = 0xff;
				else if (i == start)
					check_value = 0x00;
				if (testmem.bytes[i] != check_value) {
					if (ok)
						printf("pass @ %zu .. %zu failed",
						    start, start + len - 1);
					ok = false;
					printf(" [%zu]=0x%02x(!0x%02x)",
					    i, testmem.bytes[i], check_value);
				}
			}
			if (!ok) {
				printf("\n");
				failed = true;
			}
		}
	}

	return failed ? 1 : 0;
}
#endif /* TEST */