/*	$NetBSD: memset.S,v 1.1 2014/09/03 19:34:25 matt Exp $	*/

/*-
 * Copyright (c) 2014 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas of 3am Software Foundry.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <machine/asm.h>

#if defined(LIBC_SCCS) && !defined(lint)
__RCSID("$NetBSD: memset.S,v 1.1 2014/09/03 19:34:25 matt Exp $")
#endif /* LIBC_SCCS && !lint */

/*----------------------------------------------------------------------*/
/*
	void bzero(void *b r3, size_t len r4);
	void *memset(void *b r3, int c r4, size_t len r5);
*/
/*----------------------------------------------------------------------*/

#ifdef _BZERO
#define	r_fill	r0			/* r0 is hardwired to zero */
ENTRY(bzero)
#else
#define	r_fill	r4
ENTRY(memset)
#endif
#ifdef _BZERO
	l.ori	r5, r4, 0		/* move length to r5 */
	l.or	r4, r0, r0		/* fill value is zero */
#else
	l.or	r11, r3, r0		/* move start to return value */
#endif
	l.sfeqi	r5, 0			/* anything to do? */
	l.bf	.Lret			/*   no, just return */
	l.nop

	l.sfgeui r5, 7			/* at least 7 bytes? */
	l.add	r5, r5, r3		/* r5 is end pointer */
	l.bnf	.Lbyte_fill		/*   no, just byte fill */
	l.nop

#ifndef _BZERO
	/* Replicate the fill byte across all four bytes of a word. */
	l.sfeqi	r4, 0			/* filling with 0? */
	l.bf	.Lalignment_check	/*   yes, no need to replicate */
	l.nop
	l.extbz	r4, r4			/* truncate to 8 bits */
	l.slli	r13, r4, 8		/* shift left 8 bits */
	l.or	r4, r4, r13		/* merge the two bytes */
	l.slli	r13, r4, 16		/* shift left 16 bits */
	l.or	r4, r4, r13		/* merge the two halves */

.Lalignment_check:
#endif
	l.andi	r13, r3, 3		/* get low bits of start */
	l.sfeqi	r13, 0			/* word aligned? */
	l.bf	.Lword_aligned		/*   yes, start setting words */
	l.nop

	l.sub	r3, r3, r13		/* align start down to a word */
	l.slli	r13, r13, 3		/* misalignment in bits */
	l.addi	r15, r13, -8		/* minus one byte */

	l.lwz	r6, 0(r3)		/* get first word */
	l.movhi	r7, 0xff00		/* 0xff000000 */
	l.sra	r7, r7, r15		/* mask for the bytes to preserve */
	l.and	r6, r6, r7		/* clear the bytes to be filled */
#ifndef _BZERO
	l.srl	r7, r_fill, r13		/* clear the bytes to preserve */
	l.or	r6, r6, r7		/* merge existing with new */
#endif
	l.sw	0(r3), r6		/* store first word */
	l.addi	r3, r3, 4		/* advance to next word */

.Lword_aligned:
	l.srli	r6, r5, 2		/* round end pointer down... */
	l.slli	r6, r6, 2		/*   ... to word-aligned end in r6 */
	l.sfgeu	r3, r6			/* any more full words? */
	l.bf	.Lend_fill		/*   no, handle the last bytes */
	l.nop

.Lword_fill:
	l.sw	0(r3), r_fill		/* store a word */
	l.addi	r3, r3, 4		/* advance */
	l.sfgeu	r3, r6			/* any more full words? */
	l.bnf	.Lword_fill		/*   yes, fill next word */
	l.nop
	l.j	.Lend_fill		/* fill any leftover bytes */
	l.nop

.Lbyte_fill:
	l.sb	0(r3), r_fill		/* store a byte */
	l.addi	r3, r3, 1		/* advance */
.Lend_fill:
	l.sfeq	r3, r5			/* at the end? */
	l.bnf	.Lbyte_fill		/*   no, fill next byte */
	l.nop

.Lret:
	l.jr	lr			/* return */
	l.nop
#ifdef _BZERO
END(bzero)
#else
END(memset)
#endif
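
/*
 * Example call (an illustrative comment only, not assembled): with the
 * calling convention used above, the caller passes the buffer in r3,
 * the fill byte in r4 and the length in r5, and gets the buffer back
 * in r11.  The symbol "buf" and the constants are hypothetical; l.jal
 * executes one delay-slot instruction after the call.
 *
 *	l.movhi	r3, hi(buf)
 *	l.ori	r3, r3, lo(buf)		   r3 = address of buf
 *	l.ori	r4, r0, 0xa5		   r4 = fill byte
 *	l.jal	memset			   call; r11 = buf on return
 *	l.ori	r5, r0, 64		   delay slot: r5 = length
 */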