/*-
 * Copyright (c) 2013 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas of 3am Software Foundry.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#include <machine/asm.h>

RCSID("$NetBSD: strchr_arm.S,v 1.8 2013/08/19 17:50:04 matt Exp $")

#if defined(__thumb__) && !defined(_ARM_ARCH_T2)
#error Only Thumb2 or ARM supported
#endif

#ifdef __ARMEL__
#define BYTE0   0x000000ff
#define BYTE1   0x0000ff00
#define BYTE2   0x00ff0000
#define BYTE3   0xff000000
#define lshi    lsl
#define lshis   lsls
#else
#define BYTE0   0xff000000
#define BYTE1   0x00ff0000
#define BYTE2   0x0000ff00
#define BYTE3   0x000000ff
#define lshi    lsr
#define lshis   lsrs
#endif
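
/*
 * BYTE0..BYTE3 name the bytes of a loaded word in string order: BYTE0 is
 * always the lowest-addressed (first) byte regardless of endianness, and
 * lshi/lshis shift a per-byte flag toward later bytes of the string.
 */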

        .text
ENTRY(strchr)
        and     r2, r1, #0xff           /* restrict to byte value */
1:      tst     r0, #3                  /* test for word alignment */
        beq     .Lpre_main_loop         /* finally word aligned */
        ldrb    r3, [r0], #1            /* load a byte */
        cmp     r3, r2                  /* is it a match? */
        beq     2f                      /* yes, return current ptr - 1 */
        cmp     r3, #0                  /* no, was it 0? */
        bne     1b                      /* no, try next byte */
        movs    r0, #0                  /* yes, set return value to NULL */
        RET                             /* return */
2:      subs    r0, r0, #1              /* back up by one */
        RET                             /* return */
.Lpre_main_loop:
#if defined(_ARM_ARCH_7)
        movw    ip, #0xfefe             /* magic constant; 254 in each byte */
        movt    ip, #0xfefe             /* magic constant; 254 in each byte */
#elif defined(_ARM_ARCH_6)
        mov     ip, #0xfe               /* put 254 in low byte */
        orr     ip, ip, ip, lsl #8      /* move to next byte */
        orr     ip, ip, ip, lsl #16     /* move to next halfword */
#endif /* _ARM_ARCH_6 */
        orr     r2, r2, r2, lsl #8      /* move to next byte */
        orr     r2, r2, r2, lsl #16     /* move to next halfword */
.Lmain_loop:
        ldr     r3, [r0], #4            /* load next word */
#if defined(_ARM_ARCH_6)
        /*
         * Add 254 to each byte using the UQADD8 (unsigned saturating add 8)
         * instruction.  For every non-NUL byte, the result for that byte will
         * become 255.  For NUL, it will be 254.  When we complement the
         * result, if the result is non-0 then we must have encountered a NUL.
         */
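        /*
         * Worked example (little-endian, values illustrative): the bytes
         * "dog\0" load as r3 = 0x00676f64; searching for 'o' gives
         * r2 = 0x6f6f6f6f, and ip = 0xfefefefe.
         *   uqadd8 r1, r3, ip  -> r1 = 0xfeffffff  (0xfe marks the NUL)
         *   eors   r3, r3, r2  -> r3 = 0x6f08000b  (0x00 marks the match)
         *   uqadd8 r3, r3, ip  -> r3 = 0xfffffeff
         *   ands   r3, r3, r1  -> r3 = 0xfefffeff
         *   mvns   r3, r3      -> r3 = 0x01000100, non-0: NUL and/or match
         */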
        uqadd8  r1, r3, ip              /* NUL detection happens here */
        eors    r3, r3, r2              /* xor to clear each matching lane */
        uqadd8  r3, r3, ip              /* char detection happens here */
        ands    r3, r3, r1              /* merge results */
        mvns    r3, r3                  /* is the complement non-0? */
        beq     .Lmain_loop             /* no, then keep going */

        /*
         * We've encountered a NUL or a match but we don't know which happened
         * first.
         */
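        /*
         * A match only counts if it precedes the first NUL.  Shifting the
         * (complemented) NUL indication up one byte and OR-ing it across the
         * word builds a mask of every byte after the first NUL, so match bits
         * in those bytes get discarded.  E.g. (little-endian, illustrative)
         * the bytes "b\0oo" searched for 'o': the NUL indicator 0x00000100
         * grows into the mask 0x01010000, which wipes out the match bits
         * 0x01010000 entirely, and we correctly return NULL.
         */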
#if defined(__thumb__) && defined(_ARM_ARCH_T2)
        cbz     r2, .Lfind_match        /* searching for NUL? yes, find it */
#else
        cmp     r2, #0                  /* searching for NUL? */
        beq     .Lfind_match            /* yes, find the match */
#endif
        mvns    r1, r1                  /* did we encounter a NUL? */
        beq     .Lfind_match            /* no, find the match */
        bics    r3, r3, r1              /* clear the NUL bits, keep matches */
        beq     .Lnomatch               /* any left set? if not, no match */
        lshis   r1, r1, #8              /* shift NUL bit toward later bytes */
#ifdef __thumb__
        itt     ne
#endif
        orrne   r1, r1, r1, lshi #8     /* propagate NUL indication ... */
        orrne   r1, r1, r1, lshi #8     /* ... to all later bytes */
        bics    r3, r3, r1              /* clear any match bits after the NUL */
        beq     .Lnomatch               /* any left set? if not, no match */
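        /*
         * r3 now has 0x01 set in each byte that hit; the first hit in string
         * order wins.  REV (little-endian only) puts the first string byte in
         * the most significant position, so CLZ yields 8 bits per earlier
         * byte, and dividing by 8 gives the byte index of the first hit.
         * E.g. a hit in byte 1: 0x00000100 -> rev -> 0x00010000 -> clz = 15
         * -> 15 >> 3 = 1.
         */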
.Lfind_match:
#ifdef __ARMEL__
        rev     r3, r3                  /* we want this in BE for the CLZ */
#endif
        clz     r3, r3                  /* count how many leading zeros */
        add     r0, r0, r3, lsr #3      /* divide by 8 and add to the pointer */
        subs    r0, r0, #4              /* compensate for the post-inc */
        RET
.Lnomatch:
        movs    r0, #0
        RET
#else
        /*
         * No fancy shortcuts here, so just test each byte lane for a NUL or
         * a match (other ways of testing for NULs in a word take more
         * instructions/cycles).
         */
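        /*
         * Each TST sets Z when the tested byte lane is all zero, i.e. the
         * byte is NUL (tested in r3) or equals the searched char (tested in
         * r1 = r3 ^ r2, computed below).  The TSTNEs only execute while all
         * earlier lanes were clean, so reaching the BNE still NE means the
         * whole word held neither a NUL nor a match.
         */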
        eor     r1, r3, r2              /* xor to clear matching lanes */
        tst     r3, #BYTE0              /* is this byte NUL? */
        tstne   r1, #BYTE0              /* no, does this byte match? */
        tstne   r3, #BYTE1              /* no, is this byte NUL? */
        tstne   r1, #BYTE1              /* no, does this byte match? */
        tstne   r3, #BYTE2              /* no, is this byte NUL? */
        tstne   r1, #BYTE2              /* no, does this byte match? */
        tstne   r3, #BYTE3              /* no, is this byte NUL? */
        tstne   r1, #BYTE3              /* no, does this byte match? */
        bne     .Lmain_loop

        sub     r2, r0, #4              /* un post-inc */
        mov     r0, #0                  /* assume no match */

        tst     r1, #BYTE0              /* does this byte match? */
        moveq   r0, r2                  /* yes, point to it */
        RETc(eq)                        /* and return */
        tst     r3, #BYTE0              /* is this byte NUL? */
        RETc(eq)                        /* yes, return NULL */

        tst     r1, #BYTE1              /* does this byte match? */
        addeq   r0, r2, #1              /* yes, point to it */
        RETc(eq)                        /* and return */
        tst     r3, #BYTE1              /* is this byte NUL? */
        RETc(eq)                        /* yes, return NULL */

        tst     r1, #BYTE2              /* does this byte match? */
        addeq   r0, r2, #2              /* yes, point to it */
        RETc(eq)                        /* and return */
        tst     r3, #BYTE2              /* is this byte NUL? */
        RETc(eq)                        /* yes, return NULL */

        tst     r1, #BYTE3              /* does this byte match? */
        addeq   r0, r2, #3              /* yes, point to it */
        /*
         * No NUL or match turned up in the first three bytes, so the hit must
         * be in byte 3: either it matched (r0 now points to it) or it was the
         * NUL (r0 is still NULL).
         */
        RET                             /* return */
#endif /* _ARM_ARCH_6 */
END(strchr)