/*	$NetBSD: atomic_op_asm.h,v 1.3.2.1 2021/08/11 17:19:01 martin Exp $	*/

/*-
 * Copyright (c) 2014 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Matt Thomas of 3am Software Foundry.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

#ifndef _ATOMIC_OP_ASM_H_
#define	_ATOMIC_OP_ASM_H_

#include <machine/asm.h>
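
/*
 * The macros below generate the load-exclusive/store-exclusive (LL/SC)
 * retry loops that implement atomic read-modify-write operations on
 * AArch64.  Each loop loads the old value with ldxr{b,h,}, applies the
 * caller-supplied instruction, and conditionally stores the result with
 * stxr{b,h,}; a non-zero status from the store means the exclusive
 * monitor was lost, so the loop retries.  The plain ATOMIC_OP* variants
 * return the old value, the *_NV and *_FETCH variants return the new
 * value, and the SYNC_* variants add the full barriers required by the
 * compiler's __sync builtin contract.
 */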

#define	ATOMIC_OP8(OP, INSN)						\
ENTRY_NP(_atomic_##OP##_8)					;\
	mov	x4, x0						;\
1:	ldxrb	w0, [x4]	/* load old value */		;\
	INSN	w2, w0, w1	/* calculate new value */	;\
	stxrb	w3, w2, [x4]	/* try to store */		;\
	cbnz	w3, 1b		/* succeed? no, try again */	;\
	ret			/* return old value */		;\
END(_atomic_##OP##_8)
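
/*
 * Illustrative instantiation (a sketch; the actual call sites live in
 * the per-operation .S files, e.g. atomic_add_8.S):
 *
 *	ATOMIC_OP8(add, add)
 *
 * emits _atomic_add_8(volatile uint8_t *p, uint8_t v), which atomically
 * adds v to *p and returns the previous contents of *p in w0.
 */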

#define	SYNC_FETCH_OP8(OP, INSN)					\
ENTRY_NP(__sync_fetch_and_##OP##_1)				;\
	mov	x4, x0						;\
	dmb	ish						;\
1:	ldxrb	w0, [x4]	/* load old value */		;\
	INSN	w2, w0, w1	/* calculate new value */	;\
	stxrb	w3, w2, [x4]	/* try to store */		;\
	cbnz	w3, 1b		/* succeed? no, try again */	;\
	dmb	ish						;\
	ret			/* return old value */		;\
END(__sync_fetch_and_##OP##_1)
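
/*
 * The dmb ish instructions bracketing the loop above give the __sync_*
 * entry points the full-barrier semantics that GCC documents for the
 * legacy __sync builtins; the plain _atomic_* entry points provide no
 * memory ordering by themselves.
 */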

#define	ATOMIC_OP8_NV(OP, INSN)						\
ENTRY_NP(_atomic_##OP##_8_nv)					;\
	mov	x4, x0		/* need x0 for return value */	;\
1:	ldxrb	w0, [x4]	/* load old value */		;\
	INSN	w0, w0, w1	/* calc new (return) value */	;\
	stxrb	w3, w0, [x4]	/* try to store */		;\
	cbnz	w3, 1b		/* succeed? no, try again */	;\
	ret			/* return new value */		;\
END(_atomic_##OP##_8_nv)

#define	SYNC_OP8_FETCH(OP, INSN)					\
ENTRY_NP(__sync_##OP##_and_fetch_1)				;\
	mov	x4, x0		/* need x0 for return value */	;\
	dmb	ish						;\
1:	ldxrb	w0, [x4]	/* load old value */		;\
	INSN	w0, w0, w1	/* calc new (return) value */	;\
	stxrb	w3, w0, [x4]	/* try to store */		;\
	cbnz	w3, 1b		/* succeed? no, try again */	;\
	dmb	ish						;\
	ret			/* return new value */		;\
END(__sync_##OP##_and_fetch_1)

#define	ATOMIC_OP16(OP, INSN)						\
ENTRY_NP(_atomic_##OP##_16)					;\
	mov	x4, x0						;\
1:	ldxrh	w0, [x4]	/* load old value */		;\
	INSN	w2, w0, w1	/* calculate new value */	;\
	stxrh	w3, w2, [x4]	/* try to store */		;\
	cbnz	w3, 1b		/* succeed? no, try again */	;\
	ret			/* return old value */		;\
END(_atomic_##OP##_16)

#define	SYNC_FETCH_OP16(OP, INSN)					\
ENTRY_NP(__sync_fetch_and_##OP##_2)				;\
	mov	x4, x0						;\
	dmb	ish						;\
1:	ldxrh	w0, [x4]	/* load old value */		;\
	INSN	w2, w0, w1	/* calculate new value */	;\
	stxrh	w3, w2, [x4]	/* try to store */		;\
	cbnz	w3, 1b		/* succeed? no, try again */	;\
	dmb	ish						;\
	ret			/* return old value */		;\
END(__sync_fetch_and_##OP##_2)

#define	ATOMIC_OP16_NV(OP, INSN)					\
ENTRY_NP(_atomic_##OP##_16_nv)					;\
	mov	x4, x0		/* need x0 for return value */	;\
1:	ldxrh	w0, [x4]	/* load old value */		;\
	INSN	w0, w0, w1	/* calc new (return) value */	;\
	stxrh	w3, w0, [x4]	/* try to store */		;\
	cbnz	w3, 1b		/* succeed? no, try again */	;\
	ret			/* return new value */		;\
END(_atomic_##OP##_16_nv)

#define	SYNC_OP16_FETCH(OP, INSN)					\
ENTRY_NP(__sync_##OP##_and_fetch_2)				;\
	mov	x4, x0		/* need x0 for return value */	;\
	dmb	ish						;\
1:	ldxrh	w0, [x4]	/* load old value */		;\
	INSN	w0, w0, w1	/* calc new (return) value */	;\
	stxrh	w3, w0, [x4]	/* try to store */		;\
	cbnz	w3, 1b		/* succeed? no, try again */	;\
	dmb	ish						;\
	ret			/* return new value */		;\
END(__sync_##OP##_and_fetch_2)

#define	ATOMIC_OP32(OP, INSN)						\
ENTRY_NP(_atomic_##OP##_32)					;\
	mov	x4, x0						;\
1:	ldxr	w0, [x4]	/* load old value */		;\
	INSN	w2, w0, w1	/* calculate new value */	;\
	stxr	w3, w2, [x4]	/* try to store */		;\
	cbnz	w3, 1b		/* succeed? no, try again */	;\
	ret			/* return old value */		;\
END(_atomic_##OP##_32)

#define	SYNC_FETCH_OP32(OP, INSN)					\
ENTRY_NP(__sync_fetch_and_##OP##_4)				;\
	mov	x4, x0						;\
	dmb	ish						;\
1:	ldxr	w0, [x4]	/* load old value */		;\
	INSN	w2, w0, w1	/* calculate new value */	;\
	stxr	w3, w2, [x4]	/* try to store */		;\
	cbnz	w3, 1b		/* succeed? no, try again */	;\
	dmb	ish						;\
	ret			/* return old value */		;\
END(__sync_fetch_and_##OP##_4)

#define	ATOMIC_OP32_NV(OP, INSN)					\
ENTRY_NP(_atomic_##OP##_32_nv)					;\
	mov	x4, x0		/* need x0 for return value */	;\
1:	ldxr	w0, [x4]	/* load old value */		;\
	INSN	w0, w0, w1	/* calc new (return) value */	;\
	stxr	w3, w0, [x4]	/* try to store */		;\
	cbnz	w3, 1b		/* succeed? no, try again */	;\
	ret			/* return new value */		;\
END(_atomic_##OP##_32_nv)

#define	SYNC_OP32_FETCH(OP, INSN)					\
ENTRY_NP(__sync_##OP##_and_fetch_4)				;\
	mov	x4, x0		/* need x0 for return value */	;\
	dmb	ish						;\
1:	ldxr	w0, [x4]	/* load old value */		;\
	INSN	w0, w0, w1	/* calc new (return) value */	;\
	stxr	w3, w0, [x4]	/* try to store */		;\
	cbnz	w3, 1b		/* succeed? no, try again */	;\
	dmb	ish						;\
	ret			/* return new value */		;\
END(__sync_##OP##_and_fetch_4)

#define	ATOMIC_OP64(OP, INSN)						\
ENTRY_NP(_atomic_##OP##_64)					;\
	mov	x4, x0						;\
1:	ldxr	x0, [x4]	/* load old value */		;\
	INSN	x2, x0, x1	/* calculate new value */	;\
	stxr	w3, x2, [x4]	/* try to store */		;\
	cbnz	w3, 1b		/* succeed? no, try again */	;\
	ret			/* return old value */		;\
END(_atomic_##OP##_64)

#define	SYNC_FETCH_OP64(OP, INSN)					\
ENTRY_NP(__sync_fetch_and_##OP##_8)				;\
	mov	x4, x0						;\
	dmb	ish						;\
1:	ldxr	x0, [x4]	/* load old value */		;\
	INSN	x2, x0, x1	/* calculate new value */	;\
	stxr	w3, x2, [x4]	/* try to store */		;\
	cbnz	w3, 1b		/* succeed? no, try again */	;\
	dmb	ish						;\
	ret			/* return old value */		;\
END(__sync_fetch_and_##OP##_8)

#define	ATOMIC_OP64_NV(OP, INSN)					\
ENTRY_NP(_atomic_##OP##_64_nv)					;\
	mov	x4, x0		/* need x0 for return value */	;\
1:	ldxr	x0, [x4]	/* load old value */		;\
	INSN	x0, x0, x1	/* calc new (return) value */	;\
	stxr	w3, x0, [x4]	/* try to store */		;\
	cbnz	w3, 1b		/* succeed? no, try again */	;\
	ret			/* return new value */		;\
END(_atomic_##OP##_64_nv)

#define	SYNC_OP64_FETCH(OP, INSN)					\
ENTRY_NP(__sync_##OP##_and_fetch_8)				;\
	mov	x4, x0		/* need x0 for return value */	;\
	dmb	ish						;\
1:	ldxr	x0, [x4]	/* load old value */		;\
	INSN	x0, x0, x1	/* calc new (return) value */	;\
	stxr	w3, x0, [x4]	/* try to store */		;\
	cbnz	w3, 1b		/* succeed? no, try again */	;\
	dmb	ish						;\
	ret			/* return new value */		;\
END(__sync_##OP##_and_fetch_8)
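
/*
 * Compilers emit calls to the __sync_*_N functions generated above
 * when code uses the legacy __sync builtins and the operation is not
 * inlined; for example, __sync_fetch_and_add() on a uint8_t object may
 * lower to a call to __sync_fetch_and_add_1.
 */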

#if defined(_KERNEL)

#define	ATOMIC_OP_ALIAS(a,s)	STRONG_ALIAS(a,s)

#else /* _KERNEL */

#define	ATOMIC_OP_ALIAS(a,s)	WEAK_ALIAS(a,s)

#endif /* _KERNEL */
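
/*
 * In the kernel the aliases are strong so the atomic_ops(3) names
 * resolve unconditionally; in userland they are weak so that another
 * definition (e.g. an optimized one) may override them.  Illustrative
 * use from an implementation file:
 *
 *	ATOMIC_OP_ALIAS(atomic_add_8,_atomic_add_8)
 */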

#endif /* _ATOMIC_OP_ASM_H_ */