/*	$NetBSD: macros.h,v 1.46 2017/05/22 17:12:11 ragge Exp $	*/

/*
 * Copyright (c) 1994, 1998, 2000 Ludd, University of Luleå, Sweden.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/* All bugs are subject to removal without further notice */

#if !defined(_VAX_MACROS_H_) && !defined(__lint__)
#define _VAX_MACROS_H_

void	__blkset(void *, int, size_t);
void	__blkcpy(const void *, void *, size_t);

#if !__GNUC_PREREQ__(4, 1)
/* Here general macros are supposed to be stored */

static __inline int __attribute__((__unused__))
vax_ffs(int reg)
{
	register int val;

	__asm volatile ("ffs $0,$32,%1,%0;"
			"bneq 1f;"
			"mnegl $1,%0;"
			"1:;"
			"incl %0"
			: "=&r" (val)
			: "r" (reg) );
	return val;
}
#define ffs vax_ffs
#endif
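
#if 0
/*
 * Illustrative only (not compiled): a portable C sketch of the
 * vax_ffs() semantics above, for reference.  The asm yields the
 * ffs-instruction result (0-based bit position, or -1 when no bit is
 * found) plus one, so the return value is the 1-based position of the
 * lowest set bit, or 0 if no bit is set.  ffs_ref is a hypothetical
 * name, not part of this header.
 */
static __inline int
ffs_ref(int reg)
{
	int i;

	for (i = 0; i < 32; i++)
		if (reg & (1 << i))
			return i + 1;	/* bits are numbered from 1 */
	return 0;			/* no bit set */
}
#endif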

static __inline void __attribute__((__unused__))
vax_remque(void *p)
{
	__asm volatile ("remque (%0),%0;clrl 4(%0)"
			:
			: "r" (p)
			: "memory" );
}

static __inline void __attribute__((__unused__))
vax_insque(void *p, void *q)
{
	__asm volatile ("insque (%0),(%1)"
			:
			: "r" (p),"r" (q)
			: "memory" );
}

#if 0
static __inline void *__attribute__((__unused__))
vax_memcpy(void *to, const void *from, size_t len)
{
	if (len > 65535) {
		__blkcpy(from, to, len);
	} else {
		__asm volatile ("movc3 %1,%2,%0"
			: "=m" (*(char *)to)
			: "g" (len), "mo" (*(const char *)from)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return to;
}
#define memcpy vax_memcpy

static __inline void *__attribute__((__unused__))
vax_memmove(void *to, const void *from, size_t len)
{
	if (len > 65535) {
		__blkcpy(from, to, len);
	} else {
		__asm volatile ("movc3 %1,%2,%0"
			: "=m" (*(char *)to)
			: "g" (len), "mo" (*(const char *)from)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return to;
}
#define memmove vax_memmove

static __inline void *__attribute__((__unused__))
vax_memset(void *block, int c, size_t len)
{
	if (len > 65535) {
		__blkset(block, c, len);
	} else {
		__asm volatile ("movc5 $0,(%%sp),%2,%1,%0"
			: "=m" (*(char *)block)
			: "g" (len), "g" (c)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return block;
}
#define memset vax_memset
#endif

#ifdef notdef
/* XXX - the return syntax of memcmp is wrong */
static __inline int __attribute__((__unused__))
memcmp(const void *b1, const void *b2, size_t len)
{
	register int ret;

	__asm volatile("cmpc3 %3,(%1),(%2);"
		       "movl %%r0,%0"
			: "=r" (ret)
			: "r" (b1), "r" (b2), "r" (len)
			: "r0","r1","r2","r3" );
	return ret;
}

static __inline int __attribute__((__unused__))
bcmp(const void *b1, const void *b2, size_t len)
{
	register int ret;

	__asm volatile("cmpc3 %3,(%1),(%2);"
		       "movl %%r0,%0"
			: "=r" (ret)
			: "r" (b1), "r" (b2), "r" (len)
			: "r0","r1","r2","r3" );
	return ret;
}

/* Begin nya */
static __inline size_t __attribute__((__unused__))
strlen(const char *cp)
{
	register size_t ret;

	__asm volatile("locc $0,$65535,(%1);"
		       "subl3 %%r0,$65535,%0"
			: "=r" (ret)
			: "r" (cp)
			: "r0","r1","cc" );
	return ret;
}

static __inline char * __attribute__((__unused__))
strcat(char *cp, const char *c2)
{
	__asm volatile("locc $0,$65535,(%1);"
		       "subl3 %%r0,$65535,%%r2;"
		       "incl %%r2;"
		       "locc $0,$65535,(%0);"
		       "movc3 %%r2,(%1),(%%r1)"
			:
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return cp;
}

static __inline char * __attribute__((__unused__))
strncat(char *cp, const char *c2, size_t count)
{
	__asm volatile("locc $0,%2,(%1);"
		       "subl3 %%r0,%2,%%r2;"
		       "locc $0,$65535,(%0);"
		       "movc3 %%r2,(%1),(%%r1);"
		       "movb $0,(%%r3)"
			:
			: "r" (cp), "r" (c2), "g"(count)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return cp;
}

static __inline char * __attribute__((__unused__))
strcpy(char *cp, const char *c2)
{
	__asm volatile("locc $0,$65535,(%1);"
		       "subl3 %%r0,$65535,%%r2;"
		       "movc3 %%r2,(%1),(%0);"
		       "movb $0,(%%r3)"
			:
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return cp;
}

static __inline char * __attribute__((__unused__))
strncpy(char *cp, const char *c2, size_t len)
{
	__asm volatile("movl %2,%%r2;"
		       "locc $0,%%r2,(%1);"
		       "beql 1f;"
		       "subl3 %%r0,%2,%%r2;"
		       "clrb (%0)[%%r2];"
		       "1:;"
		       "movc3 %%r2,(%1),(%0)"
			:
			: "r" (cp), "r" (c2), "g"(len)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return cp;
}

static __inline void *__attribute__((__unused__))
memchr(const void *cp, int c, size_t len)
{
	void *ret;

	__asm volatile("locc %2,%3,(%1);"
		       "bneq 1f;"
		       "clrl %%r1;"
		       "1:;"
		       "movl %%r1,%0"
			: "=g"(ret)
			: "r" (cp), "r" (c), "g"(len)
			: "r0","r1","cc");
	return ret;
}

static __inline int __attribute__((__unused__))
strcmp(const char *cp, const char *c2)
{
	register int ret;

	__asm volatile("locc $0,$65535,(%1);"
		       "subl3 %%r0,$65535,%%r0;"
		       "incl %%r0;"
		       "cmpc3 %%r0,(%1),(%2);"
		       "beql 1f;"
		       "movl $1,%%r2;"
		       "cmpb (%%r1),(%%r3);"
		       "bcc 1f;"
		       "mnegl $1,%%r2;"
		       "1:;"
		       "movl %%r2,%0"
			: "=g"(ret)
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","cc");
	return ret;
}
#endif

#if 0 /* unused, but no point in deleting it since it _is_ an instruction */
static __inline int __attribute__((__unused__))
locc(int mask, char *cp, size_t size)
{
	register int ret;

	__asm volatile("locc %1,%2,(%3);"
		       "movl %%r0,%0"
			: "=r" (ret)
			: "r" (mask),"r"(size),"r"(cp)
			: "r0","r1" );
	return ret;
}
#endif

static __inline int __attribute__((__unused__))
vax_scanc(u_int size, const u_char *cp, const u_char *table, int mask)
{
	register int ret;

	__asm volatile("scanc %1,(%2),(%3),%4;"
		       "movl %%r0,%0"
			: "=g"(ret)
			: "r"(size),"r"(cp),"r"(table),"r"(mask)
			: "r0","r1","r2","r3" );
	return ret;
}
#define scanc vax_scanc

static __inline int __attribute__((__unused__))
vax_skpc(int mask, size_t size, u_char *cp)
{
	register int ret;

	__asm volatile("skpc %1,%2,(%3);"
		       "movl %%r0,%0"
			: "=g"(ret)
			: "r"(mask),"r"(size),"r"(cp)
			: "r0","r1" );
	return ret;
}
#define skpc vax_skpc

/*
 * Set/clear a bit at a memory position; interlocked.
 * Return 0 if the bit was already in the requested state, 1 otherwise.
 */
static __inline int __attribute__((__unused__))
bbssi(int bitnr, long *addr)
{
	register int ret;

	__asm volatile("clrl %%r0;"
		       "bbssi %1,%2,1f;"
		       "incl %%r0;"
		       "1:;"
		       "movl %%r0,%0"
			: "=&r"(ret)
			: "g"(bitnr),"m"(*addr)
			: "r0","cc","memory");
	return ret;
}

static __inline int __attribute__((__unused__))
bbcci(int bitnr, long *addr)
{
	register int ret;

	__asm volatile("clrl %%r0;"
		       "bbcci %1,%2,1f;"
		       "incl %%r0;"
		       "1:;"
		       "movl %%r0,%0"
			: "=&r"(ret)
			: "g"(bitnr),"m"(*addr)
			: "r0","cc","memory");
	return ret;
}
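
#if 0
/*
 * Illustrative only (not compiled): a minimal test-and-set spinlock
 * sketch built on the interlocked bbssi()/bbcci() primitives above.
 * The names __lock_spin()/__lock_release() are hypothetical, not part
 * of this header; a real lock would also want backoff in the spin loop.
 */
static __inline void
__lock_spin(long *lock)
{
	/* bbssi() returns 1 when we changed bit 0 from clear to set. */
	while (bbssi(0, lock) == 0)
		;	/* bit already set: someone else holds the lock */
}

static __inline void
__lock_release(long *lock)
{
	(void)bbcci(0, lock);	/* interlocked clear of bit 0 */
}
#endif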

static inline struct lwp *
cpu_switchto(struct lwp *oldlwp, struct lwp *newlwp, bool returning)
{
	struct lwp *prevlwp;

	__asm volatile(
	    "movl %1,%%r0;"
	    "movl %2,%%r1;"
	    "movpsl -(%%sp);"
	    "jsb Swtchto;"
	    "movl %%r0,%0"
	    : "=g"(prevlwp)
	    : "g" (oldlwp), "g" (newlwp)
	    : "r0", "r1");
	return prevlwp;
}

/*
 * Interlock instructions. Used both in multiprocessor environments to
 * lock between CPUs and in uniprocessor systems when locking is required
 * between I/O devices and the master CPU.
 */
/*
 * Insqti() locks and inserts an element at the end of a queue.
 * Returns -1 if the interlock failed, 1 if inserted OK and 0 if first
 * in queue.
 */
static __inline int __attribute__((__unused__))
insqti(void *entry, void *header)
{
	register int ret;

	__asm volatile(
	    "	mnegl $1,%0;"
	    "	insqti (%1),(%2);"
	    "	bcs 1f;"		/* failed insert */
	    "	beql 2f;"		/* jump if first entry */
	    "	movl $1,%0;"
	    "	brb 1f;"
	    "2:	clrl %0;"
	    "1:;"
	    : "=&g"(ret)
	    : "r"(entry), "r"(header)
	    : "memory");

	return ret;
}

/*
 * Remqhi() removes an element from the head of the queue.
 * Returns -1 if the interlock failed, 0 if the queue was empty, and the
 * address of the removed element otherwise.
 */
static __inline void *__attribute__((__unused__))
remqhi(void *header)
{
	register void *ret;

	__asm volatile(
	    "	remqhi (%1),%0;"
	    "	bcs 1f;"		/* failed interlock */
	    "	bvs 2f;"		/* nothing was removed */
	    "	brb 3f;"
	    "1:	mnegl $1,%0;"
	    "	brb 3f;"
	    "2:	clrl %0;"
	    "3:;"
	    : "=&g"(ret)
	    : "r"(header)
	    : "memory");

	return ret;
}
#define	ILCK_FAILED	-1	/* Interlock failed */
#define	Q_EMPTY		0	/* Queue is/was empty */
#define	Q_OK		1	/* Inserted OK */
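
#if 0
/*
 * Illustrative only (not compiled): a minimal sketch of driving an
 * interlocked queue with insqti()/remqhi() and the status macros above.
 * The element layout and the name ilck_example are hypothetical; real
 * VAX self-relative queue entries begin with forward/backward link
 * longwords and need quadword alignment.
 */
struct ilck_elem {
	struct ilck_elem *flink;	/* forward link, first longword */
	struct ilck_elem *blink;	/* backward link, second longword */
	int data;
};

static __inline void
ilck_example(struct ilck_elem *q_head, struct ilck_elem *e)
{
	void *p;

	/* Retry until the hardware interlock is won. */
	while (insqti(e, q_head) == ILCK_FAILED)
		;

	do {
		p = remqhi(q_head);
	} while (p == (void *)ILCK_FAILED);	/* interlock failed: retry */

	if (p == (void *)Q_EMPTY)
		return;			/* queue was empty */
	/* p now points at the removed element. */
}
#endif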

#endif	/* !_VAX_MACROS_H_ && !__lint__ */