/*	$NetBSD: macros.h,v 1.29 2003/08/13 11:30:50 ragge Exp $	*/

/*
 * Copyright (c) 1994, 1998, 2000 Ludd, University of Luleå, Sweden.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *      This product includes software developed at Ludd, University of Luleå.
 * 4. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/* All bugs are subject to removal without further notice */

#if !defined(_VAX_MACROS_H_) && !defined(lint)
#define _VAX_MACROS_H_

void	__blkset(void *, int, size_t);
void	__blkcpy(const void *, void *, size_t);

/* Here general macros are supposed to be stored */

static __inline__ int __attribute__((__unused__))
ffs(int reg)
{
	register int val;

	__asm__ __volatile ("ffs $0,$32,%1,%0;"
			    "bneq 1f;"
			    "mnegl $1,%0;"
			    "1:;"
			    "incl %0"
			: "=&r" (val)
			: "r" (reg) );
	return val;
}
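
/*
 * Note on ffs() above: the VAX FFS instruction sets Z when no set bit
 * is found in the 32-bit field, leaving the position operand at 32.
 * The MNEGL/INCL pair maps that case to 0 and turns a found position
 * into a 1-based index, matching ffs(3).  E.g. ffs(0x18) yields 4
 * (bit 3 is the lowest set bit) and ffs(0) yields 0.
 */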

static __inline__ void __attribute__((__unused__))
_remque(void *p)
{
	__asm__ __volatile ("remque (%0),%0;clrl 4(%0)"
			:
			: "r" (p)
			: "memory" );
}

static __inline__ void __attribute__((__unused__))
_insque(void *p, void *q)
{
	__asm__ __volatile ("insque (%0),(%1)"
			:
			: "r" (p),"r" (q)
			: "memory" );
}
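
/*
 * _insque()/_remque() operate on the VAX queue format: each element
 * begins with two longwords, a forward and a backward link.  INSQUE
 * inserts p after q; REMQUE unlinks the element at p.  The trailing
 * "clrl 4(%0)" in _remque() zeroes the removed element's backward
 * link, which callers can then test to see that it is off the queue.
 */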

static __inline__ void * __attribute__((__unused__))
memcpy(void *to, const void *from, size_t len)
{
	if (len > 65535) {
		__blkcpy(from, to, len);
	} else {
		__asm__ __volatile ("movc3 %0,%1,%2"
			:
			: "g" (len), "m" (*(char *)from), "m" (*(char *)to)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return to;
}

static __inline__ void * __attribute__((__unused__))
memmove(void *to, const void *from, size_t len)
{
	if (len > 65535) {
		__blkcpy(from, to, len);
	} else {
		__asm__ __volatile ("movc3 %0,%1,%2"
			:
			: "g" (len), "m" (*(char *)from), "m" (*(char *)to)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return to;
}
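
/*
 * memcpy() and memmove() are intentionally identical: MOVC3 is defined
 * to copy correctly even when source and destination overlap.  Its
 * length operand is a 16-bit word, so copies larger than 65535 bytes
 * fall back on __blkcpy(), which handles arbitrary lengths.
 */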

#ifdef notdef /* bcopy() is obsolete in the kernel */
static __inline__ void __attribute__((__unused__))
bcopy(const void *from, void *to, size_t len)
{
	__asm__ __volatile ("movc3 %0,%1,%2"
		:
		: "g" (len), "m" (*(char *)from), "m" (*(char *)to)
		: "r0","r1","r2","r3","r4","r5","memory","cc");
}
#endif

static __inline__ void * __attribute__((__unused__))
memset(void *block, int c, size_t len)
{
	if (len > 65535) {
		__blkset(block, c, len);
	} else {
		__asm__ __volatile ("movc5 $0,(%%sp),%2,%1,%0"
			:
			: "m" (*(char *)block), "g" (len), "g" (c)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return block;
}
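
/*
 * MOVC5 with a zero-length source fills the destination with the fill
 * character, so "movc5 $0,(%%sp),%2,%1,%0" is a pure memory fill;
 * (%sp) is only a dummy source address.  As with MOVC3, the length is
 * limited to 65535 bytes, hence the __blkset() fallback above.
 */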

#ifdef notdef /* bzero() is obsolete in the kernel */
static __inline__ void __attribute__((__unused__))
bzero(void *block, size_t len)
{
	if (len > 65535)
		__blkset(block, 0, len);
	else {
		__asm__ __volatile ("movc5 $0,(%%sp),$0,%1,%0"
			:
			: "m" (*(char *)block), "g" (len)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	}
}
#endif

#ifdef notdef
/* XXX - the return syntax of memcmp is wrong */
static __inline__ int __attribute__((__unused__))
memcmp(const void *b1, const void *b2, size_t len)
{
	register int ret;

	__asm__ __volatile("cmpc3 %3,(%1),(%2);"
			   "movl %%r0,%0"
			: "=r" (ret)
			: "r" (b1), "r" (b2), "r" (len)
			: "r0","r1","r2","r3" );
	return ret;
}
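
/*
 * "Wrong" because CMPC3 leaves the number of bytes remaining in r0, so
 * memcmp()/bcmp() here return nonzero on inequality but not the signed
 * difference of the first differing bytes that memcmp(3) promises.
 * They are safe only as boolean equality tests.
 */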

static __inline__ int __attribute__((__unused__))
bcmp(const void *b1, const void *b2, size_t len)
{
	register int ret;

	__asm__ __volatile("cmpc3 %3,(%1),(%2);"
			   "movl %%r0,%0"
			: "=r" (ret)
			: "r" (b1), "r" (b2), "r" (len)
			: "r0","r1","r2","r3" );
	return ret;
}

/* Begin new string routines */
static __inline__ size_t __attribute__((__unused__))
strlen(const char *cp)
{
	register size_t ret;

	__asm__ __volatile("locc $0,$65535,(%1);"
			   "subl3 %%r0,$65535,%0"
			: "=r" (ret)
			: "r" (cp)
			: "r0","r1","cc" );
	return ret;
}

static __inline__ char * __attribute__((__unused__))
strcat(char *cp, const char *c2)
{
	__asm__ __volatile("locc $0,$65535,(%1);"
			   "subl3 %%r0,$65535,%%r2;"
			   "incl %%r2;"
			   "locc $0,$65535,(%0);"
			   "movc3 %%r2,(%1),(%%r1)"
			:
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return cp;
}

static __inline__ char * __attribute__((__unused__))
strncat(char *cp, const char *c2, size_t count)
{
	__asm__ __volatile("locc $0,%2,(%1);"
			   "subl3 %%r0,%2,%%r2;"
			   "locc $0,$65535,(%0);"
			   "movc3 %%r2,(%1),(%%r1);"
			   "movb $0,(%%r3)"
			:
			: "r" (cp), "r" (c2), "g"(count)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return cp;
}

static __inline__ char * __attribute__((__unused__))
strcpy(char *cp, const char *c2)
{
	__asm__ __volatile("locc $0,$65535,(%1);"
			   "subl3 %%r0,$65535,%%r2;"
			   "movc3 %%r2,(%1),(%0);"
			   "movb $0,(%%r3)"
			:
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return cp;
}

static __inline__ char * __attribute__((__unused__))
strncpy(char *cp, const char *c2, size_t len)
{
	__asm__ __volatile("movl %2,%%r2;"
			   "locc $0,%%r2,(%1);"
			   "beql 1f;"
			   "subl3 %%r0,%2,%%r2;"
			   "clrb (%0)[%%r2];"
			   "1:;"
			   "movc3 %%r2,(%1),(%0)"
			:
			: "r" (cp), "r" (c2), "g"(len)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return cp;
}

static __inline__ void * __attribute__((__unused__))
memchr(const void *cp, int c, size_t len)
{
	void *ret;

	__asm__ __volatile("locc %2,%3,(%1);"
			   "bneq 1f;"
			   "clrl %%r1;"
			   "1:;"
			   "movl %%r1,%0"
			: "=g"(ret)
			: "r" (cp), "r" (c), "g"(len)
			: "r0","r1","cc");
	return ret;
}

static __inline__ int __attribute__((__unused__))
strcmp(const char *cp, const char *c2)
{
	register int ret;

	__asm__ __volatile("locc $0,$65535,(%1);"
			   "subl3 %%r0,$65535,%%r0;"
			   "incl %%r0;"
			   "cmpc3 %%r0,(%1),(%2);"
			   "beql 1f;"
			   "movl $1,%%r2;"
			   "cmpb (%%r1),(%%r3);"
			   "bcc 1f;"
			   "mnegl $1,%%r2;"
			   "1:;"
			   "movl %%r2,%0"
			: "=g"(ret)
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","cc");
	return ret;
}
#endif

#if 0 /* unused, but no point in deleting it since it _is_ an instruction */
static __inline__ int __attribute__((__unused__))
locc(int mask, char *cp, size_t size)
{
	register int ret;

	__asm__ __volatile("locc %1,%2,(%3);"
			   "movl %%r0,%0"
			: "=r" (ret)
			: "r" (mask),"r"(size),"r"(cp)
			: "r0","r1" );
	return ret;
}
#endif

static __inline__ int __attribute__((__unused__))
scanc(u_int size, const u_char *cp, const u_char *table, int mask)
{
	register int ret;

	__asm__ __volatile("scanc %1,(%2),(%3),%4;"
			   "movl %%r0,%0"
			: "=g"(ret)
			: "r"(size),"r"(cp),"r"(table),"r"(mask)
			: "r0","r1","r2","r3" );
	return ret;
}
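
/*
 * SCANC uses each byte of the string as an index into the table and
 * stops at the first byte whose table entry ANDed with the mask is
 * nonzero; r0 is left holding the number of bytes remaining, so a
 * return of 0 means no byte matched.  Historically this was used for
 * character-class scans in the tty output code.
 */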

static __inline__ int __attribute__((__unused__))
skpc(int mask, size_t size, u_char *cp)
{
	register int ret;

	__asm__ __volatile("skpc %1,%2,(%3);"
			   "movl %%r0,%0"
			: "=g"(ret)
			: "r"(mask),"r"(size),"r"(cp)
			: "r0","r1" );
	return ret;
}
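
/*
 * SKPC is the complement of LOCC: it skips over bytes equal to the
 * mask character and leaves the number of bytes remaining in r0.
 * Thus skpc(' ', len, cp) == 0 means the whole buffer was spaces.
 */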

/*
 * Set/clear a bit at a memory position; interlocked.
 * Return 0 if the bit was already in the requested state, 1 otherwise.
 */
static __inline__ int __attribute__((__unused__))
bbssi(int bitnr, long *addr)
{
	register int ret;

	__asm__ __volatile("clrl %%r0;"
			   "bbssi %1,%2,1f;"
			   "incl %%r0;"
			   "1:;"
			   "movl %%r0,%0"
			: "=&r"(ret)
			: "g"(bitnr),"m"(*addr)
			: "r0","cc","memory");
	return ret;
}

static __inline__ int __attribute__((__unused__))
bbcci(int bitnr, long *addr)
{
	register int ret;

	__asm__ __volatile("clrl %%r0;"
			   "bbcci %1,%2,1f;"
			   "incl %%r0;"
			   "1:;"
			   "movl %%r0,%0"
			: "=&r"(ret)
			: "g"(bitnr),"m"(*addr)
			: "r0","cc","memory");
	return ret;
}
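
/*
 * Illustrative sketch (not from the original file): since the test and
 * modify in BBSSI/BBCCI is interlocked, bbssi() can serve as a
 * primitive spin lock; "lockword" below is a hypothetical variable.
 *
 *	static long lockword;
 *
 *	while (bbssi(0, &lockword) == 0)
 *		;			// bit was already set; spin
 *	// ...critical section...
 *	(void)bbcci(0, &lockword);	// release: clear the bit
 */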

#define setrunqueue(p) \
	__asm__ __volatile("movl %0,%%r0;jsb Setrq" :: "g"(p):"r0","r1","r2");

#define remrunqueue(p) \
	__asm__ __volatile("movl %0,%%r0;jsb Remrq" :: "g"(p):"r0","r1","r2");

#define cpu_switch(p, newp) ({ \
	register int ret; \
	__asm__ __volatile("movpsl -(%%sp);jsb Swtch; movl %%r0,%0" \
	    : "=g"(ret) :: "r0","r1","r2","r3","r4","r5"); \
	ret; })

#define cpu_switchto(p, newp) \
	__asm __volatile("movpsl -(%%sp); movl %0,%%r2; jsb Swtchto" \
	    :: "g" (newp) : "r0", "r1", "r2", "r3", "r4", "r5")
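
/*
 * Setrq, Remrq, Swtch and Swtchto are JSB entry points in the VAX
 * locore assembler.  The "movpsl -(%sp)" before the jsb pushes the PSL
 * under the return PC, presumably so the resume path can return with
 * an REI; this is a reading of the macros above, not a statement about
 * the assembler side.
 */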

/*
 * Interlock instructions. Used both in multiprocessor environments to
 * lock between CPUs and in uniprocessor systems when locking is required
 * between I/O devices and the master CPU.
 */
/*
 * Insqti() locks and inserts an element at the end of a queue.
 * Returns -1 if interlock failed, 1 if inserted OK and 0 if first in queue.
 */
static __inline__ int __attribute__((__unused__))
insqti(void *entry, void *header)
{
	register int ret;

	__asm__ __volatile(
	"	mnegl $1,%0;"
	"	insqti (%1),(%2);"
	"	bcs 1f;"		/* failed insert */
	"	beql 2f;"		/* jump if first entry */
	"	movl $1,%0;"
	"	brb 1f;"
	"2:	clrl %0;"
	"1:;"
		: "=&g"(ret)
		: "r"(entry), "r"(header)
		: "memory");

	return ret;
}

/*
 * Remqhi() removes an element from the head of the queue.
 * Returns -1 if interlock failed, 0 if queue empty, address of the
 * removed element otherwise.
 */
static __inline__ void * __attribute__((__unused__))
remqhi(void *header)
{
	register void *ret;

	__asm__ __volatile(
	"	remqhi (%1),%0;"
	"	bcs 1f;"		/* failed interlock */
	"	bvs 2f;"		/* nothing was removed */
	"	brb 3f;"
	"1:	mnegl $1,%0;"
	"	brb 3f;"
	"2:	clrl %0;"
	"3:;"
		: "=&g"(ret)
		: "r"(header)
		: "memory");

	return ret;
}
#define	ILCK_FAILED	-1	/* Interlock failed */
#define	Q_EMPTY		0	/* Queue is/was empty */
#define	Q_OK		1	/* Inserted OK */
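
/*
 * Usage sketch (illustrative, not from the original file): callers
 * typically retry when the secondary interlock is busy:
 *
 *	while ((ret = insqti(elem, &q_head)) == ILCK_FAILED)
 *		;			// interlock busy; try again
 *	if (ret == Q_EMPTY)
 *		// queue was empty before the insert; kick the consumer
 */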

#endif	/* _VAX_MACROS_H_ */