/*	$NetBSD: macros.h,v 1.28 2003/01/18 07:10:33 thorpej Exp $	*/

/*
 * Copyright (c) 1994, 1998, 2000 Ludd, University of Luleå, Sweden.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *      This product includes software developed at Ludd, University of Luleå.
 * 4. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/* All bugs are subject to removal without further notice */

#if !defined(_VAX_MACROS_H_) && !defined(lint)
#define _VAX_MACROS_H_

/* General-purpose inline macros are collected here. */

/*
 * Find-first-set: return the 1-based index of the least significant
 * set bit in reg, or 0 if no bit is set.  The VAX ffs instruction
 * yields a 0-based index, so "not found" is mapped to -1 and the
 * result is incremented.
 */
static __inline__ int __attribute__((__unused__))
ffs(int reg)
{
	register int val;

	__asm__ __volatile ("ffs $0,$32,%1,%0;"
	    "bneq 1f;"		/* a set bit was found */
	    "mnegl $1,%0;"	/* none set: -1, so incl yields 0 */
	    "1:;"
	    "incl %0"		/* convert to 1-based index */
	    : "=&r" (val)
	    : "r" (reg) );
	return val;
}
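
/*
 * Examples (a sketch of the expected behavior, not from the original
 * header): ffs(0) == 0, ffs(1) == 1, ffs(0x10) == 5.  The incl above
 * converts the hardware's 0-based bit index to the 1-based ffs(3)
 * convention.
 */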

/* Remove the queue element at p (REMQUE) and clear its backward link. */
static __inline__ void __attribute__((__unused__))
_remque(void *p)
{
	__asm__ __volatile ("remque (%0),%0;clrl 4(%0)"
	    :
	    : "r" (p)
	    : "memory" );
}

/* Insert element p directly after element q (INSQUE). */
static __inline__ void __attribute__((__unused__))
_insque(void *p, void *q)
{
	__asm__ __volatile ("insque (%0),(%1)"
	    :
	    : "r" (p),"r" (q)
	    : "memory" );
}

/*
 * Note: movc3 takes a 16-bit length operand, so these inline copies
 * handle at most 65535 bytes.  movc3 also copies overlapping regions
 * correctly, which is why memmove() can use the same instruction as
 * memcpy().
 */
static __inline__ void * __attribute__((__unused__))
memcpy(void *to, const void *from, size_t len)
{
	__asm__ __volatile ("movc3 %0,%1,%2"
	    :
	    : "g" (len), "m" (*(char *)from), "m" (*(char *)to)
	    : "r0","r1","r2","r3","r4","r5","memory","cc");
	return to;
}

static __inline__ void * __attribute__((__unused__))
memmove(void *to, const void *from, size_t len)
{
	__asm__ __volatile ("movc3 %0,%1,%2"
	    :
	    : "g" (len), "m" (*(char *)from), "m" (*(char *)to)
	    : "r0","r1","r2","r3","r4","r5","memory","cc");
	return to;
}

static __inline__ void __attribute__((__unused__))
bcopy(const void *from, void *to, size_t len)
{
	__asm__ __volatile ("movc3 %0,%1,%2"
	    :
	    : "g" (len), "m" (*(char *)from), "m" (*(char *)to)
	    : "r0","r1","r2","r3","r4","r5","memory","cc");
}

/* __blkset() handles fills longer than the 16-bit movc5 length limit. */
void __blkset(void *, int, size_t);

static __inline__ void * __attribute__((__unused__))
memset(void *block, int c, size_t len)
{
	if (len > 65535)
		__blkset(block, c, len);
	else {
		__asm__ __volatile ("movc5 $0,(%%sp),%2,%1,%0"
		    :
		    : "m" (*(char *)block), "g" (len), "g" (c)
		    : "r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return block;
}

static __inline__ void __attribute__((__unused__))
bzero(void *block, size_t len)
{
	if (len > 65535)
		__blkset(block, 0, len);
	else {
		__asm__ __volatile ("movc5 $0,(%%sp),$0,%1,%0"
		    :
		    : "m" (*(char *)block), "g" (len)
		    : "r0","r1","r2","r3","r4","r5","memory","cc");
	}
}

/*
 * XXX - the return convention of memcmp is wrong: cmpc3 leaves the
 * count of unmatched bytes in r0, so this returns 0 on equality but
 * an always-positive count on inequality, not the sign of the first
 * differing byte pair.
 */
static __inline__ int __attribute__((__unused__))
memcmp(const void *b1, const void *b2, size_t len)
{
	register int ret;

	__asm__ __volatile("cmpc3 %3,(%1),(%2);"
	    "movl %%r0,%0"
	    : "=r" (ret)
	    : "r" (b1), "r" (b2), "r" (len)
	    : "r0","r1","r2","r3" );
	return ret;
}
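
/*
 * Example of the caveat above (illustration, not in the original
 * header): both of these calls return 1, the count of unmatched
 * bytes, so only equality tests against 0 are safe:
 *
 *	memcmp("a", "b", 1);	// == 1, not negative
 *	memcmp("b", "a", 1);	// == 1, not positive
 */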

/* bcmp() only promises a zero/nonzero answer, so cmpc3 suffices here. */
static __inline__ int __attribute__((__unused__))
bcmp(const void *b1, const void *b2, size_t len)
{
	register int ret;

	__asm__ __volatile("cmpc3 %3,(%1),(%2);"
	    "movl %%r0,%0"
	    : "=r" (ret)
	    : "r" (b1), "r" (b2), "r" (len)
	    : "r0","r1","r2","r3" );
	return ret;
}

/* Begin new routines */

/*
 * locc takes a 16-bit length, so the routines below assume strings
 * shorter than 65535 bytes.
 */
static __inline__ size_t __attribute__((__unused__))
strlen(const char *cp)
{
	register size_t ret;

	__asm__ __volatile("locc $0,$65535,(%1);"	/* find the NUL */
	    "subl3 %%r0,$65535,%0"			/* length = 65535 - rest */
	    : "=r" (ret)
	    : "r" (cp)
	    : "r0","r1","cc" );
	return ret;
}

/* Append c2, including its terminating NUL, at the end of cp. */
static __inline__ char * __attribute__((__unused__))
strcat(char *cp, const char *c2)
{
	__asm__ __volatile("locc $0,$65535,(%1);"	/* find NUL in c2 */
	    "subl3 %%r0,$65535,%%r2;"			/* r2 = strlen(c2) */
	    "incl %%r2;"				/* include the NUL */
	    "locc $0,$65535,(%0);"			/* r1 = end of cp */
	    "movc3 %%r2,(%1),(%%r1)"			/* copy c2 there */
	    :
	    : "r" (cp), "r" (c2)
	    : "r0","r1","r2","r3","r4","r5","memory","cc");
	return cp;
}

/* Append at most count bytes of c2 to cp and terminate with a NUL. */
static __inline__ char * __attribute__((__unused__))
strncat(char *cp, const char *c2, size_t count)
{
	__asm__ __volatile("locc $0,%2,(%1);"	/* NUL within count bytes? */
	    "subl3 %%r0,%2,%%r2;"		/* r2 = bytes to copy */
	    "locc $0,$65535,(%0);"		/* r1 = end of cp */
	    "movc3 %%r2,(%1),(%%r1);"		/* copy */
	    "movb $0,(%%r3)"			/* terminate */
	    :
	    : "r" (cp), "r" (c2), "g"(count)
	    : "r0","r1","r2","r3","r4","r5","memory","cc");
	return cp;
}

static __inline__ char * __attribute__((__unused__))
strcpy(char *cp, const char *c2)
{
	__asm__ __volatile("locc $0,$65535,(%1);"	/* find NUL in c2 */
	    "subl3 %%r0,$65535,%%r2;"			/* r2 = strlen(c2) */
	    "movc3 %%r2,(%1),(%0);"			/* copy the body */
	    "movb $0,(%%r3)"				/* terminate */
	    :
	    : "r" (cp), "r" (c2)
	    : "r0","r1","r2","r3","r4","r5","memory","cc");
	return cp;
}

/*
 * XXX - unlike ANSI strncpy, this writes a single terminating NUL
 * instead of padding the remainder of the buffer with NULs.
 */
static __inline__ char * __attribute__((__unused__))
strncpy(char *cp, const char *c2, size_t len)
{
	__asm__ __volatile("movl %2,%%r2;"
	    "locc $0,%%r2,(%1);"	/* NUL within len bytes? */
	    "beql 1f;"			/* no: copy len bytes */
	    "subl3 %%r0,%2,%%r2;"	/* r2 = index of NUL */
	    "clrb (%0)[%%r2];"		/* terminate dst */
	    "1:;"
	    "movc3 %%r2,(%1),(%0)"
	    :
	    : "r" (cp), "r" (c2), "g"(len)
	    : "r0","r1","r2","r3","r4","r5","memory","cc");
	return cp;
}

/*
 * Return a pointer to the first occurrence of c in the first len
 * (< 65536) bytes, or NULL if it does not occur.
 */
static __inline__ void * __attribute__((__unused__))
memchr(const void *cp, int c, size_t len)
{
	void *ret;

	__asm__ __volatile("locc %2,%3,(%1);"	/* r1 = address of match */
	    "bneq 1f;"
	    "clrl %%r1;"			/* not found: NULL */
	    "1:;"
	    "movl %%r1,%0"
	    : "=g"(ret)
	    : "r" (cp), "r" (c), "g"(len)
	    : "r0","r1","cc");
	return ret;
}

/*
 * Compare as unsigned bytes.  On equality cmpc3 happens to leave 0 in
 * r2 (the count remaining in the second string), which is returned
 * directly.
 */
static __inline__ int __attribute__((__unused__))
strcmp(const char *cp, const char *c2)
{
	register int ret;

	__asm__ __volatile("locc $0,$65535,(%1);"	/* find NUL in cp */
	    "subl3 %%r0,$65535,%%r0;"			/* r0 = strlen(cp) */
	    "incl %%r0;"				/* include the NUL */
	    "cmpc3 %%r0,(%1),(%2);"
	    "beql 1f;"			/* equal: r2 is already 0 */
	    "movl $1,%%r2;"
	    "cmpb (%%r1),(%%r3);"	/* first differing bytes */
	    "bcc 1f;"			/* unsigned >=: return 1 */
	    "mnegl $1,%%r2;"
	    "1:;"
	    "movl %%r2,%0"
	    : "=g"(ret)
	    : "r" (cp), "r" (c2)
	    : "r0","r1","r2","r3","cc");
	return ret;
}
/* End new routines */

#if 0	/* unused, but no point in deleting it since it _is_ an instruction */
static __inline__ int __attribute__((__unused__))
locc(int mask, char *cp, size_t size)
{
	register int ret;

	__asm__ __volatile("locc %1,%2,(%3);"
	    "movl %%r0,%0"
	    : "=r" (ret)
	    : "r" (mask),"r"(size),"r"(cp)
	    : "r0","r1" );
	return ret;
}
#endif

/*
 * Scanc() steps through cp until a byte whose table entry ANDed with
 * mask is nonzero; returns the number of bytes remaining, 0 if none.
 */
static __inline__ int __attribute__((__unused__))
scanc(u_int size, const u_char *cp, const u_char *table, int mask)
{
	register int ret;

	__asm__ __volatile("scanc %1,(%2),(%3),%4;"
	    "movl %%r0,%0"
	    : "=g"(ret)
	    : "r"(size),"r"(cp),"r"(table),"r"(mask)
	    : "r0","r1","r2","r3" );
	return ret;
}

/*
 * Skpc() skips leading bytes equal to mask; returns the number of
 * bytes remaining, 0 if the whole span matched.
 */
static __inline__ int __attribute__((__unused__))
skpc(int mask, size_t size, u_char *cp)
{
	register int ret;

	__asm__ __volatile("skpc %1,%2,(%3);"
	    "movl %%r0,%0"
	    : "=g"(ret)
	    : "r"(mask),"r"(size),"r"(cp)
	    : "r0","r1" );
	return ret;
}

/*
 * Test and set/clear a bit at a memory position; interlocked.
 * bbssi() returns 0 if the bit was already set, 1 if it was clear
 * (and is now set); bbcci() returns 0 if the bit was already clear,
 * 1 if it was set (and is now clear).
 */
static __inline__ int __attribute__((__unused__))
bbssi(int bitnr, long *addr)
{
	register int ret;

	__asm__ __volatile("clrl %%r0;"
	    "bbssi %1,%2,1f;"	/* branch if bit was already set */
	    "incl %%r0;"
	    "1:;"
	    "movl %%r0,%0"
	    : "=&r"(ret)
	    : "g"(bitnr),"m"(*addr)
	    : "r0","cc","memory");
	return ret;
}

static __inline__ int __attribute__((__unused__))
bbcci(int bitnr, long *addr)
{
	register int ret;

	__asm__ __volatile("clrl %%r0;"
	    "bbcci %1,%2,1f;"	/* branch if bit was already clear */
	    "incl %%r0;"
	    "1:;"
	    "movl %%r0,%0"
	    : "=&r"(ret)
	    : "g"(bitnr),"m"(*addr)
	    : "r0","cc","memory");
	return ret;
}
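
/*
 * Usage sketch (an assumption, not part of the original header): the
 * interlocked bit operations can implement a simple spin lock on bit 0
 * of a lock word; "lockword" is a hypothetical variable.
 *
 *	long lockword = 0;
 *
 *	while (bbssi(0, &lockword) == 0)
 *		;			// bit already set: spin
 *	// ... critical section ...
 *	(void)bbcci(0, &lockword);	// release the lock
 */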

#define setrunqueue(p) \
	__asm__ __volatile("movl %0,%%r0;jsb Setrq" :: "g"(p) : "r0","r1","r2")

#define remrunqueue(p) \
	__asm__ __volatile("movl %0,%%r0;jsb Remrq" :: "g"(p) : "r0","r1","r2")

#define cpu_switch(p, newp) ({ \
	register int ret; \
	__asm__ __volatile("movpsl -(%%sp);jsb Swtch; movl %%r0,%0" \
	    : "=g"(ret) ::"r0","r1","r2","r3","r4","r5"); \
	ret; })

#define cpu_switchto(p, newp) \
	__asm__ __volatile("movpsl -(%%sp); movl %0,%%r2; jsb Swtchto" \
	    :: "g" (newp) : "r0", "r1", "r2", "r3", "r4", "r5")

/*
 * Interlock instructions.  Used both in multiprocessor environments to
 * lock between CPUs and in uniprocessor systems when locking is required
 * between I/O devices and the master CPU.
 */

/*
 * Insqti() inserts an element at the tail of a queue, interlocked.
 * Returns -1 if the interlock failed, 0 if the queue was empty (the
 * element is now first), and 1 if it was inserted after existing entries.
 */
static __inline__ int __attribute__((__unused__))
insqti(void *entry, void *header)
{
	register int ret;

	__asm__ __volatile(
	    "	mnegl $1,%0;"
	    "	insqti (%1),(%2);"
	    "	bcs 1f;"		/* failed insert */
	    "	beql 2f;"		/* jump if first entry */
	    "	movl $1,%0;"
	    "	brb 1f;"
	    "2:	clrl %0;"
	    "1:;"
	    : "=&g"(ret)
	    : "r"(entry), "r"(header)
	    : "memory");

	return ret;
}
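
/*
 * Usage sketch (an assumption, not from the original source): retry
 * while the interlock is busy, and use the "was empty" status to wake
 * a consumer; "elem" and "qhdr" are illustrative only.  The status
 * values are also available as the ILCK_FAILED/Q_EMPTY/Q_OK macros
 * defined below.
 *
 *	int st;
 *
 *	while ((st = insqti(elem, &qhdr)) == -1)
 *		;			// interlock busy: retry
 *	if (st == 0)
 *		wakeup(&qhdr);		// queue was empty: wake consumer
 */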

/*
 * Remqhi() removes the element at the head of a queue, interlocked.
 * Returns -1 if the interlock failed, 0 if the queue was empty, and
 * the address of the removed element otherwise.
 */
static __inline__ void * __attribute__((__unused__))
remqhi(void *header)
{
	register void *ret;

	__asm__ __volatile(
	    "	remqhi (%1),%0;"
	    "	bcs 1f;"		/* failed interlock */
	    "	bvs 2f;"		/* nothing was removed */
	    "	brb 3f;"
	    "1:	mnegl $1,%0;"
	    "	brb 3f;"
	    "2:	clrl %0;"
	    "3:;"
	    : "=&g"(ret)
	    : "r"(header)
	    : "memory");

	return ret;
}

#define ILCK_FAILED	-1	/* Interlock failed */
#define Q_EMPTY		0	/* Queue is/was empty */
#define Q_OK		1	/* Inserted OK */
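
/*
 * Usage sketch (an assumption, not from the original source): drain
 * the queue from the head, distinguishing the three outcomes with the
 * macros above; process() and "qhdr" are illustrative only.
 *
 *	void *elem;
 *
 *	for (;;) {
 *		elem = remqhi(&qhdr);
 *		if (elem == (void *)ILCK_FAILED)
 *			continue;	// interlock busy: retry
 *		if (elem == (void *)Q_EMPTY)
 *			break;		// queue empty: done
 *		process(elem);
 *	}
 */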

#endif /* _VAX_MACROS_H_ */