/*	$NetBSD: macros.h,v 1.23.8.3 2002/03/29 23:31:41 ragge Exp $	*/

/*
 * Copyright (c) 1994, 1998, 2000 Ludd, University of Luleå, Sweden.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *     This product includes software developed at Ludd, University of Luleå.
 * 4. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/* All bugs are subject to removal without further notice */

#if !defined(_VAX_MACROS_H_) && !defined(lint)
#define _VAX_MACROS_H_

/* General-purpose macros are stored here */

static __inline__ int __attribute__((__unused__))
ffs(int reg)
{
	register int val;

	__asm__ __volatile ("ffs $0,$32,%1,%0;"
			"bneq 1f;"
			"mnegl $1,%0;"
			"1:;"
			"incl %0"
			: "=&r" (val)
			: "r" (reg) );
	return val;
}
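
/*
 * For reference, a minimal portable-C sketch (not compiled in; the name
 * ffs_ref is illustrative only) of what the inline above computes: the
 * 1-based index of the lowest set bit, or 0 when no bit is set.
 */
#if 0
static int
ffs_ref(int reg)
{
	int i;

	for (i = 0; i < 32; i++)
		if ((unsigned int)reg & (1u << i))
			return (i + 1);
	return (0);
}
#endif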

/*
 * Remove the element at p from the doubly linked queue it is on and
 * clear its backward link.
 */
static __inline__ void __attribute__((__unused__))
_remque(void *p)
{
	__asm__ __volatile ("remque (%0),%0;clrl 4(%0)"
			:
			: "r" (p)
			: "memory" );
}

/* Insert the element at p into the queue after the element at q. */
static __inline__ void __attribute__((__unused__))
_insque(void *p, void *q)
{
	__asm__ __volatile ("insque (%0),(%1)"
			:
			: "r" (p),"r" (q)
			: "memory" );
}
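
/*
 * The insque/remque instructions operate on elements that begin with two
 * longword links. A sketch of the layout the inlines above assume (the
 * struct name is illustrative only, nothing else uses it): the forward
 * link lives at offset 0 and the backward link at offset 4, which is the
 * longword _remque() clears so a caller can tell the element is no
 * longer on a queue.
 */
#if 0
struct vax_qelem {
	struct vax_qelem *next;	/* offset 0: forward link */
	struct vax_qelem *prev;	/* offset 4: backward link, cleared by _remque() */
};
#endif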

static __inline__ void * __attribute__((__unused__))
memcpy(void *to, const void *from, size_t len)
{
	__asm__ __volatile ("movc3 %0,%1,%2"
			:
			: "g" (len), "m" (*(char *)from), "m" (*(char *)to)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return to;
}

static __inline__ void * __attribute__((__unused__))
memmove(void *to, const void *from, size_t len)
{
	__asm__ __volatile ("movc3 %0,%1,%2"
			:
			: "g" (len), "m" (*(char *)from), "m" (*(char *)to)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return to;
}

static __inline__ void __attribute__((__unused__))
bcopy(const void *from, void *to, size_t len)
{
	__asm__ __volatile ("movc3 %0,%1,%2"
			:
			: "g" (len), "m" (*(char *)from), "m" (*(char *)to)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
}
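
/*
 * Note: movc3 takes a word (16-bit) length, so the three copy routines
 * above handle at most 65535 bytes and, unlike memset()/bzero() below,
 * do not fall back to a block routine for longer moves. movc3 is also
 * defined to copy correctly when the strings overlap, which is what
 * makes the memmove() variant above valid.
 */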

void	__blkset(void *, int, size_t);

static __inline__ void * __attribute__((__unused__))
memset(void *block, int c, size_t len)
{
	if (len > 65535)
		__blkset(block, c, len);
	else {
		/*
		 * movc5 with a zero-length source fills the whole
		 * destination with the fill character (c).
		 */
		__asm__ __volatile ("movc5 $0,(%%sp),%2,%1,%0"
			:
			: "m" (*(char *)block), "g" (len), "g" (c)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return block;
}

static __inline__ void __attribute__((__unused__))
bzero(void *block, size_t len)
{
	if (len > 65535)
		__blkset(block, 0, len);
	else {
		__asm__ __volatile ("movc5 $0,(%%sp),$0,%1,%0"
			:
			: "m" (*(char *)block), "g" (len)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	}
}

/*
 * XXX - memcmp() is supposed to return the difference between the first
 * pair of mismatching bytes; cmpc3 instead leaves the number of bytes
 * remaining in r0, so this version is only good for equality tests.
 */
static __inline__ int __attribute__((__unused__))
memcmp(const void *b1, const void *b2, size_t len)
{
	register int ret;

	__asm__ __volatile("cmpc3 %3,(%1),(%2);"
			"movl %%r0,%0"
			: "=r" (ret)
			: "r" (b1), "r" (b2), "r" (len)
			: "r0","r1","r2","r3" );
	return ret;
}

static __inline__ int __attribute__((__unused__))
bcmp(const void *b1, const void *b2, size_t len)
{
	register int ret;

	__asm__ __volatile("cmpc3 %3,(%1),(%2);"
			"movl %%r0,%0"
			: "=r" (ret)
			: "r" (b1), "r" (b2), "r" (len)
			: "r0","r1","r2","r3" );
	return ret;
}

/* Begin new routines */
static __inline__ size_t __attribute__((__unused__))
strlen(const char *cp)
{
	register size_t ret;

	__asm__ __volatile("locc $0,$65535,(%1);"
			"subl3 %%r0,$65535,%0"
			: "=r" (ret)
			: "r" (cp)
			: "r0","r1","cc" );
	return ret;
}
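
/*
 * Note: locc also takes a word (16-bit) length, so strlen() above and
 * the locc-based string routines below misbehave on strings of 65535
 * bytes or longer.
 */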

static __inline__ char * __attribute__((__unused__))
strcat(char *cp, const char *c2)
{
	__asm__ __volatile("locc $0,$65535,(%1);"
			"subl3 %%r0,$65535,%%r2;"
			"incl %%r2;"
			"locc $0,$65535,(%0);"
			"movc3 %%r2,(%1),(%%r1)"
			:
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return cp;
}

static __inline__ char * __attribute__((__unused__))
strncat(char *cp, const char *c2, size_t count)
{
	__asm__ __volatile("locc $0,%2,(%1);"
			"subl3 %%r0,%2,%%r2;"
			"locc $0,$65535,(%0);"
			"movc3 %%r2,(%1),(%%r1);"
			"movb $0,(%%r3)"
			:
			: "r" (cp), "r" (c2), "g"(count)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return cp;
}

static __inline__ char * __attribute__((__unused__))
strcpy(char *cp, const char *c2)
{
	__asm__ __volatile("locc $0,$65535,(%1);"
			"subl3 %%r0,$65535,%%r2;"
			"movc3 %%r2,(%1),(%0);"
			"movb $0,(%%r3)"
			:
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return cp;
}

static __inline__ char * __attribute__((__unused__))
strncpy(char *cp, const char *c2, size_t len)
{
	__asm__ __volatile("movl %2,%%r2;"
			"locc $0,%%r2,(%1);"
			"beql 1f;"
			"subl3 %%r0,%2,%%r2;"
			"clrb (%0)[%%r2];"
			"1:;"
			"movc3 %%r2,(%1),(%0)"
			:
			: "r" (cp), "r" (c2), "g"(len)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return cp;
}

static __inline__ void * __attribute__((__unused__))
memchr(const void *cp, int c, size_t len)
{
	void *ret;

	__asm__ __volatile("locc %2,%3,(%1);"
			"bneq 1f;"
			"clrl %%r1;"
			"1:;"
			"movl %%r1,%0"
			: "=g"(ret)
			: "r" (cp), "r" (c), "g"(len)
			: "r0","r1","cc");
	return ret;
}

/*
 * Note: returns -1, 0 or 1 rather than the byte difference; when the
 * strings are equal, cmpc3 leaves 0 in r2, which becomes the return value.
 */
static __inline__ int __attribute__((__unused__))
strcmp(const char *cp, const char *c2)
{
	register int ret;

	__asm__ __volatile("locc $0,$65535,(%1);"
			"subl3 %%r0,$65535,%%r0;"
			"incl %%r0;"
			"cmpc3 %%r0,(%1),(%2);"
			"beql 1f;"
			"movl $1,%%r2;"
			"cmpb (%%r1),(%%r3);"
			"bcc 1f;"
			"mnegl $1,%%r2;"
			"1:;"
			"movl %%r2,%0"
			: "=g"(ret)
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","cc");
	return ret;
}
/* End new routines */

#if 0 /* unused, but no point in deleting it since it _is_ an instruction */
static __inline__ int __attribute__((__unused__))
locc(int mask, char *cp, size_t size)
{
	register int ret;

	__asm__ __volatile("locc %1,%2,(%3);"
			"movl %%r0,%0"
			: "=r" (ret)
			: "r" (mask),"r"(size),"r"(cp)
			: "r0","r1" );
	return ret;
}
#endif

/*
 * scanc: translate each byte of the string through the table and AND it
 * with mask; the scan stops at the first nonzero result. Returns the
 * number of bytes left in the string (0 if the whole string was scanned).
 */
static __inline__ int __attribute__((__unused__))
scanc(u_int size, const u_char *cp, const u_char *table, int mask)
{
	register int ret;

	__asm__ __volatile("scanc %1,(%2),(%3),%4;"
			"movl %%r0,%0"
			: "=g"(ret)
			: "r"(size),"r"(cp),"r"(table),"r"(mask)
			: "r0","r1","r2","r3" );
	return ret;
}

/*
 * skpc: skip bytes equal to mask, stopping at the first byte that
 * differs. Returns the number of bytes left in the string (0 if none
 * differed).
 */
static __inline__ int __attribute__((__unused__))
skpc(int mask, size_t size, u_char *cp)
{
	register int ret;

	__asm__ __volatile("skpc %1,%2,(%3);"
			"movl %%r0,%0"
			: "=g"(ret)
			: "r"(mask),"r"(size),"r"(cp)
			: "r0","r1" );
	return ret;
}

/*
 * Set or clear a bit at a memory position; interlocked.
 * bbssi() returns 0 if the bit was already set, 1 otherwise;
 * bbcci() returns 0 if the bit was already clear, 1 otherwise.
 */
static __inline__ int __attribute__((__unused__))
bbssi(int bitnr, long *addr)
{
	register int ret;

	__asm__ __volatile("clrl %%r0;"
			"bbssi %1,%2,1f;"
			"incl %%r0;"
			"1:;"
			"movl %%r0,%0"
			: "=&r"(ret)
			: "g"(bitnr),"m"(*addr)
			: "r0","cc","memory");
	return ret;
}

static __inline__ int __attribute__((__unused__))
bbcci(int bitnr, long *addr)
{
	register int ret;

	__asm__ __volatile("clrl %%r0;"
			"bbcci %1,%2,1f;"
			"incl %%r0;"
			"1:;"
			"movl %%r0,%0"
			: "=&r"(ret)
			: "g"(bitnr),"m"(*addr)
			: "r0","cc","memory");
	return ret;
}
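
/*
 * A minimal usage sketch (not compiled in; all names here are
 * illustrative only): the interlocked bit instructions are the
 * primitive a simple busy-wait lock can be built from.
 */
#if 0
static long example_lock;

static void
example_acquire(void)
{
	/* bbssi() returns 0 while someone else holds the bit; spin. */
	while (bbssi(0, &example_lock) == 0)
		;
}

static void
example_release(void)
{
	(void)bbcci(0, &example_lock);	/* clear the bit, interlocked */
}
#endif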

#define setrunqueue(p) \
	__asm__ __volatile("movl %0,%%r0;jsb Setrq" :: "g"(p):"r0","r1","r2");

#define remrunqueue(p) \
	__asm__ __volatile("movl %0,%%r0;jsb Remrq" :: "g"(p):"r0","r1","r2");

#define cpu_switch(p) ({ \
	register int ret; \
	__asm__ __volatile("movpsl -(%%sp);jsb Swtch; movl %%r0,%0" \
	    : "=g"(ret) ::"r0","r1","r2","r3","r4","r5"); \
	ret; })

/*
 * Interlock instructions. Used both in multiprocessor environments to
 * lock between CPUs and in uniprocessor systems when locking is required
 * between I/O devices and the master CPU.
 */

/*
 * insqti() locks and inserts an element at the tail of a queue.
 * Returns -1 if the interlock failed, 1 if inserted OK and 0 if first
 * in queue.
 */
static __inline__ int __attribute__((__unused__))
insqti(void *entry, void *header)
{
	register int ret;

	__asm__ __volatile("mnegl $1,%0;"
			"insqti (%1),(%2);"
			"bcs 1f;"		/* failed insert */
			"beql 2f;"		/* jump if first entry */
			"movl $1,%0;"
			"brb 1f;"
			"2: clrl %0;"
			"1:;"
			: "=&g"(ret)
			: "r"(entry), "r"(header)
			: "memory");

	return ret;
}

/*
 * remqhi() removes an element from the head of the queue.
 * Returns -1 if the interlock failed, 0 if the queue was empty, and the
 * address of the removed element otherwise.
 */
static __inline__ void * __attribute__((__unused__))
remqhi(void *header)
{
	register void *ret;

	__asm__ __volatile("remqhi (%1),%0;"
			"bcs 1f;"		/* failed interlock */
			"bvs 2f;"		/* nothing was removed */
			"brb 3f;"
			"1: mnegl $1,%0;"
			"brb 3f;"
			"2: clrl %0;"
			"3:;"
			: "=&g"(ret)
			: "r"(header)
			: "memory");

	return ret;
}
#define	ILCK_FAILED	-1	/* Interlock failed */
#define	Q_EMPTY		0	/* Queue is/was empty */
#define	Q_OK		1	/* Inserted OK */
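
/*
 * A usage sketch (not compiled in; the function names are illustrative
 * only) showing how the return codes above pair with insqti()/remqhi():
 */
#if 0
static void
example_enqueue(void *elem, void *head)
{
	int r;

	while ((r = insqti(elem, head)) == ILCK_FAILED)
		;	/* retry until the interlock is won */
	if (r == Q_EMPTY) {
		/* elem became the first entry; e.g. kick a consumer here */
	}
}

static void *
example_dequeue(void *head)
{
	void *elem;

	while ((elem = remqhi(head)) == (void *)ILCK_FAILED)
		;	/* retry on interlock failure */
	return (elem);	/* NULL if the queue was empty */
}
#endif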

#endif	/* _VAX_MACROS_H_ */