/*	$NetBSD: macros.h,v 1.3 1995/02/13 00:43:23 ragge Exp $	*/

/*
 * Copyright (c) 1994 Ludd, University of Luleå, Sweden.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *      This product includes software developed at Ludd, University of Luleå.
 * 4. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

/* All bugs are subject to removal without further notice */

#if !defined(_VAX_MACROS_H_) && !defined(ASSEMBLER)
#define _VAX_MACROS_H_

/* General machine-dependent macros and inline routines are collected here. */

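/*
 * ffs(): find first set bit, using the VAX FFS instruction.  FFS scans
 * 32 bits starting at bit 0; if no bit is set it sets the Z condition
 * code, in which case -1 is loaded before the final INCL, so the result
 * follows the C convention: 1-based bit number, or 0 if no bit is set.
 */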
static __inline__ int ffs(int reg){
	register int val;

	asm __volatile ("ffs $0,$32,%1,%0\n\t"
			"bneq 1f\n\t"
			"mnegl $1,%0\n"
			"1:\tincl %0"
		: "=&r" (val)
		: "r" (reg) );
	return val;
}

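/*
 * Queue manipulation via the VAX REMQUE/INSQUE instructions.  _remque()
 * unlinks the element at p and then clears its second link word;
 * _insque() inserts element p into a queue immediately after element q.
 * These are the plain (non-interlocked) queue instructions.
 */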
static __inline__ void _remque(void *p){
	asm __volatile ("remque (%0),%0;clrl 4(%0)"
		:
		: "r" (p)
		: "memory" );
}

static __inline__ void _insque(void *p, void *q) {
	asm __volatile ("insque (%0), (%1)"
		:
		: "r" (p), "r" (q)
		: "memory" );
}

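/*
 * Single-bit operations using the VAX branch-on-bit instructions:
 * BBSS sets a bit, BBSC clears it, and BBC/BBS are used to test one.
 * The branch target is simply the next instruction, so only the side
 * effect on the bit (or the tested value) remains.  For example,
 * bitset(5, flags) sets bit 5 of flags, and bitisset(5, flags) yields
 * non-zero if that bit is set.
 */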
#define bitset(bitnr,var)	\
({	asm __volatile ("bbss %0,%1,1f;1:;"	\
		:	\
		: "g" (bitnr), "g" (var));	\
})

#define bitclear(bitnr,var)	\
({	asm __volatile ("bbsc %0,%1,1f;1:;"	\
		:	\
		: "g" (bitnr), "g" (var));	\
})

#define bitisset(bitnr,var)	\
({	\
	register int val;	\
	asm __volatile ("clrl %0;bbc %1,%2,1f;incl %0;1:;"	\
		: "=g" (val)	\
		: "g" (bitnr), "g" (var));	\
	val;	\
})

#define bitisclear(bitnr,var)	\
({	\
	register int val;	\
	asm __volatile ("clrl %0;bbs %1,%2,1f;incl %0;1:;"	\
		: "=g" (val)	\
		: "g" (bitnr), "g" (var));	\
	val;	\
})
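
/*
 * bcopy()/bzero() map directly onto MOVC3/MOVC5.  Both instructions
 * handle at most 65535 bytes and use R0-R5 as scratch registers, hence
 * the clobber lists; see blkcpy()/blkclr() below for larger regions.
 */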
static __inline__ void bcopy(const void *from, void *to, u_int len) {
	asm __volatile ("movc3 %0,(%1),(%2)"
		:
		: "r" (len), "r" (from), "r" (to)
		: "r0","r1","r2","r3","r4","r5");
}

static __inline__ void bzero(void *block, u_int len){
	asm __volatile ("movc5 $0,(%0),$0,%1,(%0)"
		:
		: "r" (block), "r" (len)
		: "r0","r1","r2","r3","r4","r5");
}

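/*
 * bcmp(): CMPC3 leaves the number of unequal trailing bytes in R0, so
 * the result is 0 when the two regions match and non-zero otherwise.
 */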
static __inline__ int bcmp(const void *b1, const void *b2, size_t len){
	register int ret;

	asm __volatile("cmpc3 %3,(%1),(%2);movl r0,%0"
		: "=r" (ret)
		: "r" (b1), "r" (b2), "r" (len)
		: "r0","r1","r2","r3" );
	return ret;
}

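/*
 * locc(): LOCC searches size bytes at cp for the byte mask; R0 gets the
 * number of bytes remaining (counting the located byte), or 0 if the
 * byte was not found.
 */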
static __inline__ int locc(int mask, char *cp, u_int size){
	register int ret;

	asm __volatile("locc %1,%2,(%3);movl r0,%0"
		: "=r" (ret)
		: "r" (mask), "r" (size), "r" (cp)
		: "r0","r1" );
	return ret;
}

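/*
 * scanc(): SCANC scans size bytes at cp, indexing each byte into table
 * and testing the entry against mask; R0 gets the number of bytes left
 * when a match is found, or 0 if none matched.
 */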
static __inline__ int scanc(u_int size, u_char *cp, u_char *table, int mask){
	register int ret;

	asm __volatile("scanc %1,(%2),(%3),%4;movl r0,%0"
		: "=g" (ret)
		: "r" (size), "r" (cp), "r" (table), "r" (mask)
		: "r0","r1","r2","r3" );
	return ret;
}

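/*
 * skpc(): SKPC skips over leading bytes equal to mask; R0 gets the
 * number of bytes remaining at the first non-matching byte, or 0 if the
 * whole region matched.
 */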
static __inline__ int skpc(int mask, int size, char *cp){
	register int ret;

	asm __volatile("skpc %1,%2,(%3);movl r0,%0"
		: "=g" (ret)
		: "r" (mask), "r" (size), "r" (cp)
		: "r0","r1" );
	return ret;
}
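
/* Inline signed min/max variants; currently compiled out. */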
#if 0
static __inline__ int imin(int a, int b){
	asm __volatile("cmpl %0,%2;bleq 1f;movl %2,%0;1:"
		: "=r" (a)
		: "r" (a), "r" (b) );
	return a;
}

static __inline__ int imax(int a, int b){
	asm __volatile("cmpl %0,%2;bgeq 1f;movl %2,%0;1:"
		: "=r" (a)
		: "r" (a), "r" (b) );
	return a;
}

static __inline__ int min(int a, int b){
	asm __volatile("cmpl %0,%2;bleq 1f;movl %2,%0;1:"
		: "=r" (a)
		: "r" (a), "r" (b) );
	return a;
}

static __inline__ int max(int a, int b){
	asm __volatile("cmpl %0,%2;bgeq 1f;movl %2,%0;1:"
		: "=r" (a)
		: "r" (a), "r" (b) );
	return a;
}
#endif

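/*
 * waitabit(): busy-wait for tid ticks of processor register 27
 * (presumably PR_TODR, the time-of-day clock): read the current value,
 * add tid, then spin re-reading the register until it reaches that value.
 */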
#define	waitabit(tid)	\
({	\
	asm __volatile ("mfpr $27,r0;addl2 %0,r0;"	\
			"1:;mfpr $27,r1;cmpl r0,r1;bneq 1b;"	\
		:	\
		: "g" (tid)	\
		: "r0","r1");	\
})

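/*
 * blkcpy()/blkclr() copy or clear regions that may exceed the 65535-byte
 * MOVC3/MOVC5 limit by working in 65535-byte chunks.  The loop relies on
 * MOVC3/MOVC5 leaving R1 and R3 pointing just past the source and
 * destination bytes already processed.
 */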
static __inline__ void blkcpy(const void *from, void *to, u_int len) {
	asm __volatile("movl %0,r1\n\t"
		"movl %1,r3\n\t"
		"movl %2,r6\n\t"
		"jbr 2f\n"
		"1:\tsubl2 r0,r6\n\t"
		"movc3 r0,(r1),(r3)\n"
		"2:\tmovzwl $65535,r0\n\t"
		"cmpl r6,r0\n\t"
		"jgtr 1b\n\t"
		"movc3 r6,(r1),(r3)"
		:
		: "g" (from), "g" (to), "g" (len)
		: "r0","r1","r2","r3","r4","r5","r6" );
}

static __inline__ void blkclr(void *blk, int len) {
	asm __volatile("movl %0,r3\n\t"
		"movl %1,r6\n\t"
		"jbr 2f\n"
		"1:\tsubl2 r0,r6\n\t"
		"movc5 $0,(r3),$0,r0,(r3)\n"
		"2:\tmovzwl $65535,r0\n\t"
		"cmpl r6,r0\n\t"
		"jgtr 1b\n\t"
		"movc5 $0,(r3),$0,r6,(r3)"
		:
		: "g" (blk), "g" (len)
		: "r0","r1","r2","r3","r4","r5","r6" );
}

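/*
 * htonl()/htons(): the VAX is little-endian, so conversion to network
 * (big-endian) byte order is a byte swap, done here with ROTL/INSV/MOVB.
 */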
static __inline__ unsigned long htonl(unsigned long x){
	register unsigned long ret;

	asm __volatile("rotl $-8,%1,%0\n\t"
		"insv %0,$16,$8,%0\n\t"
		"rotl $8,%1,r1\n\t"
		"movb r1,%0"
		: "=&r" (ret)
		: "r" (x)
		: "r1","cc" );
	return ret;
}

static __inline__ unsigned short htons(unsigned short x){
	register unsigned short ret;

	asm __volatile("rotl $8,%1,%0\n\t"
		"rotl $-8,%1,r1\n\t"
		"movb r1,%0\n\t"
		"movzwl %0,%0"
		: "=&r" (ret)
		: "r" (x)
		: "r1","cc" );
	return ret;
}
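
/* The swap is its own inverse, so the ntoh* conversions are identical. */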
#define ntohl(x) htonl(x)
#define ntohs(x) htons(x)

#endif /* _VAX_MACROS_H_ */