/*	$NetBSD: macros.h,v 1.15 1998/03/02 17:00:01 ragge Exp $	*/
2 1.2 cgd
3 1.1 ragge /*
4 1.1 ragge * Copyright (c) 1994 Ludd, University of Lule}, Sweden.
5 1.1 ragge * All rights reserved.
6 1.1 ragge *
7 1.1 ragge * Redistribution and use in source and binary forms, with or without
8 1.1 ragge * modification, are permitted provided that the following conditions
9 1.1 ragge * are met:
10 1.1 ragge * 1. Redistributions of source code must retain the above copyright
11 1.1 ragge * notice, this list of conditions and the following disclaimer.
12 1.1 ragge * 2. Redistributions in binary form must reproduce the above copyright
13 1.1 ragge * notice, this list of conditions and the following disclaimer in the
14 1.1 ragge * documentation and/or other materials provided with the distribution.
15 1.1 ragge * 3. All advertising materials mentioning features or use of this software
16 1.1 ragge * must display the following acknowledgement:
17 1.1 ragge * This product includes software developed at Ludd, University of Lule}.
18 1.1 ragge * 4. The name of the author may not be used to endorse or promote products
19 1.1 ragge * derived from this software without specific prior written permission
20 1.1 ragge *
21 1.1 ragge * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
22 1.1 ragge * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
23 1.1 ragge * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
24 1.1 ragge * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
25 1.1 ragge * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
26 1.1 ragge * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
27 1.1 ragge * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
28 1.1 ragge * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
29 1.1 ragge * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
30 1.1 ragge * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 1.1 ragge */
32 1.1 ragge
33 1.1 ragge /* All bugs are subject to removal without further notice */
34 1.3 ragge
35 1.11 ragge #if !defined(_VAX_MACROS_H_) && !defined(STANDALONE) && \
36 1.11 ragge (!defined(_LOCORE) && defined(_VAX_INLINE_))
37 1.15 ragge #define _VAX_MACROS_H_
38 1.1 ragge
39 1.1 ragge /* Here general macros are supposed to be stored */
40 1.1 ragge
/*
 * ffs: find the first (least significant) set bit in "reg".
 * Returns 0 if no bit is set, otherwise the 1-based bit position,
 * matching the ffs(3) contract.  The VAX "ffs" instruction sets the
 * Z condition code when no bit is found in the 32-bit field.
 */
static __inline__ int ffs(int reg){
	register int val;

	/*
	 * "=&r" (earlyclobber): val is written before the input "reg"
	 * is last read, so they must not share a register.  The
	 * original spelled this "&=r", but GCC requires the "="
	 * modifier to come first.
	 */
	__asm__ __volatile ("ffs $0,$32,%1,%0\n\t"
			    "bneq 1f\n\t"	/* bit found? skip fixup */
			    "mnegl $1,%0\n"	/* not found: val = -1 */
			    "1:\tincl %0"	/* val++ => 0 or pos+1 */
			: "=&r" (val)
			: "r" (reg) );
	return	val;
}
52 1.3 ragge
/*
 * _remque: remove the queue entry at "p" from its insque-style doubly
 * linked queue, then clear the longword at offset 4 (the entry's
 * backward link).
 *
 * "remque (%0),%0" also writes the removed entry's address back into
 * the operand register, so the operand must be read-write ("+r");
 * the original declared it input-only, which lets the compiler assume
 * the register still holds "p" afterwards.
 */
static __inline__ void _remque(void*p){
	__asm__ __volatile ("remque (%0),%0;clrl 4(%0)"
			: "+r" (p)
			:
			: "memory" );
}
59 1.3 ragge
/*
 * _insque: insert the queue entry "entry" into an insque-style doubly
 * linked queue, immediately after the entry "pred".  The "memory"
 * clobber tells the compiler the queue links are modified.
 */
static __inline__ void _insque(void *entry, void *pred) {
	__asm__ __volatile ("insque (%0), (%1)"
			:
			: "r" (entry), "r" (pred)
			: "memory" );
}
66 1.3 ragge
67 1.3 ragge static __inline__ void bcopy(const void*from, void*toe, u_int len) {
68 1.13 thorpej __asm__ __volatile ("movc3 %0,(%1),(%2)"
69 1.3 ragge :
70 1.3 ragge : "r" (len),"r" (from),"r"(toe)
71 1.3 ragge :"r0","r1","r2","r3","r4","r5");
72 1.3 ragge }
73 1.3 ragge
74 1.15 ragge void blkclr __P((void *, u_int));
75 1.15 ragge
76 1.3 ragge static __inline__ void bzero(void*block, u_int len){
77 1.15 ragge if (len > 65535)
78 1.15 ragge blkclr(block, len);
79 1.15 ragge else {
80 1.15 ragge __asm__ __volatile ("movc5 $0,(%0),$0,%1,(%0)"
81 1.3 ragge :
82 1.3 ragge : "r" (block), "r" (len)
83 1.3 ragge :"r0","r1","r2","r3","r4","r5");
84 1.15 ragge }
85 1.3 ragge }
86 1.3 ragge
/*
 * bcmp: compare "len" bytes at "b1" and "b2".  Returns 0 if equal,
 * nonzero otherwise -- cmpc3 leaves the count of bytes not yet
 * examined in r0, which is 0 exactly when the strings match.
 *
 * Fixes: "register ret;" relied on implicit int (invalid in modern
 * C); added "memory" so stores to the buffers made before the call
 * are flushed before the asm reads them.
 */
static __inline__ int bcmp(const void *b1, const void *b2, size_t len){
	register int ret;

	__asm__ __volatile("cmpc3 %3,(%1),(%2);movl r0,%0"
			: "=r" (ret)
			: "r" (b1), "r" (b2), "r" (len)
			: "r0","r1","r2","r3","memory" );
	return ret;
}
96 1.3 ragge
97 1.9 cgd #if 0 /* unused, but no point in deleting it since it _is_ an instruction */
98 1.3 ragge static __inline__ int locc(int mask, char *cp,u_int size){
99 1.3 ragge register ret;
100 1.3 ragge
101 1.13 thorpej __asm__ __volatile("locc %1,%2,(%3);movl r0,%0"
102 1.3 ragge : "=r" (ret)
103 1.3 ragge : "r" (mask),"r"(size),"r"(cp)
104 1.3 ragge : "r0","r1" );
105 1.3 ragge return ret;
106 1.3 ragge }
107 1.9 cgd #endif
108 1.3 ragge
109 1.10 ragge static __inline__ int
110 1.10 ragge scanc(u_int size, const u_char *cp, const u_char *table, int mask){
111 1.3 ragge register ret;
112 1.3 ragge
113 1.13 thorpej __asm__ __volatile("scanc %1,(%2),(%3),%4;movl r0,%0"
114 1.3 ragge : "=g"(ret)
115 1.3 ragge : "r"(size),"r"(cp),"r"(table),"r"(mask)
116 1.3 ragge : "r0","r1","r2","r3" );
117 1.3 ragge return ret;
118 1.3 ragge }
119 1.3 ragge
120 1.8 ragge static __inline__ int skpc(int mask, size_t size, u_char *cp){
121 1.3 ragge register ret;
122 1.3 ragge
123 1.13 thorpej __asm__ __volatile("skpc %1,%2,(%3);movl r0,%0"
124 1.3 ragge : "=g"(ret)
125 1.3 ragge : "r"(mask),"r"(size),"r"(cp)
126 1.3 ragge : "r0","r1" );
127 1.3 ragge return ret;
128 1.3 ragge }
129 1.3 ragge
/*
 * setrunqueue: put process "p" on the run queue by entering the
 * assembler routine Setrq with the proc pointer in r0.
 * NOTE(review): assumes Setrq preserves everything except r0-r2 --
 * confirm against the locore implementation.
 */
#define setrunqueue(p) \
	__asm__ __volatile("movl %0,r0;jsb Setrq":: "g"(p):"r0","r1","r2");
132 1.12 ragge
/*
 * remrunqueue: remove process "p" from the run queue by entering the
 * assembler routine Remrq with the proc pointer in r0.
 * NOTE(review): assumes Remrq preserves everything except r0-r2 --
 * confirm against the locore implementation.
 */
#define remrunqueue(p) \
	__asm__ __volatile("movl %0,r0;jsb Remrq":: "g"(p):"r0","r1","r2");
135 1.12 ragge
/*
 * cpu_switch: context-switch to process "p".  Loads the proc pointer
 * into r0, pushes the PSL (jsb pushes only the PC; the switch code
 * presumably returns with an REI, which needs both -- confirm against
 * locore), then enters the assembler routine Swtch.
 */
#define cpu_switch(p) \
	__asm__ __volatile("movl %0,r0;movpsl -(sp);jsb Swtch" \
		::"g"(p):"r0","r1","r2","r3");
139 1.3 ragge #endif /* _VAX_MACROS_H_ */
140