/*	$NetBSD: macros.h,v 1.14 1998/01/18 22:06:02 ragge Exp $	*/
2 1.2 cgd
3 1.1 ragge /*
4 1.1 ragge * Copyright (c) 1994 Ludd, University of Lule}, Sweden.
5 1.1 ragge * All rights reserved.
6 1.1 ragge *
7 1.1 ragge * Redistribution and use in source and binary forms, with or without
8 1.1 ragge * modification, are permitted provided that the following conditions
9 1.1 ragge * are met:
10 1.1 ragge * 1. Redistributions of source code must retain the above copyright
11 1.1 ragge * notice, this list of conditions and the following disclaimer.
12 1.1 ragge * 2. Redistributions in binary form must reproduce the above copyright
13 1.1 ragge * notice, this list of conditions and the following disclaimer in the
14 1.1 ragge * documentation and/or other materials provided with the distribution.
15 1.1 ragge * 3. All advertising materials mentioning features or use of this software
16 1.1 ragge * must display the following acknowledgement:
17 1.1 ragge * This product includes software developed at Ludd, University of Lule}.
18 1.1 ragge * 4. The name of the author may not be used to endorse or promote products
19 1.1 ragge * derived from this software without specific prior written permission
20 1.1 ragge *
21 1.1 ragge * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
22 1.1 ragge * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
23 1.1 ragge * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
24 1.1 ragge * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
25 1.1 ragge * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
26 1.1 ragge * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
27 1.1 ragge * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
28 1.1 ragge * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
29 1.1 ragge * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
30 1.1 ragge * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
31 1.1 ragge */
32 1.1 ragge
33 1.1 ragge /* All bugs are subject to removal without further notice */
34 1.3 ragge
35 1.11 ragge #if !defined(_VAX_MACROS_H_) && !defined(STANDALONE) && \
36 1.11 ragge (!defined(_LOCORE) && defined(_VAX_INLINE_))
37 1.3 ragge #define _VAX_MACROS_H_
38 1.1 ragge
39 1.1 ragge /* Here general macros are supposed to be stored */
40 1.1 ragge
/*
 * ffs(3): find the first (least significant) set bit in reg.
 * Returns the 1-based bit position, or 0 if reg is 0.
 * The VAX ffs instruction yields a 0-based position and sets the Z
 * condition code when no bit is found; the bneq/mnegl/incl sequence
 * maps that to the 1-based-or-zero C convention.
 *
 * Fixes vs. the previous revision:
 *  - output constraint was "&=r"; the '=' modifier must precede '&'
 *    ("=&r"), and the earlyclobber is required because %0 is written
 *    before the input %1 is dead.
 *  - the asm template was a single multi-line string literal, a GCC
 *    extension later removed; use concatenated "\n\t" strings.
 */
static __inline__ int ffs(int reg){
	register int val;

	__asm__ __volatile ("ffs $0,$32,%1,%0\n\t"
			    "bneq 1f\n\t"
			    "mnegl $1,%0\n"
			    "1:\tincl %0"
			: "=&r" (val)
			: "r" (reg) );
	return val;
}
52 1.3 ragge
/*
 * Remove the queue entry at p from its insque-style doubly linked
 * queue (VAX remque), then clear the second longword of the removed
 * entry with clrl 4(%0).
 * NOTE(review): "remque (%0),%0" writes the register that is declared
 * as an input-only operand — this relies on old-GCC leniency; confirm
 * (and add an output/clobber for it) before building with a modern
 * compiler.
 */
static __inline__ void _remque(void*p){
	__asm__ __volatile ("remque (%0),%0;clrl 4(%0)"
		:
		: "r" (p)
		: "memory" );
}
59 1.3 ragge
/*
 * Insert queue entry p immediately after entry q using the VAX insque
 * instruction.  Both arguments are addresses of queue entries (whose
 * leading longwords are the queue links).  The "memory" clobber keeps
 * the compiler from caching queue words across the insertion.
 */
static __inline__ void _insque(void*p, void*q) {
	__asm__ __volatile ("insque (%0), (%1)"
		:
		: "r" (p),"r" (q)
		: "memory" );
}
66 1.3 ragge
/*
 * Copy len bytes from "from" to "toe" with the movc3 string
 * instruction.  movc3 uses r0-r5 internally, hence the clobber list.
 * NOTE(review): the movc3 length operand is a word — presumably this
 * limits len to 65535 bytes per call; confirm callers never exceed
 * that.
 */
static __inline__ void bcopy(const void*from, void*toe, u_int len) {
	__asm__ __volatile ("movc3 %0,(%1),(%2)"
		:
		: "r" (len),"r" (from),"r"(toe)
		:"r0","r1","r2","r3","r4","r5");
}
73 1.3 ragge
/*
 * Zero len bytes starting at block: movc5 with a zero-length source
 * string and fill byte $0 writes the fill byte over the entire
 * destination.  movc5 uses r0-r5 internally, hence the clobber list.
 */
static __inline__ void bzero(void*block, u_int len){
	__asm__ __volatile ("movc5 $0,(%0),$0,%1,(%0)"
		:
		: "r" (block), "r" (len)
		:"r0","r1","r2","r3","r4","r5");
}
80 1.3 ragge
/*
 * Compare len bytes at b1 and b2 with the cmpc3 string instruction.
 * Returns 0 when the regions are equal, non-zero otherwise (cmpc3
 * leaves the number of bytes remaining in r0, copied out via movl).
 * Unlike memcmp, the non-zero value is not a signed byte difference —
 * callers must only test for (in)equality, which matches bcmp(3).
 *
 * Fix: "register ret;" relied on implicit int (invalid since C99);
 * declare the type explicitly.
 */
static __inline__ int bcmp(const void *b1, const void *b2, size_t len){
	register int ret;

	__asm__ __volatile("cmpc3 %3,(%1),(%2);movl r0,%0"
		: "=r" (ret)
		: "r" (b1), "r" (b2), "r" (len)
		: "r0","r1","r2","r3" );
	return ret;
}
90 1.3 ragge
#if 0	/* unused, but no point in deleting it since it _is_ an instruction */
/*
 * locc: locate the first byte equal to (the low byte of) mask within
 * the size bytes at cp.  Returns the count r0 left by the instruction
 * (bytes remaining from the match; 0 when no match was found).
 *
 * Fix: "register ret;" relied on implicit int (invalid since C99);
 * declare the type explicitly.
 */
static __inline__ int locc(int mask, char *cp,u_int size){
	register int ret;

	__asm__ __volatile("locc %1,%2,(%3);movl r0,%0"
		: "=r" (ret)
		: "r" (mask),"r"(size),"r"(cp)
		: "r0","r1" );
	return ret;
}
#endif
102 1.3 ragge
103 1.10 ragge static __inline__ int
104 1.10 ragge scanc(u_int size, const u_char *cp, const u_char *table, int mask){
105 1.3 ragge register ret;
106 1.3 ragge
107 1.13 thorpej __asm__ __volatile("scanc %1,(%2),(%3),%4;movl r0,%0"
108 1.3 ragge : "=g"(ret)
109 1.3 ragge : "r"(size),"r"(cp),"r"(table),"r"(mask)
110 1.3 ragge : "r0","r1","r2","r3" );
111 1.3 ragge return ret;
112 1.3 ragge }
113 1.3 ragge
114 1.8 ragge static __inline__ int skpc(int mask, size_t size, u_char *cp){
115 1.3 ragge register ret;
116 1.3 ragge
117 1.13 thorpej __asm__ __volatile("skpc %1,%2,(%3);movl r0,%0"
118 1.3 ragge : "=g"(ret)
119 1.3 ragge : "r"(mask),"r"(size),"r"(cp)
120 1.3 ragge : "r0","r1" );
121 1.3 ragge return ret;
122 1.3 ragge }
123 1.3 ragge
/*
 * Enter process p on the run queue: p is passed in r0 to the assembler
 * routine Setrq (jsb linkage, not a C call).  r0-r2 are clobbered.
 * NOTE(review): the macro bodies end in ';', so invocations written
 * "setrunqueue(p);" expand to a double semicolon — harmless at
 * statement level, but confirm no caller uses these inside an
 * unbraced if/else before changing.
 */
#define setrunqueue(p) \
	__asm__ __volatile("movl %0,r0;jsb Setrq":: "g"(p):"r0","r1","r2");

/* Remove process p from the run queue via the assembler routine Remrq
 * (same jsb linkage and clobbers as setrunqueue). */
#define remrunqueue(p) \
	__asm__ __volatile("movl %0,r0;jsb Remrq":: "g"(p):"r0","r1","r2");

/* Context switch to process p: push the PSL with movpsl, then jsb to
 * the assembler routine Swtch with p in r0.  The pushed PSL is
 * presumably consumed by an REI in Swtch — confirm against the
 * locore sources. */
#define cpu_switch(p) \
	__asm__ __volatile("movl %0,r0;movpsl -(sp);jsb Swtch" \
	::"g"(p):"r0","r1","r2","r3");
133 1.3 ragge #endif /* _VAX_MACROS_H_ */
134