/*	$NetBSD: macros.h,v 1.18 2000/04/09 16:37:09 ragge Exp $	*/

/*
 * Copyright (c) 1994, 1998, 2000 Ludd, University of Luleå, Sweden.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *     This product includes software developed at Ludd, University of Luleå.
 * 4. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

 /* All bugs are subject to removal without further notice */

#if !defined(_VAX_MACROS_H_) && !defined(lint)
#define _VAX_MACROS_H_

/* Here general macros are supposed to be stored */

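/*
 * ffs() maps onto the VAX ffs instruction, which yields the 0-based
 * position of the lowest set bit and leaves Z set when no bit is found.
 * The wrapper converts that to the C convention: bits numbered from 1,
 * 0 when no bit is set.
 */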
static __inline__ int
ffs(int reg)
{
	register int val;

	__asm__ __volatile ("ffs	$0,$32,%1,%0\n\t"
			"bneq	1f\n\t"
			"mnegl	$1,%0\n"
			"1:	incl	%0"
			: "=&r" (val)
			: "r" (reg) );
	return	val;
}

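/*
 * _remque()/_insque() wrap the VAX remque/insque queue instructions;
 * each queue element is expected to begin with forward and backward
 * link longwords.  _remque() additionally clears the removed element's
 * backward link.
 */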
static __inline__ void
_remque(void *p)
{
	__asm__ __volatile ("remque (%0),%0;clrl 4(%0)"
			:
			: "r" (p)
			: "memory" );
}

static __inline__ void
_insque(void *p, void *q)
{
	__asm__ __volatile ("insque (%0), (%1)"
			:
			: "r" (p),"r" (q)
			: "memory" );
}

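/*
 * The copy routines below all reduce to a single movc3, which moves at
 * most 65535 bytes and clobbers r0-r5.  movc3 is specified to handle
 * overlapping source and destination correctly, which is why memcpy,
 * memmove and bcopy can share the same instruction.
 */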
static __inline__ void *
memcpy(void *toe, const void *from, u_int len)
{
	__asm__ __volatile ("movc3 %0,(%1),(%2)"
			:
			: "r" (len),"r" (from),"r"(toe)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
	return toe;
}

static __inline__ void *
memmove(void *toe, const void *from, u_int len)
{
	__asm__ __volatile ("movc3 %0,(%1),(%2)"
			:
			: "r" (len),"r" (from),"r"(toe)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
	return toe;
}

static __inline__ void
bcopy(const void *from, void *toe, u_int len)
{
	__asm__ __volatile ("movc3 %0,(%1),(%2)"
			:
			: "r" (len),"r" (from),"r"(toe)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
}

void	blkclr __P((void *, u_int));

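/*
 * movc5 also moves at most 65535 bytes, hence the blkclr() fallback
 * for larger regions.  XXX: blkclr() clears to zero, so memset() with
 * a nonzero c and len > 65535 does not store the requested byte.
 */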
static __inline__ void *
memset(void *block, int c, size_t len)
{
	if (len > 65535)
		blkclr(block, len);
	else {
		__asm__ __volatile ("movc5 $0,(%0),%2,%1,(%0)"
			:
			: "r" (block), "r" (len), "r"(c)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return block;
}

static __inline__ void
bzero(void *block, u_int len)
{
	if (len > 65535)
		blkclr(block, len);
	else {
		__asm__ __volatile ("movc5 $0,(%0),$0,%1,(%0)"
			:
			: "r" (block), "r" (len)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
	}
}

/*
 * XXX - the return value of memcmp is wrong: cmpc3 leaves the number of
 * bytes remaining in r0, not the signed difference ANSI C requires.
 * Equality tests still work, since r0 is 0 exactly when the blocks match.
 */
static __inline__ int
memcmp(const void *b1, const void *b2, size_t len)
{
	register int ret;

	__asm__ __volatile("cmpc3 %3,(%1),(%2);movl r0,%0"
			: "=r" (ret)
			: "r" (b1), "r" (b2), "r" (len)
			: "r0","r1","r2","r3" );
	return ret;
}

static __inline__ int
bcmp(const void *b1, const void *b2, size_t len)
{
	register int ret;

	__asm__ __volatile("cmpc3 %3,(%1),(%2);movl r0,%0"
			: "=r" (ret)
			: "r" (b1), "r" (b2), "r" (len)
			: "r0","r1","r2","r3" );
	return ret;
}

/* Begin nya ("new") */
/*
 * Note: the string routines below rely on locc/movc3/cmpc3 and thus
 * only handle strings shorter than 65535 bytes.
 */
static __inline__ size_t
strlen(const char *cp)
{
	register size_t ret;

	__asm__ __volatile("locc $0,$65535,(%1);subl3 r0,$65535,%0"
			: "=r" (ret)
			: "r" (cp)
			: "r0","r1","cc" );
	return	ret;
}

static __inline__ char *
strcat(char *cp, const char *c2)
{
	__asm__ __volatile("locc $0,$65535,(%1);subl3 r0,$65535,r2;incl r2;"
			"locc $0,$65535,(%0);movc3 r2,(%1),(r1)"
			:
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return	cp;
}

static __inline__ char *
strncat(char *cp, const char *c2, size_t count)
{
	__asm__ __volatile("locc $0,%2,(%1);subl3 r0,%2,r2;"
			"locc $0,$65535,(%0);movc3 r2,(%1),(r1);movb $0,(r3)"
			:
			: "r" (cp), "r" (c2), "g"(count)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return	cp;
}

static __inline__ char *
strcpy(char *cp, const char *c2)
{
	__asm__ __volatile("locc $0,$65535,(%1);subl3 r0,$65535,r2;"
			"movc3 r2,(%1),(%0);movb $0,(r3)"
			:
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return	cp;
}

static __inline__ char *
strncpy(char *cp, const char *c2, size_t len)
{
	__asm__ __volatile("movl %2,r2;locc $0,r2,(%1);beql 1f;subl3 r0,%2,r2;"
			"clrb (%0)[r2];"
			"1:;movc3 r2,(%1),(%0)"
			:
			: "r" (cp), "r" (c2), "g"(len)
			: "r0","r1","r2","r3","r4","r5","memory","cc");
	return	cp;
}

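/*
 * memchr() returns the address locc leaves in r1 on a hit; when the
 * character is not found within len bytes, the bneq falls through and
 * the clrl turns the result into a null pointer.
 */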
static __inline__ void *
memchr(const void *cp, int c, size_t len)
{
	void *ret;

	__asm__ __volatile("locc %2,%3,(%1);bneq 1f;clrl r1;1:movl r1,%0"
			: "=g"(ret)
			: "r" (cp), "r" (c), "g"(len)
			: "r0","r1","cc");
	return	ret;
}

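/*
 * strcmp() compares through cp's terminating NUL and reduces the
 * result to the usual -1/0/1.  On an equal compare, cmpc3 itself
 * leaves 0 in r2 (bytes remaining in the second string), which becomes
 * the return value; on a mismatch the trailing cmpb picks the sign.
 */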
static __inline__ int
strcmp(const char *cp, const char *c2)
{
	register int ret;

	__asm__ __volatile("locc $0,$65535,(%1);subl3 r0,$65535,r0;incl r0;"
			"cmpc3 r0,(%1),(%2);beql 1f;movl $1,r2;"
			"cmpb (r1),(r3);bcc 1f;movl $-1,r2;1:movl r2,%0"
			: "=g"(ret)
			: "r" (cp), "r" (c2)
			: "r0","r1","r2","r3","cc");
	return	ret;
}
/* End nya */

#if 0 /* unused, but no point in deleting it since it _is_ an instruction */
static __inline__ int
locc(int mask, char *cp, u_int size)
{
	register int ret;

	__asm__ __volatile("locc %1,%2,(%3);movl r0,%0"
			: "=r" (ret)
			: "r" (mask),"r"(size),"r"(cp)
			: "r0","r1" );
	return	ret;
}
#endif

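/*
 * scanc()/skpc() map directly onto the corresponding VAX instructions
 * and return whatever the hardware leaves in r0: the number of bytes
 * remaining when the scan stopped, or 0 if it ran to completion.
 */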
static __inline__ int
scanc(u_int size, const u_char *cp, const u_char *table, int mask)
{
	register int ret;

	__asm__ __volatile("scanc	%1,(%2),(%3),%4;movl r0,%0"
			: "=g"(ret)
			: "r"(size),"r"(cp),"r"(table),"r"(mask)
			: "r0","r1","r2","r3" );
	return ret;
}

static __inline__ int
skpc(int mask, size_t size, u_char *cp)
{
	register int ret;

	__asm__ __volatile("skpc %1,%2,(%3);movl r0,%0"
			: "=g"(ret)
			: "r"(mask),"r"(size),"r"(cp)
			: "r0","r1" );
	return	ret;
}

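/*
 * Scheduler hooks.  These pass the process pointer in r0 and jsb to
 * the assembler entry points Setrq, Remrq and Swtch, which are defined
 * in the port's assembler sources.
 */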
#define setrunqueue(p)	\
	__asm__ __volatile("movl %0,r0;jsb Setrq":: "g"(p):"r0","r1","r2");

#define remrunqueue(p)	\
	__asm__ __volatile("movl %0,r0;jsb Remrq":: "g"(p):"r0","r1","r2");

#define cpu_switch(p) \
	__asm__ __volatile("movl %0,r0;movpsl -(sp);jsb Swtch" \
	    ::"g"(p):"r0","r1","r2","r3");

/*
 * Interlock instructions. Used both in multiprocessor environments to
 * lock between CPUs and in uniprocessor systems when locking is required
 * between I/O devices and the master CPU.
 */
/*
 * Insqti() interlocks and inserts an element at the tail of a queue.
 * Returns -1 if the interlock failed, 1 if inserted OK and 0 if the
 * element is first in the queue.
 */
static __inline__ int
insqti(void *entry, void *header) {
	register int ret;

	__asm__ __volatile(
			"mnegl $1,%0;\n"
			"insqti (%1),(%2);\n"
			"bcs 1f;\n"		/* failed insert */
			"beql 2f;\n"		/* jump if first entry */
			"movl $1,%0;\n"
			"brb 1f;\n"
			"2: clrl %0;\n"
			"1:;"
			: "=&g"(ret)
			: "r"(entry), "r"(header)
			: "memory");

	return ret;
}

/*
 * Remqhi() removes an element from the head of the queue.
 * Returns -1 if the interlock failed, 0 if the queue was empty, and the
 * address of the removed element otherwise.
 */
static __inline__ void *
remqhi(void *header) {
	register void *ret;

	__asm__ __volatile(
			"remqhi (%1),%0;\n"
			"bcs 1f;\n"		/* failed interlock */
			"bvs 2f;\n"		/* nothing was removed */
			"brb 3f;\n"
			"1: mnegl $1,%0;\n"
			"brb 3f;\n"
			"2: clrl %0;\n"
			"3:;"
			: "=&g"(ret)
			: "r"(header)
			: "memory");

	return ret;
}
#define	ILCK_FAILED	-1	/* Interlock failed */
#define	Q_EMPTY		0	/* Queue is/was empty */
#define	Q_OK		1	/* Inserted OK */
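
/*
 * Usage sketch (illustrative only; the struct and variable names here
 * are made up).  The interlocked queue instructions operate on
 * self-relative queues whose header and elements each begin with two
 * longword links; an empty header holds two zeroes:
 *
 *	struct iq { long flink, blink; };	// links kept by hardware
 *	struct iq head = { 0, 0 };		// empty self-relative queue
 *	struct iq elem;
 *
 *	if (insqti(&elem, &head) == ILCK_FAILED)
 *		;	// retry: another processor holds the interlock
 *	void *e = remqhi(&head);
 *	if (e == (void *)ILCK_FAILED)
 *		;	// interlock failed, retry
 *	else if (e == (void *)Q_EMPTY)
 *		;	// queue was empty
 *	else
 *		;	// e points at the removed element
 */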

#endif	/* _VAX_MACROS_H_ */