/*	$NetBSD: macros.h,v 1.15 1998/03/02 17:00:01 ragge Exp $	*/

/*
 * Copyright (c) 1994 Ludd, University of Lule}, Sweden.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *     This product includes software developed at Ludd, University of Lule}.
 * 4. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

 /* All bugs are subject to removal without further notice */

#if !defined(_VAX_MACROS_H_) && !defined(STANDALONE) && \
	(!defined(_LOCORE) && defined(_VAX_INLINE_))
#define _VAX_MACROS_H_

/* General-purpose inline macros and functions are collected here. */

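/*
 * ffs() - find first set bit, using the VAX ffs instruction.
 * Returns the 1-based position of the lowest set bit in reg,
 * or 0 if reg is zero.
 */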
static __inline__ int ffs(int reg){
	register int val;

	__asm__ __volatile ("ffs	$0,$32,%1,%0\n"
		"	bneq	1f\n"
		"	mnegl	$1,%0\n"
		"1:	incl	%0"
			: "=&r" (val)
			: "r" (reg) );
	return	val;
}

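/*
 * _remque() - remove the element pointed to by p from its doubly
 * linked queue with the remque instruction, then clear the element's
 * backward link (the second longword of the entry).
 */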
static __inline__ void _remque(void *p){
	__asm__ __volatile ("remque (%0),%0;clrl 4(%0)"
			:
			: "r" (p)
			: "memory" );
}

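/*
 * _insque() - insert the element p into a doubly linked queue,
 * immediately after the element q, using the insque instruction.
 */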
static __inline__ void _insque(void *p, void *q) {
	__asm__ __volatile ("insque (%0), (%1)"
			:
			: "r" (p),"r" (q)
			: "memory" );
}

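/*
 * bcopy() - copy len bytes from 'from' to 'toe' with a single movc3.
 * movc3 uses r0-r5, hence the clobber list; its length operand is a
 * word, so it moves at most 65535 bytes per instruction.
 */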
static __inline__ void bcopy(const void *from, void *toe, u_int len) {
	__asm__ __volatile ("movc3 %0,(%1),(%2)"
			:
			: "r" (len),"r" (from),"r"(toe)
			:"r0","r1","r2","r3","r4","r5");
}

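/* blkclr() - out-of-line clear routine, used by bzero() below for
 * regions larger than a single movc5 can handle. */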
void	blkclr __P((void *, u_int));

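/*
 * bzero() - clear len bytes at block.  movc5 with a zero-length
 * source and fill byte 0 zeroes the destination; its word-sized
 * length limits it to 65535 bytes, so larger regions go through
 * blkclr().
 */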
static __inline__ void bzero(void *block, u_int len){
	if (len > 65535)
		blkclr(block, len);
	else {
		__asm__ __volatile ("movc5 $0,(%0),$0,%1,(%0)"
			:
			: "r" (block), "r" (len)
			:"r0","r1","r2","r3","r4","r5");
	}
}

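/*
 * bcmp() - compare len bytes with cmpc3.  r0 ends up holding the
 * number of bytes left uncompared, so the result is 0 when the two
 * regions are equal and non-zero otherwise.
 */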
static __inline__ int bcmp(const void *b1, const void *b2, size_t len){
	register int ret;

	__asm__ __volatile("cmpc3 %3,(%1),(%2);movl r0,%0"
			: "=r" (ret)
			: "r" (b1), "r" (b2), "r" (len)
			: "r0","r1","r2","r3" );
	return ret;
}

#if 0 /* unused, but no point in deleting it since it _is_ an instruction */
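/*
 * locc() - locate the first byte in cp[0..size-1] equal to mask.
 * locc leaves the number of bytes remaining in r0, which is 0 if
 * the character was not found.
 */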
static __inline__ int locc(int mask, char *cp, u_int size){
	register int ret;

	__asm__ __volatile("locc %1,%2,(%3);movl r0,%0"
			: "=r" (ret)
			: "r" (mask),"r"(size),"r"(cp)
			: "r0","r1" );
	return	ret;
}
#endif

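/*
 * scanc() - scan cp[0..size-1]; each byte indexes table[], and the
 * scan stops at the first byte whose table entry ANDed with mask is
 * non-zero.  Returns the number of bytes left (from r0), or 0 if no
 * byte matched.
 */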
static __inline__ int
scanc(u_int size, const u_char *cp, const u_char *table, int mask){
	register int ret;

	__asm__ __volatile("scanc	%1,(%2),(%3),%4;movl r0,%0"
			: "=g"(ret)
			: "r"(size),"r"(cp),"r"(table),"r"(mask)
			: "r0","r1","r2","r3" );
	return ret;
}

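/*
 * skpc() - skip over leading bytes in cp[0..size-1] that are equal
 * to mask; returns the number of bytes remaining (from r0), 0 if the
 * whole string matched.
 */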
static __inline__ int skpc(int mask, size_t size, u_char *cp){
	register int ret;

	__asm__ __volatile("skpc %1,%2,(%3);movl r0,%0"
			: "=g"(ret)
			: "r"(mask),"r"(size),"r"(cp)
			: "r0","r1" );
	return	ret;
}

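/*
 * setrunqueue() - put process p on the run queue by jumping to the
 * assembler routine Setrq with the proc pointer in r0.
 */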
#define setrunqueue(p)	\
	__asm__ __volatile("movl %0,r0;jsb Setrq":: "g"(p):"r0","r1","r2");

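/*
 * remrunqueue() - remove process p from the run queue via the
 * assembler routine Remrq, again with the proc pointer in r0.
 */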
#define remrunqueue(p)	\
	__asm__ __volatile("movl %0,r0;jsb Remrq":: "g"(p):"r0","r1","r2");

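/*
 * cpu_switch() - switch to another process: push the PSL on the
 * stack and jump to the assembler routine Swtch with p in r0.
 */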
#define cpu_switch(p) \
	__asm__ __volatile("movl %0,r0;movpsl -(sp);jsb Swtch" \
	    ::"g"(p):"r0","r1","r2","r3");
#endif	/* _VAX_MACROS_H_ */