/* macros.h, revision 1.14 — annotated NetBSD/vax source listing (viewer header removed) */
      1 /*	$NetBSD: macros.h,v 1.14 1998/01/18 22:06:02 ragge Exp $	*/
      2 
      3 /*
      4  * Copyright (c) 1994 Ludd, University of Lule}, Sweden.
      5  * All rights reserved.
      6  *
      7  * Redistribution and use in source and binary forms, with or without
      8  * modification, are permitted provided that the following conditions
      9  * are met:
     10  * 1. Redistributions of source code must retain the above copyright
     11  *    notice, this list of conditions and the following disclaimer.
     12  * 2. Redistributions in binary form must reproduce the above copyright
     13  *    notice, this list of conditions and the following disclaimer in the
     14  *    documentation and/or other materials provided with the distribution.
     15  * 3. All advertising materials mentioning features or use of this software
     16  *    must display the following acknowledgement:
     17  *     This product includes software developed at Ludd, University of Lule}.
     18  * 4. The name of the author may not be used to endorse or promote products
     19  *    derived from this software without specific prior written permission
     20  *
     21  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
     22  * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
     23  * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
     24  * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
     25  * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
     26  * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
     27  * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
     28  * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
     29  * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
     30  * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     31  */
     32 
     33  /* All bugs are subject to removal without further notice */
     34 
     35 #if !defined(_VAX_MACROS_H_) && !defined(STANDALONE) && \
     36 	(!defined(_LOCORE) && defined(_VAX_INLINE_))
     37 #define	_VAX_MACROS_H_
     38 
     39 /* Here general macros are supposed to be stored */
     40 
/*
 * ffs(reg): find-first-set -- return the 1-based index of the least
 * significant set bit of reg, or 0 when reg is zero (the ffs(3)
 * contract).  Implemented with the VAX FFS instruction.
 */
static __inline__ int ffs(int reg){
	register int val;

	/*
	 * FFS leaves the 0-based bit position in %0 and sets the Z
	 * condition code when no bit is found.  On a miss, load -1 so
	 * the final INCL yields 0; on a hit, INCL converts the 0-based
	 * position to the 1-based ffs(3) result.
	 *
	 * Fixes vs. previous revision:
	 *  - output constraint must start with '=' (it was "&=r");
	 *    '&' (earlyclobber) follows '=' because %0 is written
	 *    before the input %1 is consumed by the branch sequence;
	 *  - the asm template used a raw multi-line string literal,
	 *    which standard C forbids; use "\n"-continued strings.
	 */
	__asm__ __volatile ("ffs	$0,$32,%1,%0\n"
		"\tbneq	1f\n"
		"\tmnegl	$1,%0\n"
		"1:\tincl	%0"
			: "=&r" (val)
			: "r" (reg) );
	return	val;
}
     52 
/*
 * _remque(p): remove queue element p from its doubly-linked queue with
 * the VAX REMQUE instruction, then clear the element's second longword
 * (clrl 4(%0) -- presumably the backward link; confirm against the
 * queue-element layout used by callers).  The "memory" clobber is
 * required because the asm rewrites the queue links behind the
 * compiler's back.
 * NOTE(review): REMQUE's second operand writes the removed address back
 * into the register carrying %0, i.e. an input-only operand is
 * modified -- historical code that predates strict constraint rules.
 */
      53 static __inline__ void _remque(void*p){
      54 	__asm__ __volatile ("remque (%0),%0;clrl 4(%0)"
      55 			:
      56 			: "r" (p)
      57 			: "memory" );
      58 }
     59 
/*
 * _insque(p, q): insert queue element p into a doubly-linked queue
 * immediately after element q, using the VAX INSQUE instruction.
 * The "memory" clobber tells the compiler that the queue links of
 * p, q and q's old successor are updated by the instruction.
 */
      60 static __inline__ void _insque(void*p, void*q) {
      61         __asm__ __volatile ("insque (%0), (%1)"
      62                         :
      63                         : "r" (p),"r" (q)
      64                         : "memory" );
      65 }
     66 
/*
 * bcopy(from, toe, len): copy len bytes from 'from' to 'toe' with the
 * VAX MOVC3 instruction.  MOVC3 uses r0-r5 as working registers, hence
 * the clobber list.
 * NOTE(review): MOVC3 takes a 16-bit length operand -- behavior for
 * len > 65535 should be confirmed against callers; nothing here checks
 * it.  No "memory" clobber is declared either, so the compiler is not
 * told the destination changes -- verify this is safe at the call sites.
 */
      67 static __inline__ void bcopy(const void*from, void*toe, u_int len) {
      68 	__asm__ __volatile ("movc3 %0,(%1),(%2)"
      69 			:
      70 			: "r" (len),"r" (from),"r"(toe)
      71 			:"r0","r1","r2","r3","r4","r5");
      72 }
     73 
/*
 * bzero(block, len): zero len bytes starting at block using the VAX
 * MOVC5 instruction with a zero-length source and fill character $0
 * (MOVC5 pads the destination with the fill byte when the source is
 * exhausted).  MOVC5 uses r0-r5 as working registers, hence the
 * clobber list.
 * NOTE(review): as with bcopy above, the length operand is 16-bit and
 * no "memory" clobber is declared -- confirm against callers.
 */
      74 static __inline__ void bzero(void*block, u_int len){
      75 	__asm__ __volatile ("movc5 $0,(%0),$0,%1,(%0)"
      76 			:
      77 			: "r" (block), "r" (len)
      78 			:"r0","r1","r2","r3","r4","r5");
      79 }
     80 
/*
 * bcmp(b1, b2, len): compare len bytes with the VAX CMPC3 instruction.
 * CMPC3 leaves its result in r0 (0 when the regions are equal), which
 * is copied out via "movl r0,%0"; callers therefore get the usual
 * bcmp(3) contract of zero == equal, nonzero == different.
 * CMPC3 uses r0-r3 as working registers, hence the clobber list.
 */
      81 static __inline__ int bcmp(const void *b1, const void *b2, size_t len){
      82 	register ret;	/* implicit int -- pre-C99 style, kept as-is */
      83 
      84 	__asm__ __volatile("cmpc3 %3,(%1),(%2);movl r0,%0"
      85 			: "=r" (ret)
      86 			: "r" (b1), "r" (b2), "r" (len)
      87 			: "r0","r1","r2","r3" );
      88 	return ret;
      89 }
     90 
/*
 * locc(mask, cp, size): locate-character -- scan size bytes at cp for
 * the byte 'mask' using the VAX LOCC instruction; the r0 result copied
 * out here is the number of bytes remaining when the scan stopped
 * (0 presumably meaning "not found" -- confirm against the VAX manual
 * before re-enabling).  Compiled out with #if 0: kept only because it
 * wraps a real instruction, as the original comment says.
 */
      91 #if 0 /* unused, but no point in deleting it since it _is_ an instruction */
      92 static __inline__ int locc(int mask, char *cp,u_int size){
      93 	register ret;
      94 
      95 	__asm__ __volatile("locc %1,%2,(%3);movl r0,%0"
      96 			: "=r" (ret)
      97 			: "r" (mask),"r"(size),"r"(cp)
      98 			: "r0","r1" );
      99 	return	ret;
     100 }
     101 #endif
    102 
/*
 * scanc(size, cp, table, mask): VAX SCANC instruction -- scan size
 * bytes at cp, using each byte as an index into 'table' and stopping
 * when (table[byte] & mask) != 0.  The r0 value copied out is the
 * count of bytes left unscanned when the instruction stopped (0 when
 * no byte matched).  SCANC uses r0-r3, hence the clobber list.
 */
     103 static __inline__ int
     104 scanc(u_int size, const u_char *cp, const u_char *table, int mask){
     105 	register ret;	/* implicit int -- pre-C99 style, kept as-is */
     106 
     107 	__asm__ __volatile("scanc	%1,(%2),(%3),%4;movl r0,%0"
     108 			: "=g"(ret)
     109 			: "r"(size),"r"(cp),"r"(table),"r"(mask)
     110 			: "r0","r1","r2","r3" );
     111 	return ret;
     112 }
    113 
/*
 * skpc(mask, size, cp): VAX SKPC instruction -- skip over leading
 * bytes at cp that are equal to 'mask', scanning at most size bytes.
 * The r0 value copied out is the count of bytes remaining when a
 * byte different from mask was found (0 when the whole range matched).
 * SKPC uses r0-r1, hence the clobber list.
 */
     114 static __inline__ int skpc(int mask, size_t size, u_char *cp){
     115 	register ret;	/* implicit int -- pre-C99 style, kept as-is */
     116 
     117 	__asm__ __volatile("skpc %1,%2,(%3);movl r0,%0"
     118 			: "=g"(ret)
     119 			: "r"(mask),"r"(size),"r"(cp)
     120 			: "r0","r1" );
     121 	return	ret;
     122 }
    123 
/*
 * Scheduler entry points: each macro loads the proc pointer p into r0
 * and jumps (JSB) to the corresponding assembler routine -- Setrq,
 * Remrq or Swtch -- which is defined elsewhere in the VAX port.
 * The clobber lists reflect the registers the callee scratches
 * (r0-r2, plus r3 for cpu_switch).  cpu_switch additionally pushes the
 * PSL (movpsl -(sp)) so Swtch can perform an REI-style return.
 * NOTE(review): these are statement macros whose expansion already
 * ends in ';' -- invoke them as whole statements only, never inside
 * an expression or an unbraced if/else arm.
 */
     124 #define	setrunqueue(p)	\
     125 	__asm__ __volatile("movl %0,r0;jsb Setrq":: "g"(p):"r0","r1","r2");
     126 
     127 #define	remrunqueue(p)	\
     128 	__asm__ __volatile("movl %0,r0;jsb Remrq":: "g"(p):"r0","r1","r2");
     129 
     130 #define	cpu_switch(p) \
     131 	__asm__ __volatile("movl %0,r0;movpsl -(sp);jsb Swtch" \
     132 	    ::"g"(p):"r0","r1","r2","r3");
    133 #endif	/* _VAX_MACROS_H_ */
    134