/*	$NetBSD: macros.h,v 1.37 2006/07/08 00:25:24 matt Exp $	*/

/*
 * Copyright (c) 1994, 1998, 2000 Ludd, University of Luleå, Sweden.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *     This product includes software developed at Ludd, University of Luleå.
 * 4. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

 /* All bugs are subject to removal without further notice */

#if !defined(_VAX_MACROS_H_) && !defined(__lint__)
#define _VAX_MACROS_H_

void	__blkset(void *, int, size_t);
void	__blkcpy(const void *, void *, size_t);

/* General-purpose inline macros are collected here. */

/*
 * ffs(3) via the FFS instruction: returns the 1-based position of the
 * least significant set bit, or 0 if no bit is set.
 */
static __inline int __attribute__((__unused__))
vax_ffs(int reg)
{
	register int val;

	__asm volatile ("ffs $0,$32,%1,%0;"
			    "bneq 1f;"
			    "mnegl $1,%0;"
			    "1:;"
			    "incl %0"
			: "=&r" (val)
			: "r" (reg) );
	return	val;
}
#define ffs vax_ffs
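
/*
 * Usage sketch for the ffs macro above; the example_* name is purely
 * illustrative.  The values follow from the wrapper's ffs(3)-style
 * contract: 1-based bit number of the lowest set bit, 0 when no bit is set.
 */
#if 0	/* example only */
static __inline int
example_ffs_values(void)
{
	int a = vax_ffs(0x0001);	/* bit 0 set -> returns 1 */
	int b = vax_ffs(0x0018);	/* lowest set bit is bit 3 -> returns 4 */
	int c = vax_ffs(0);		/* no bit set -> returns 0 */

	return a + b + c;		/* 1 + 4 + 0 = 5 */
}
#endif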

/*
 * Doubly linked (absolute) queue handling via the REMQUE/INSQUE
 * instructions: vax_remque() unlinks the element and clears its backward
 * pointer; vax_insque() inserts the element p after the element q.
 */
static __inline void __attribute__((__unused__))
vax_remque(void *p)
{
	__asm volatile ("remque (%0),%0;clrl 4(%0)"
			:
			: "r" (p)
			: "memory" );
}
#define _remque vax_remque

static __inline void  __attribute__((__unused__))
vax_insque(void *p, void *q)
{
	__asm volatile ("insque (%0),(%1)"
			:
			: "r" (p),"r" (q)
			: "memory" );
}
#define _insque vax_insque
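
/*
 * The elements handled by _insque/_remque above are VAX absolute queue
 * entries: the first longword is the forward link and the second the
 * backward link (which vax_remque() clears after unlinking).  A minimal
 * sketch of a compatible element layout; the example_* names are purely
 * illustrative:
 */
#if 0	/* example only */
struct example_qelem {
	struct example_qelem *qe_next;	/* forward link, offset 0 */
	struct example_qelem *qe_prev;	/* backward link, offset 4 */
	int qe_data;			/* payload follows the two links */
};
#endif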

#if 0
static __inline void * __attribute__((__unused__))
vax_memcpy(void *to, const void *from, size_t len)
{
	if (len > 65535) {
		__blkcpy(from, to, len);
	} else {
		__asm volatile ("movc3 %1,(%2),%0"
			: "=m" (*(char *)to)
			: "g" (len), "r" (*(const char *)from)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return to;
}
#define memcpy vax_memcpy

static __inline void * __attribute__((__unused__))
vax_memmove(void *to, const void *from, size_t len)
{
	if (len > 65535) {
		__blkcpy(from, to, len);
	} else {
		__asm volatile ("movc3 %1,%2,%0"
			: "=m" (*(char *)to)
			: "g" (len), "mo" (*(const char *)from)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return to;
}
#define memmove vax_memmove
#endif

/*
 * memset via MOVC5; the MOVC5 length operands are 16-bit words, so longer
 * regions fall back to __blkset().
 */
static __inline void * __attribute__((__unused__))
vax_memset(void *block, int c, size_t len)
{
	if (len > 65535) {
		__blkset(block, c, len);
	} else {
		__asm volatile ("movc5 $0,(%%sp),%2,%1,%0"
			: "=m" (*(char *)block)
			:  "g" (len), "g" (c)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return block;
}
#define memset vax_memset

#ifdef notdef
/* XXX - the return value of this memcmp does not match the memcmp(3) convention */
static __inline int __attribute__((__unused__))
memcmp(const void *b1, const void *b2, size_t len)
{
	register int ret;

	__asm volatile("cmpc3 %3,(%1),(%2);"
			   "movl %%r0,%0"
			: "=r" (ret)
			: "r" (b1), "r" (b2), "r" (len)
			: "r0","r1","r2","r3" );
	return ret;
}
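
/*
 * Illustration of the XXX above, assuming the usual CMPC3 behaviour of
 * leaving the count of not-yet-compared bytes in r0: this memcmp() only
 * reports how far from the end the buffers differed, never the sign of
 * the difference that memcmp(3) requires, e.g.
 *
 *	memcmp("abc", "abd", 3) -> 1	(differ with one byte left)
 *	memcmp("abd", "abc", 3) -> 1	(same value; ordering is lost)
 *	memcmp("abc", "abc", 3) -> 0	(equal buffers still yield 0)
 */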

static __inline int __attribute__((__unused__))
bcmp(const void *b1, const void *b2, size_t len)
{
	register int ret;

	__asm volatile("cmpc3 %3,(%1),(%2);"
			   "movl %%r0,%0"
			: "=r" (ret)
			: "r" (b1), "r" (b2), "r" (len)
			: "r0","r1","r2","r3" );
	return ret;
}

/* Begin new routines */
static __inline size_t __attribute__((__unused__))
strlen(const char *cp)
{
        register size_t ret;

        __asm volatile("locc $0,$65535,(%1);"
			   "subl3 %%r0,$65535,%0"
                        : "=r" (ret)
                        : "r" (cp)
                        : "r0","r1","cc" );
        return  ret;
}

static __inline char * __attribute__((__unused__))
strcat(char *cp, const char *c2)
{
        __asm volatile("locc $0,$65535,(%1);"
			   "subl3 %%r0,$65535,%%r2;"
			   "incl %%r2;"
                           "locc $0,$65535,(%0);"
			   "movc3 %%r2,(%1),(%%r1)"
                        :
                        : "r" (cp), "r" (c2)
                        : "r0","r1","r2","r3","r4","r5","memory","cc");
        return  cp;
}

static __inline char * __attribute__((__unused__))
strncat(char *cp, const char *c2, size_t count)
{
        __asm volatile("locc $0,%2,(%1);"
			   "subl3 %%r0,%2,%%r2;"
                           "locc $0,$65535,(%0);"
			   "movc3 %%r2,(%1),(%%r1);"
			   "movb $0,(%%r3)"
                        :
                        : "r" (cp), "r" (c2), "g"(count)
                        : "r0","r1","r2","r3","r4","r5","memory","cc");
        return  cp;
}

static __inline char * __attribute__((__unused__))
strcpy(char *cp, const char *c2)
{
        __asm volatile("locc $0,$65535,(%1);"
			   "subl3 %%r0,$65535,%%r2;"
                           "movc3 %%r2,(%1),(%0);"
			   "movb $0,(%%r3)"
                        :
                        : "r" (cp), "r" (c2)
                        : "r0","r1","r2","r3","r4","r5","memory","cc");
        return  cp;
}

static __inline char * __attribute__((__unused__))
strncpy(char *cp, const char *c2, size_t len)
{
        __asm volatile("movl %2,%%r2;"
			   "locc $0,%%r2,(%1);"
			   "beql 1f;"
			   "subl3 %%r0,%2,%%r2;"
                           "clrb (%0)[%%r2];"
			   "1:;"
			   "movc3 %%r2,(%1),(%0)"
                        :
                        : "r" (cp), "r" (c2), "g"(len)
                        : "r0","r1","r2","r3","r4","r5","memory","cc");
        return  cp;
}

static __inline void * __attribute__((__unused__))
memchr(const void *cp, int c, size_t len)
{
        void *ret;
        __asm volatile("locc %2,%3,(%1);"
			   "bneq 1f;"
			   "clrl %%r1;"
			   "1:;"
			   "movl %%r1,%0"
                        : "=g"(ret)
                        : "r" (cp), "r" (c), "g"(len)
                        : "r0","r1","cc");
        return  ret;
}

static __inline int __attribute__((__unused__))
strcmp(const char *cp, const char *c2)
{
        register int ret;
        __asm volatile("locc $0,$65535,(%1);"
			   "subl3 %%r0,$65535,%%r0;"
			   "incl %%r0;"
                           "cmpc3 %%r0,(%1),(%2);"
			   "beql 1f;"
			   "movl $1,%%r2;"
                           "cmpb (%%r1),(%%r3);"
			   "bcc 1f;"
			   "mnegl $1,%%r2;"
			   "1:;"
			   "movl %%r2,%0"
                        : "=g"(ret)
                        : "r" (cp), "r" (c2)
                        : "r0","r1","r2","r3","cc");
        return  ret;
}
#endif

#if 0 /* unused, but no point in deleting it since it _is_ an instruction */
static __inline int __attribute__((__unused__))
locc(int mask, char *cp, size_t size){
	register int ret;

	__asm volatile("locc %1,%2,(%3);"
			   "movl %%r0,%0"
			: "=r" (ret)
			: "r" (mask),"r"(size),"r"(cp)
			: "r0","r1" );
	return	ret;
}
#endif

/*
 * scanc(): SCANC instruction wrapper; returns the number of bytes left
 * when a byte whose table entry ANDed with mask is non-zero is found,
 * or 0 if no such byte exists in the region.
 */
static __inline int __attribute__((__unused__))
vax_scanc(u_int size, const u_char *cp, const u_char *table, int mask)
{
	register int ret;

	__asm volatile("scanc %1,(%2),(%3),%4;"
			   "movl %%r0,%0"
			: "=g"(ret)
			: "r"(size),"r"(cp),"r"(table),"r"(mask)
			: "r0","r1","r2","r3" );
	return ret;
}
#define scanc vax_scanc

/*
 * skpc(): SKPC instruction wrapper; skips leading bytes equal to mask and
 * returns the number of bytes remaining, or 0 if the whole region matched.
 */
static __inline int __attribute__((__unused__))
vax_skpc(int mask, size_t size, u_char *cp)
{
	register int ret;

	__asm volatile("skpc %1,%2,(%3);"
			   "movl %%r0,%0"
			: "=g"(ret)
			: "r"(mask),"r"(size),"r"(cp)
			: "r0","r1" );
	return	ret;
}
#define skpc vax_skpc

/*
 * Interlocked single-bit test-and-set/test-and-clear.
 * bbssi() sets the bit and returns 1 if it was previously clear,
 * 0 if it was already set; bbcci() clears the bit and returns 1 if it
 * was previously set, 0 if it was already clear.
 */
static __inline int __attribute__((__unused__))
bbssi(int bitnr, long *addr)
{
	register int ret;

	__asm volatile("clrl %%r0;"
			   "bbssi %1,%2,1f;"
			   "incl %%r0;"
			   "1:;"
			   "movl %%r0,%0"
		: "=&r"(ret)
		: "g"(bitnr),"m"(*addr)
		: "r0","cc","memory");
	return ret;
}

static __inline int __attribute__((__unused__))
bbcci(int bitnr, long *addr)
{
	register int ret;

	__asm volatile("clrl %%r0;"
			   "bbcci %1,%2,1f;"
			   "incl %%r0;"
			   "1:;"
			   "movl %%r0,%0"
		: "=&r"(ret)
		: "g"(bitnr),"m"(*addr)
		: "r0","cc","memory");
	return ret;
}
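
/*
 * Minimal usage sketch for bbssi()/bbcci(): an interlocked flag taken by
 * spinning on bbssi() and released with bbcci().  The example_* names and
 * the busy-wait policy are assumptions made for the illustration only.
 */
#if 0	/* example only */
static long example_flag;		/* bit 0 serves as the flag bit */

static __inline void
example_flag_acquire(void)
{
	/* bbssi() returns 1 only when it changed the bit from clear to set. */
	while (bbssi(0, &example_flag) == 0)
		;			/* already set by someone else: retry */
}

static __inline void
example_flag_release(void)
{
	/* bbcci() returns 1 when it changed the bit from set to clear. */
	(void)bbcci(0, &example_flag);
}
#endif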

#define setrunqueue(p)	\
	__asm volatile("movl %0,%%r0;jsb Setrq" :: "g"(p):"r0","r1","r2")

#define remrunqueue(p)	\
	__asm volatile("movl %0,%%r0;jsb Remrq" :: "g"(p):"r0","r1","r2")

#define cpu_switch(p, newp) ({ 						\
	register int ret;						\
	__asm volatile("movpsl -(%%sp);jsb Swtch; movl %%r0,%0"	\
	    : "=g"(ret) ::"r0","r1","r2","r3","r4","r5");		\
	ret; })

#define	cpu_switchto(p, newp)						\
	__asm volatile("movpsl -(%%sp); movl %0,%%r2; jsb Swtchto"	\
	    :: "g" (newp) : "r0", "r1", "r2", "r3", "r4", "r5")

/*
 * Interlock instructions.  Used both in multiprocessor environments to
 * lock between CPUs and in uniprocessor systems when locking is required
 * between I/O devices and the master CPU.
 */
/*
 * insqti() locks and inserts an element at the tail of a queue.
 * Returns -1 if the interlock failed, 0 if the element is the first in
 * the queue, and 1 if it was inserted after existing entries.
 */
static __inline int __attribute__((__unused__))
insqti(void *entry, void *header) {
	register int ret;

	__asm volatile(
		"	mnegl $1,%0;"
		"	insqti (%1),(%2);"
		"	bcs 1f;"		/* failed insert */
		"	beql 2f;"		/* jump if first entry */
		"	movl $1,%0;"
		"	brb 1f;"
		"2:	clrl %0;"
		"	1:;"
			: "=&g"(ret)
			: "r"(entry), "r"(header)
			: "memory");

	return ret;
}

/*
 * remqhi() removes an element from the head of the queue.
 * Returns -1 if the interlock failed, 0 if the queue was empty, and the
 * address of the removed element otherwise.
 */
static __inline void * __attribute__((__unused__))
remqhi(void *header) {
	register void *ret;

	__asm volatile(
		"	remqhi (%1),%0;"
		"	bcs 1f;"		/* failed interlock */
		"	bvs 2f;"		/* nothing was removed */
		"	brb 3f;"
		"1:	mnegl $1,%0;"
		"	brb 3f;"
		"2:	clrl %0;"
		"	3:;"
			: "=&g"(ret)
			: "r"(header)
			: "memory");

	return ret;
}
#define	ILCK_FAILED	-1	/* Interlock failed */
#define	Q_EMPTY		0	/* Queue is/was empty */
#define	Q_OK		1	/* Inserted OK */
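
/*
 * Minimal usage sketch for insqti()/remqhi() together with the status
 * values above; the example_* queue header, element type and retry
 * policy are assumptions made for the illustration only.
 */
#if 0	/* example only */
struct example_ielem {
	struct example_ielem *ie_next;	/* forward link */
	struct example_ielem *ie_prev;	/* backward link */
};
static struct example_ielem example_iq;	/* queue header */

static __inline int
example_enqueue(struct example_ielem *ep)
{
	int s;

	/* Retry while another agent holds the queue interlock. */
	while ((s = insqti(ep, &example_iq)) == ILCK_FAILED)
		;
	return s;		/* Q_EMPTY if ep became the first element */
}

static __inline struct example_ielem *
example_dequeue(void)
{
	void *ep;

	/* Retry on interlock failure; NULL means the queue was empty. */
	while ((ep = remqhi(&example_iq)) == (void *)ILCK_FAILED)
		;
	return ep;
}
#endif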

#endif	/* !_VAX_MACROS_H_ && !__lint__ */