/*	$NetBSD: macros.h,v 1.43 2007/11/20 13:37:37 yamt Exp $	*/

/*
 * Copyright (c) 1994, 1998, 2000 Ludd, University of Luleå, Sweden.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *     This product includes software developed at Ludd, University of Luleå.
 * 4. The name of the author may not be used to endorse or promote products
 *    derived from this software without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 */

 /* All bugs are subject to removal without further notice */

#if !defined(_VAX_MACROS_H_) && !defined(__lint__)
#define _VAX_MACROS_H_

void	__blkset(void *, int, size_t);
void	__blkcpy(const void *, void *, size_t);

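/*
 * MOVC3/MOVC5 take 16-bit length operands, so single-instruction copies
 * and fills top out at 65535 bytes; __blkset() and __blkcpy() (defined
 * elsewhere in this port) handle larger regions.  A minimal sketch of
 * the likely chunking idea, with a hypothetical name -- not the actual
 * implementation:
 */
#if 0
static __inline void
__blkset_sketch(void *block, int c, size_t len)
{
	char *p = block;

	while (len > 65535) {		/* one MOVC5-sized piece at a time */
		memset(p, c, 65535);
		p += 65535;
		len -= 65535;
	}
	memset(p, c, len);		/* remainder fits a single MOVC5 */
}
#endif
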
#if !__GNUC_PREREQ__(4, 1)
/* General-purpose inline macros are kept here */

static __inline int __attribute__((__unused__))
vax_ffs(int reg)
{
	register int val;

	__asm volatile ("ffs $0,$32,%1,%0;"
			    "bneq 1f;"
			    "mnegl $1,%0;"
			    "1:;"
			    "incl %0"
			: "=&r" (val)
			: "r" (reg) );
	return	val;
}
#define ffs vax_ffs
#endif
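
/*
 * For reference: a portable C rendering of the ffs(3) contract the asm
 * above implements (1-based index of the least significant set bit, or
 * 0 when no bit is set).  A documentation sketch only, with a
 * hypothetical name; GCC >= 4.1 supplies a builtin instead.
 */
#if 0
static __inline int
vax_ffs_ref(int reg)
{
	int i;

	for (i = 0; i < 32; i++)
		if (reg & (1 << i))
			return (i + 1);
	return (0);
}
#endif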

static __inline void __attribute__((__unused__))
vax_remque(void *p)
{
	__asm volatile ("remque (%0),%0;clrl 4(%0)"
			:
			: "r" (p)
			: "memory" );
}
#define _remque vax_remque

static __inline void  __attribute__((__unused__))
vax_insque(void *p, void *q)
{
	__asm volatile ("insque (%0),(%1)"
			:
			: "r" (p),"r" (q)
			: "memory" );
}
#define _insque vax_insque
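
/*
 * INSQUE/REMQUE operate on doubly linked queues with absolute links: a
 * forward pointer at offset 0 and a backward pointer at offset 4 of
 * each element, which is why vax_remque() clears 4(%0) of the removed
 * entry.  A sketch of that assumed layout (hypothetical type name):
 */
#if 0
struct vax_queue_entry {
	struct vax_queue_entry *qe_forw;	/* offset 0: forward link */
	struct vax_queue_entry *qe_back;	/* offset 4: backward link */
};
#endif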

#if 0
static __inline void *__attribute__((__unused__))
vax_memcpy(void *to, const void *from, size_t len)
{
	if (len > 65535) {
		__blkcpy(from, to, len);
	} else {
		__asm volatile ("movc3 %1,(%2),%0"
			: "=m" (*(char *)to)
			: "g" (len), "r" (*(const char *)from)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return to;
}
#define memcpy vax_memcpy

static __inline void *__attribute__((__unused__))
vax_memmove(void *to, const void *from, size_t len)
{
	if (len > 65535) {
		__blkcpy(from, to, len);
	} else {
		__asm __volatile ("movc3 %1,%2,%0"
			: "=m" (*(char *)to)
			: "g" (len), "mo" (*(const char *)from)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return to;
}
#define memmove vax_memmove
#endif
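
/*
 * Both routines above can use MOVC3 because the VAX string instructions
 * are specified to produce the same result as if the source were first
 * copied to an internal buffer, so overlapping source and destination
 * are handled correctly -- memmove() needs no separate backward copy.
 */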

static __inline void *__attribute__((__unused__))
vax_memset(void *block, int c, size_t len)
{
	if (len > 65535) {
		__blkset(block, c, len);
	} else {
		__asm __volatile ("movc5 $0,(%%sp),%2,%1,%0"
			: "=m" (*(char *)block)
			:  "g" (len), "g" (c)
			:"r0","r1","r2","r3","r4","r5","memory","cc");
	}
	return block;
}
#define memset vax_memset
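
/*
 * MOVC5 with a source length of 0 ignores the source operand and fills
 * the entire destination with the fill character, which is what the asm
 * above exploits; (%sp) is merely a harmless dummy source address.
 * Usage is the ordinary memset(3) contract, e.g.:
 *
 *	char buf[128];
 *	memset(buf, 0, sizeof(buf));	-> a single MOVC5
 */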

#ifdef notdef
/*
 * XXX - the return value of memcmp below is wrong: CMPC3 leaves a
 * residual byte count (never negative) in r0, not the signed
 * less/equal/greater value that memcmp(3) must return.
 */
static __inline int __attribute__((__unused__))
memcmp(const void *b1, const void *b2, size_t len)
{
	register int ret;

	__asm volatile("cmpc3 %3,(%1),(%2);"
			   "movl %%r0,%0"
			: "=r" (ret)
			: "r" (b1), "r" (b2), "r" (len)
			: "r0","r1","r2","r3" );
	return ret;
}

static __inline int __attribute__((__unused__))
bcmp(const void *b1, const void *b2, size_t len)
{
	register int ret;

	__asm volatile("cmpc3 %3,(%1),(%2);"
			   "movl %%r0,%0"
			: "=r" (ret)
			: "r" (b1), "r" (b2), "r" (len)
			: "r0","r1","r2","r3" );
	return ret;
}

/* Begin new ones */
static __inline size_t __attribute__((__unused__))
strlen(const char *cp)
{
        register size_t ret;

        __asm volatile("locc $0,$65535,(%1);"
			   "subl3 %%r0,$65535,%0"
                        : "=r" (ret)
                        : "r" (cp)
                        : "r0","r1","cc" );
        return  ret;
}

static __inline char * __attribute__((__unused__))
strcat(char *cp, const char *c2)
{
        __asm volatile("locc $0,$65535,(%1);"
			   "subl3 %%r0,$65535,%%r2;"
			   "incl %%r2;"
                           "locc $0,$65535,(%0);"
			   "movc3 %%r2,(%1),(%%r1)"
                        :
                        : "r" (cp), "r" (c2)
                        : "r0","r1","r2","r3","r4","r5","memory","cc");
        return  cp;
}

static __inline char * __attribute__((__unused__))
strncat(char *cp, const char *c2, size_t count)
{
        __asm volatile("locc $0,%2,(%1);"
			   "subl3 %%r0,%2,%%r2;"
                           "locc $0,$65535,(%0);"
			   "movc3 %%r2,(%1),(%%r1);"
			   "movb $0,(%%r3)"
                        :
                        : "r" (cp), "r" (c2), "g"(count)
                        : "r0","r1","r2","r3","r4","r5","memory","cc");
        return  cp;
}

static __inline char * __attribute__((__unused__))
strcpy(char *cp, const char *c2)
{
        __asm volatile("locc $0,$65535,(%1);"
			   "subl3 %%r0,$65535,%%r2;"
                           "movc3 %%r2,(%1),(%0);"
			   "movb $0,(%%r3)"
                        :
                        : "r" (cp), "r" (c2)
                        : "r0","r1","r2","r3","r4","r5","memory","cc");
        return  cp;
}

static __inline char * __attribute__((__unused__))
strncpy(char *cp, const char *c2, size_t len)
{
        __asm volatile("movl %2,%%r2;"
			   "locc $0,%%r2,(%1);"
			   "beql 1f;"
			   "subl3 %%r0,%2,%%r2;"
                           "clrb (%0)[%%r2];"
			   "1:;"
			   "movc3 %%r2,(%1),(%0)"
                        :
                        : "r" (cp), "r" (c2), "g"(len)
                        : "r0","r1","r2","r3","r4","r5","memory","cc");
        return  cp;
}

static __inline void *__attribute__((__unused__))
memchr(const void *cp, int c, size_t len)
{
        void *ret;
        __asm volatile("locc %2,%3,(%1);"
			   "bneq 1f;"
			   "clrl %%r1;"
			   "1:;"
			   "movl %%r1,%0"
                        : "=g"(ret)
                        : "r" (cp), "r" (c), "g"(len)
                        : "r0","r1","cc");
        return  ret;
}

static __inline int __attribute__((__unused__))
strcmp(const char *cp, const char *c2)
{
        register int ret;
        __asm volatile("locc $0,$65535,(%1);"
			   "subl3 %%r0,$65535,%%r0;"
			   "incl %%r0;"
                           "cmpc3 %%r0,(%1),(%2);"
			   "beql 1f;"
			   "movl $1,%%r2;"
                           "cmpb (%%r1),(%%r3);"
			   "bcc 1f;"
			   "mnegl $1,%%r2;"
			   "1:;"
			   "movl %%r2,%0"
                        : "=g"(ret)
                        : "r" (cp), "r" (c2)
                        : "r0","r1","r2","r3","cc");
        return  ret;
}
#endif

#if 0 /* unused, but no point in deleting it since it _is_ an instruction */
static __inline int __attribute__((__unused__))
locc(int mask, char *cp, size_t size)
{
	register int ret;

	__asm volatile("locc %1,%2,(%3);"
			   "movl %%r0,%0"
			: "=r" (ret)
			: "r" (mask),"r"(size),"r"(cp)
			: "r0","r1" );
	return	ret;
}
#endif

static __inline int __attribute__((__unused__))
vax_scanc(u_int size, const u_char *cp, const u_char *table, int mask)
{
	register int ret;

	__asm volatile("scanc %1,(%2),(%3),%4;"
			   "movl %%r0,%0"
			: "=g"(ret)
			: "r"(size),"r"(cp),"r"(table),"r"(mask)
			: "r0","r1","r2","r3" );
	return ret;
}
#define scanc vax_scanc
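
/*
 * For reference, the scanc semantics in portable C: scan bytes until
 * one whose table entry ANDs non-zero with the mask is found, and
 * return the number of bytes left (0 if none matched).  A sketch with
 * a hypothetical name, matching the SCANC instruction's r0 result:
 */
#if 0
static __inline int
vax_scanc_ref(u_int size, const u_char *cp, const u_char *table, int mask)
{
	const u_char *end = cp + size;

	while (cp < end && (table[*cp] & mask) == 0)
		cp++;
	return (end - cp);	/* bytes remaining, incl. the match */
}
#endif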

static __inline int __attribute__((__unused__))
vax_skpc(int mask, size_t size, u_char *cp)
{
	register int ret;

	__asm volatile("skpc %1,%2,(%3);"
			   "movl %%r0,%0"
			: "=g"(ret)
			: "r"(mask),"r"(size),"r"(cp)
			: "r0","r1" );
	return	ret;
}
#define skpc vax_skpc
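
/*
 * Likewise for skpc: skip over leading bytes equal to the mask byte and
 * return how many bytes remain (0 if the whole string matched).  A
 * portable sketch with a hypothetical name:
 */
#if 0
static __inline int
vax_skpc_ref(int mask, size_t size, u_char *cp)
{
	u_char *end = cp + size;

	while (cp < end && *cp == (u_char)mask)
		cp++;
	return (end - cp);	/* bytes remaining, incl. first mismatch */
}
#endif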

/*
 * Set/clear a bit at a memory position; interlocked.
 * Return 0 if the bit was already in the requested state, 1 otherwise.
 */
static __inline int __attribute__((__unused__))
bbssi(int bitnr, long *addr)
{
	register int ret;

	__asm volatile("clrl %%r0;"
			   "bbssi %1,%2,1f;"
			   "incl %%r0;"
			   "1:;"
			   "movl %%r0,%0"
		: "=&r"(ret)
		: "g"(bitnr),"m"(*addr)
		: "r0","cc","memory");
	return ret;
}

static __inline int __attribute__((__unused__))
bbcci(int bitnr, long *addr)
{
	register int ret;

	__asm volatile("clrl %%r0;"
			   "bbcci %1,%2,1f;"
			   "incl %%r0;"
			   "1:;"
			   "movl %%r0,%0"
		: "=&r"(ret)
		: "g"(bitnr),"m"(*addr)
		: "r0","cc","memory");
	return ret;
}
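
/*
 * These interlocked bit primitives are the natural building block for a
 * simple spin lock.  A minimal sketch, assuming bit 0 of a long is the
 * lock bit (hypothetical helper names, not the kernel's lock API):
 */
#if 0
static __inline void
vax_bitlock_acquire(long *lockword)
{
	while (bbssi(0, lockword) == 0)
		;	/* bit already set: spin until the holder clears it */
}

static __inline void
vax_bitlock_release(long *lockword)
{
	(void)bbcci(0, lockword);	/* interlocked clear of the lock bit */
}
#endif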

/*
 * Switch to another lwp: pass the old and new lwp to the assembler
 * routine Swtchto in r0/r1 and return the previously running lwp.
 */
static inline struct lwp *
cpu_switchto(struct lwp *oldlwp, struct lwp *newlwp, bool returning)
{
	struct lwp *prevlwp;
	__asm volatile(
		"movl %1,%%r0;"
		"movl %2,%%r1;"
		"movpsl -(%%sp);"
		"jsb Swtchto;"
		"movl %%r0,%0"
	    : "=g"(prevlwp)
	    : "g" (oldlwp), "g" (newlwp)
	    : "r0", "r1");
	return prevlwp;
}

/*
 * Interlock instructions. Used both in multiprocessor environments to
 * lock between CPUs and in uniprocessor systems when locking is required
 * between I/O devices and the master CPU.
 */
/*
 * Insqti() locks and inserts an element at the end of a queue.
 * Returns -1 if the interlock failed, 1 if inserted OK, and 0 if it was
 * the first entry in the queue.
 */
static __inline int __attribute__((__unused__))
insqti(void *entry, void *header)
{
	register int ret;

	__asm volatile(
		"	mnegl $1,%0;"
		"	insqti (%1),(%2);"
		"	bcs 1f;"		/* failed insert */
		"	beql 2f;"		/* jump if first entry */
		"	movl $1,%0;"
		"	brb 1f;"
		"2:	clrl %0;"
		"	1:;"
			: "=&g"(ret)
			: "r"(entry), "r"(header)
			: "memory");

	return ret;
}

/*
 * Remqhi() removes an element from the head of the queue.
 * Returns -1 if the interlock failed, 0 if the queue was empty, and the
 * address of the removed element otherwise.
 */
static __inline void *__attribute__((__unused__))
remqhi(void *header)
{
	register void *ret;

	__asm volatile(
		"	remqhi (%1),%0;"
		"	bcs 1f;"		/* failed interlock */
		"	bvs 2f;"		/* nothing was removed */
		"	brb 3f;"
		"1:	mnegl $1,%0;"
		"	brb 3f;"
		"2:	clrl %0;"
		"	3:;"
			: "=&g"(ret)
			: "r"(header)
			: "memory");

	return ret;
}
#define	ILCK_FAILED	-1	/* Interlock failed */
#define	Q_EMPTY		0	/* Queue is/was empty */
#define	Q_OK		1	/* Inserted OK */
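
/*
 * A usage sketch (hypothetical wrappers): the interlock can fail
 * transiently when another CPU or device holds it, so callers normally
 * retry until the operation goes through.
 */
#if 0
static __inline void
enqueue_retry(void *entry, void *header)
{
	while (insqti(entry, header) == ILCK_FAILED)
		;	/* interlock busy: try again */
}

static __inline void *
dequeue_retry(void *header)
{
	void *elem;

	while ((elem = remqhi(header)) == (void *)ILCK_FAILED)
		;	/* interlock busy: try again */
	return elem;	/* NULL if the queue was empty (Q_EMPTY) */
}
#endif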

#endif	/* !_VAX_MACROS_H_ && !__lint__ */