/src/tests/lib/csu/arch/hppa/
  h_initfini_align.S
      8: extru %sp,31,6,%ret0
      9: comiclr,<> 0, %ret0, %ret0
      10: ldi 1,%ret0

/src/lib/libpthread/arch/hppa/
  pthread_md.S
      36: ldi 1,%ret0 /* 1 == unlocked */
      37: stw %ret0,0(%arg0)
      38: stw %ret0,4(%arg0)
      39: stw %ret0,8(%arg0)
      40: stw %ret0,12(%arg0)
      52: ldw 0(%arg0),%ret0
      56: comiclr,= 0,%ret0,%ret0 /* if locked return 0 */
      57: ldi 1,%ret0 /* else return 1 */
      65: ldi 1,%ret0 /* 1 == unlocked * [all...]

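The pthread_md.S comments spell out the hppa spin-lock convention: a nonzero word means "unlocked" and zero means "held". hppa's only atomic read-modify-write, ldcw, loads a word and clears it in one step and requires a 16-byte-aligned target, which is presumably why the init path stores 1 into all four words of the lock area. A minimal C sketch of that value convention follows; the names are hypothetical and the atomic acquire itself is not modelled, since in the real code it is the ldcw instruction:

    #include <stdint.h>

    /*
     * Sketch of the hppa spin-lock convention from pthread_md.S:
     * nonzero == unlocked, zero == held.  The real acquire is the
     * atomic "ldcw" (load word and clear), which this model does not
     * reproduce; it only shows the value convention.
     */
    typedef volatile uint32_t hppa_slock_word_t;

    static void
    slock_init(hppa_slock_word_t *l)
    {
        l[0] = l[1] = l[2] = l[3] = 1;  /* 1 == unlocked, as in lines 36-40 */
    }

    static int
    slock_is_unlocked(const hppa_slock_word_t *l)
    {
        return l[0] != 0;               /* if locked (0), report 0 */
    }

    static void
    slock_release(hppa_slock_word_t *l)
    {
        l[0] = 1;                       /* store nonzero to release */
    }
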
/src/lib/libc/arch/hppa/sys/
  fork.S
      41: and %ret0, %ret1, %ret0
  __vfork14.S
      65: and %ret0, %ret1, %ret0
  pipe.S
      54: stw %ret0, 0(%arg0)
      57: copy %r0, %ret0
  cerror.S
      48: stw %t1, 0(%ret0)
      59: ldi -1, %ret0
  __clone.S
      84: copy %ret0, %arg0

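The cerror.S hits (the hppa one above and the ia64 one further down) are the shared libc error epilogue for system-call stubs: the error number handed back by the kernel is written into errno (the asm versions store it through the errno address left in ret0) and the stub returns -1 to its C caller. A rough C model of that behaviour, with a hypothetical name:

    #include <errno.h>

    /*
     * Rough model of what the cerror stub does after a failed syscall:
     * store the kernel's error number into errno and return -1, the
     * conventional C-level failure value.
     */
    static long
    cerror_model(int error)
    {
        errno = error;      /* hppa: stw %t1, 0(%ret0); ia64: st4 [ret0]=loc2 */
        return -1;          /* hppa: ldi -1, %ret0;     ia64: mov ret0=-1 */
    }
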
/src/lib/libc/compat/arch/hppa/sys/
  compat_Ovfork.S
      41: and %ret1, %ret0, %ret0

/src/common/lib/libc/arch/hppa/atomic/
  atomic_cas_up.S
      39: ldw 0(%arg0), %ret0
      40: comb,<>,n %arg1, %ret0, 1f
      52: ldh 0(%arg0),%ret0
      53: comb,<>,n %arg1, %ret0, 1f
      64: ldb 0(%arg0),%ret0
      65: comb,<>,n %arg1, %ret0, 1f

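atomic_cas_up.S is the uniprocessor compare-and-swap helper; each excerpt pairs a load (word, halfword, byte) with a compare-and-branch that skips the store when the current value does not match the expected one. Its contract can be modelled in C as below; the sketch ignores the atomicity the real routine has to guarantee, and the 16- and 8-bit variants differ only in operand size:

    #include <stdint.h>

    /*
     * C model of the compare-and-swap contract in atomic_cas_up.S:
     * return the value found at *p, and store "newval" only when that
     * value equals "expected".  The real routine makes the sequence
     * effectively atomic; this model does not.
     */
    static uint32_t
    cas32_model(volatile uint32_t *p, uint32_t expected, uint32_t newval)
    {
        uint32_t old = *p;          /* ldw 0(%arg0), %ret0 */

        if (old == expected)        /* comb,<>,n branches past the store */
            *p = newval;
        return old;
    }
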
/src/common/lib/libc/arch/ia64/string/
  ffs.S
      50: * Initialize return value (ret0), and set up r15 so that it
      54: mov ret0=1 ;;
      68: add ret0=16,ret0 ;;
      78: add ret0=8,ret0 ;;
      90: add ret0=4,ret0 ;;
      94: add ret0=2,ret0 ;;
      [all...]

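The ia64 ffs.S excerpt shows the classic binary-search form of ffs(3): the result starts at 1 and grows by 16, 8, 4, 2 (and finally 1) as the interval containing the lowest set bit is halved. The same structure in plain C:

    /*
     * Binary-search ffs(): 1-based index of the least significant set
     * bit, 0 if no bit is set.  Mirrors the "mov ret0=1" then
     * "add ret0=16,ret0" / "add ret0=8,ret0" / ... steps above.
     */
    static int
    ffs_model(unsigned int x)
    {
        int n = 1;

        if (x == 0)
            return 0;
        if ((x & 0xffff) == 0) { n += 16; x >>= 16; }
        if ((x & 0x00ff) == 0) { n += 8;  x >>= 8;  }
        if ((x & 0x000f) == 0) { n += 4;  x >>= 4;  }
        if ((x & 0x0003) == 0) { n += 2;  x >>= 2;  }
        if ((x & 0x0001) == 0) { n += 1; }
        return n;
    }

Judging by its comments, the hppa ffs.S listed next appears to run the same narrowing from the other end, starting at 32 and subtracting.
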
/src/lib/libc/arch/hppa/string/
  ffs.S
      43: or %r0,%r0,%ret0 ; return 0
      44: ldi 32,%ret0 ; Set return to high bit
      46: addi,tr -16,%ret0,%ret0 ; subtract 16 from bitpos
      49: addi,tr -8,%ret0,%ret0 ; subtract 8 from bitpos
      52: addi,tr -4,%ret0,%ret0 ; subtract 4 from bitpos
      55: addi,tr -2,%ret0,%ret0 ; subtract 2 from bitpo [all...]
  strlcpy.S
      54: copy %arg1, %ret0
      83: sub %arg1, %ret0, %ret0
  bcmp.S
      81: sub %t1,%t2,%ret0

/src/lib/libc/arch/ia64/sys/
  cerror.S
      37: mov loc2=ret0
      38: mov out0=ret0
      41: st4 [ret0]=loc2
      43: mov ret0=-1
  pipe.S
      45: st4 [r14]=ret0,4
      48: mov ret0=0
  sbrk.S
      52: ld8 ret0 = [r14]
      55: add r32 = ret0, r32
      64: ld8 ret0 = [r14]

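The sbrk.S lines are consistent with the usual way sbrk() is layered over brk(2): read the cached break, add the caller's increment (r32 is the first argument register), and hand the old break back on success. A hypothetical C rendering of that layering; the names are illustrative, not the ones in sbrk.S:

    #include <stdint.h>
    #include <unistd.h>

    /*
     * Hypothetical sketch of sbrk() layered over brk(2): remember the
     * current break, move it by "incr", and return the old break, or
     * (void *)-1 if the kernel refuses.
     */
    static void *curbrk;    /* assumed to be set up at startup */

    static void *
    sbrk_model(intptr_t incr)
    {
        void *old = curbrk;

        if (brk((char *)curbrk + incr) == -1)
            return (void *)-1;
        curbrk = (char *)curbrk + incr;
        return old;
    }
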
/src/tests/kernel/arch/hppa/
  threadspfunc.S
      42: copy %sp, %ret0 /* return sp */

/src/sys/arch/ia64/stand/ia64/ski/
  ssc.c
      50: register u_int64_t ret0 __asm("r8");
      55: : "=r"(ret0)
      58: return ret0;

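The two ssc.c hits (this one and the one under /src/sys/arch/ia64/ia64/) are the only C matches on ia64: they pin a local variable to r8, the ia64 ret0 return register, with GCC's explicit-register extension, so whatever the inline-asm simulator call leaves in r8 can be read back as an ordinary C variable. A stripped-down, ia64-and-GCC-only sketch of the pattern; the asm body here is a placeholder mov rather than the real SKI trap:

    #include <stdint.h>

    /*
     * Explicit-register binding as used in ssc.c: "ret0" is pinned to
     * r8, so whatever the asm statement leaves there becomes the
     * variable's value.  The asm body is a stand-in; the real code
     * issues the SKI simulator call instead.
     */
    static uint64_t
    regbind_example(uint64_t in0)
    {
        register uint64_t ret0 __asm("r8");

        __asm volatile ("mov %0 = %1"
            : "=r"(ret0)
            : "r"(in0));
        return ret0;
    }
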
/src/tests/lib/csu/arch/ia64/
  h_initfini_align.S
      14: mov ret0 = 1
      25: (p0) mov ret0 = 0
      35: (p0) mov ret0 = 0

/src/tests/lib/libc/arch/ia64/
  return_one.S
      8: mov ret0=1

/src/sys/arch/hppa/hppa/
  lock_stubs.S
      150: ldcw 0(%t1), %ret0
      153: comib,= 0, %ret0, .Lenter_slowpath
      173: copy %t1,%ret0
      176: copy %arg1,%ret0
      246: extru %arg0, 21+8-1, 8, %ret0
      248: zdep %ret0, 27, 28, %ret0
      251: addl %ret0, %r1, %ret0
      252: ldo 15(%ret0), %ret [all...]

/src/sys/lib/libkern/arch/hppa/
  milli_extra.S
      41: copy %arg0, %ret0

/src/lib/libc/gdtoa/
  g__fmt.c
      72: goto ret0;
      100: goto ret0;
      117: goto ret0;
      136: ret0:

/src/sys/arch/ia64/ia64/
  ssc.c
      53: register uint64_t ret0 __asm("r8");
      57: : "=r"(ret0)
      59: return ret0;

/src/libexec/ld.elf_so/arch/hppa/
  rtld_start.S
      112: bv %r0(%ret0)
      169: * bound function. Note that this includes %ret0, %ret1, and %t1.
      171: * %ret0 and %ret1 because they can have meaning on entry to a
      182: stw %ret0, 4(%r3)
      213: ldw 0(%ret0), %r21
      214: ldw 4(%ret0), %r19
      222: ldw 4(%r3), %ret0