/src/sys/arch/mvmeppc/stand/libsa/
srt0.S
     79  mfspr %r13,SPR_HID0
     81  andc %r13,%r13,%r14
     83  mtspr SPR_HID0,%r13
     89  LDCONST(%r13,_C_LABEL(_start))  /* Where we'd like to be */
     94  sub %r15,%r14,%r13              /* Our size, in bytes */
     97  cmpw %r13,%r16                  /* Do we need to relocate? */
    103  mr %r13,%r14                    /* dest -> end */
    109  * %r13 -> dest
    118  stw %r15,0(%r13) [all...]
/src/lib/csu/arch/powerpc/ |
crt0.S
     47  /* SVR4 ABI says small data is in r13 */
     48  addis %r13,%r11,_SDA_BASE_-1b@ha
     49  addi %r13,%r13,_SDA_BASE_-1b@l
/src/lib/libc/arch/x86_64/sys/ |
__clone.S
     52  pushq %r13
     63  movq %rcx,%r13
     73  movq %r13,%rdi  /* restore argument */
     81  popq %r13
     90  popq %r13
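The __clone.S hits show the wrapper parking the child's argument in callee-saved %r13 across the clone syscall and restoring it into %rdi before calling the child function, since the child starts on a brand-new stack. A usage sketch from C, assuming NetBSD's clone(2) prototype in <sched.h>; the child function, stack size and flag choice are illustrative:

    #include <sched.h>
    #include <signal.h>
    #include <stdio.h>
    #include <sys/types.h>
    #include <sys/wait.h>

    static int
    child(void *arg)
    {
        /* Runs on the caller-supplied stack with the argument the
         * wrapper preserved for us. */
        printf("child got \"%s\"\n", (const char *)arg);
        return 0;
    }

    int
    main(void)
    {
        static char stack[65536];       /* illustrative stack size */
        /* x86_64 stacks grow down, so pass the top of the region. */
        pid_t pid = clone(child, stack + sizeof(stack), SIGCHLD, "hello");

        if (pid == -1)
            return 1;
        waitpid(pid, NULL, 0);
        return 0;
    }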
/src/common/lib/libc/arch/or1k/string/ |
memset.S
     75  l.slli r13, r4, 8    /* shift left 8 bits */
     76  l.or r4, r4, r13     /* merge the two bytes */
     77  l.slli r13, r4, 16   /* shift left 16 bits */
     78  l.or r4, r4, r13     /* merge the two halves */
     82  l.andi r13, r3, 3    /* get low bits of start */
     83  l.sfeqi r13, 0       /* word aligned? */
     87  l.add r5, r5, r13    /* increase length */
     88  l.sub r3, r3, r13    /* mask word aligned */
     89  l.slli r13, r13, 3   /* bytes to bits * [all...]
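The memset.S hits splat the fill byte across a 32-bit word with two shift/or merges, then consume the unaligned head so the main loop can issue word stores. A rough C equivalent of that setup (function and variable names are mine, not from the OR1K source):

    #include <stddef.h>
    #include <stdint.h>

    void *
    memset_sketch(void *dst, int c, size_t n)
    {
        unsigned char *p = dst;
        uint32_t word = (uint8_t)c;

        word |= word << 8;      /* merge the two bytes  */
        word |= word << 16;     /* merge the two halves */

        /* Byte stores until the pointer is word aligned. */
        while (n > 0 && ((uintptr_t)p & 3) != 0) {
            *p++ = (unsigned char)c;
            n--;
        }
        /* Word stores for the aligned body. */
        for (; n >= 4; n -= 4, p += 4)
            *(uint32_t *)(void *)p = word;
        /* Byte stores for the tail. */
        while (n-- > 0)
            *p++ = (unsigned char)c;
        return dst;
    }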
memmove.S
     76  l.srli r13, r5, 2    /* How many words in total cnt */
     77  l.sfeqi r13, 0
     95  l.addi r13, r13, -1
     96  l.sfeqi r13, 0
    104  l.addi r13, r13, -1  /* Decrement count */
    105  l.sfeqi r13, 0       /* last word? */
    153  l.srli r13, r5, 2    /* Words in total count */
    154  l.sfeqi r13, [all...]
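memmove.S runs essentially the same counted loop in both directions and picks the direction from how the buffers overlap; the decision looks like this in C (a byte-at-a-time sketch for brevity):

    #include <stddef.h>

    void *
    memmove_sketch(void *dst, const void *src, size_t n)
    {
        unsigned char *d = dst;
        const unsigned char *s = src;

        if (d <= s || d >= s + n) {
            while (n--)                 /* no harmful overlap: copy forward */
                *d++ = *s++;
        } else {
            d += n;
            s += n;
            while (n--)                 /* dst inside src: copy backward */
                *--d = *--s;
        }
        return dst;
    }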
strlen.S
     68  l.movhi r13, 0x7f7f
     70  l.ori r13, r13, 0x7f7f
     92  4: l.or r7, r8, r13  /* t0 = x | 0x7f7f7f7f */
    101  l.and r7, r8, r13    /* t0 = x & 0x7f7f7f7f */
    102  l.or r6, r8, r13     /* t1 = x | 0x7f7f7f7f */
    103  l.add r5, r7, r13    /* t2 = t0 + 0x7f7f7f7f */
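The strlen.S hits implement the word-at-a-time NUL scan around the 0x7f7f7f7f mask: with t0 = x & m, t1 = x | m and t2 = t0 + m, a byte of x is zero exactly when the corresponding high bit of t1 | t2 stays clear. A C sketch of the same test (names are mine; it relies on aligned word loads never crossing a page boundary):

    #include <stddef.h>
    #include <stdint.h>

    static int
    has_zero_byte(uint32_t x)
    {
        const uint32_t m = 0x7f7f7f7f;

        /* All high bits of (x|m) | ((x&m)+m) are set iff no byte is zero. */
        return (((x & m) + m) | x | m) != 0xffffffff;
    }

    size_t
    strlen_sketch(const char *s)
    {
        const char *p = s;

        /* Byte loop until the pointer is word aligned. */
        while (((uintptr_t)p & 3) != 0) {
            if (*p == '\0')
                return (size_t)(p - s);
            p++;
        }
        /* Word loop: stop at the first word containing a zero byte. */
        while (!has_zero_byte(*(const uint32_t *)(const void *)p))
            p += 4;
        /* Locate the exact NUL inside that word. */
        while (*p != '\0')
            p++;
        return (size_t)(p - s);
    }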
/src/sys/arch/acorn32/stand/nbfs/ |
rmheader.S
     86  stmfd r13!, {r14}
     96  ldmfd r13!, {pc}
     99  stmfd r13!, {r14}
    105  ldmfd r13!, {pc}          /* If that failed, so do we */
    108  stmfd r13!, {r14}
    113  ldmfd r13!, {pc}
    118  stmfd r13!, {r0-r3, r14}
    120  ldmfd r13!, {r0-r3, pc}
    124  ldmfdne r13!, {r14}       /* If so, load up return address */
    129  ldmfd r13!, {pc}          /* and return * [all...]
/src/regress/sys/arch/arm/sigstackalign/ |
stackptr.S
      9  str r13, [r0]
/src/sys/arch/arm/iomd/ |
iomd_fiq.S
     63  * r13 - scratch (loop counter)
     75  addne r13, r13, #0x00000001
     94  * r13 - scratch (loop counter)
    106  addne r13, r13, #0x00000001
/src/sys/external/bsd/drm2/dist/drm/nouveau/nvkm/subdev/pmu/fuc/ |
kernel.fuc
     50  // $r13 - data (return)
     54  imm32($r13, NV_PPWR_MMIO_CTRL_OP_RD | NV_PPWR_MMIO_CTRL_TRIGGER)
     55  nv_iowr(NV_PPWR_MMIO_CTRL, $r13)
     57  nv_iord($r13, NV_PPWR_MMIO_CTRL)
     58  and $r13 NV_PPWR_MMIO_CTRL_STATUS
     60  nv_iord($r13, NV_PPWR_MMIO_DATA)
     67  // $r13 - data
     71  nv_iowr(NV_PPWR_MMIO_DATA, $r13)
     72  imm32($r13, NV_PPWR_MMIO_CTRL_OP_WR | NV_PPWR_MMIO_CTRL_MASK_B32_0 | NV_PPWR_MMIO_CTRL_TRIGGER)
     75  push $r13 [all...]
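kernel.fuc drives host MMIO through a small mailbox: load a control word with the opcode and TRIGGER bit, spin until the STATUS field clears, then read or write the data register. A C rendering of the read path (register offsets, bit values and the pmu_io mapping are hypothetical stand-ins, not the real NV_PPWR layout):

    #include <stdint.h>

    /* Hypothetical offsets/bits standing in for the NV_PPWR_MMIO_* registers. */
    #define MMIO_ADDR           0x00
    #define MMIO_DATA           0x04
    #define MMIO_CTRL           0x08
    #define MMIO_CTRL_OP_RD     0x00000001u
    #define MMIO_CTRL_TRIGGER   0x80000000u
    #define MMIO_CTRL_STATUS    0x00070000u

    static volatile uint32_t *pmu_io;   /* assumed mapping of the PMU I/O space */

    static uint32_t
    pmu_iord(uint32_t off)
    {
        return pmu_io[off / 4];
    }

    static void
    pmu_iowr(uint32_t off, uint32_t val)
    {
        pmu_io[off / 4] = val;
    }

    static uint32_t
    pmu_mmio_rd32(uint32_t addr)
    {
        pmu_iowr(MMIO_ADDR, addr);
        pmu_iowr(MMIO_CTRL, MMIO_CTRL_OP_RD | MMIO_CTRL_TRIGGER);
        while (pmu_iord(MMIO_CTRL) & MMIO_CTRL_STATUS)
            continue;                   /* wait for the engine to go idle */
        return pmu_iord(MMIO_DATA);
    }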
arith.fuc
     48  // $r13 - B
     59  shr b32 $r2 $r13 16
     65  mulu $r12 $r14 $r13
     68  mulu $r3 $r1 $r13  // tmp0 = A_hi * B_lo
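arith.fuc assembles a 32x32 -> 64 multiply from 16-bit halves (the shr b32 ... 16 hits split the operands, the mulu hits form the partial products). The same decomposition in C, checked against a native 64-bit multiply:

    #include <stdint.h>
    #include <stdio.h>

    static uint64_t
    mulu32_sketch(uint32_t a, uint32_t b)
    {
        uint32_t a_lo = a & 0xffff, a_hi = a >> 16;
        uint32_t b_lo = b & 0xffff, b_hi = b >> 16;

        uint64_t lo  = (uint64_t)a_lo * b_lo;               /* bits  0..31 */
        uint64_t mid = (uint64_t)a_hi * b_lo                /* bits 16..47 */
                     + (uint64_t)a_lo * b_hi;
        uint64_t hi  = (uint64_t)a_hi * b_hi;               /* bits 32..63 */

        return lo + (mid << 16) + (hi << 32);
    }

    int
    main(void)
    {
        uint32_t a = 0xdeadbeef, b = 0xcafef00d;

        printf("%llx == %llx\n",
            (unsigned long long)mulu32_sketch(a, b),
            (unsigned long long)((uint64_t)a * b));
        return 0;
    }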
/src/sys/external/bsd/gnu-efi/dist/lib/arm/ |
setjmp.S
     19  mov r3, r13
/src/tests/kernel/arch/x86_64/ |
execregs.c
     63  register long r13 __asm("r13") = nonnull(13);
     85  "+r"(r13),
    106  register long r13 __asm("r13") = nonnull(13);
    139  "+r"(r13),
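execregs.c pins locals to specific registers with GCC explicit register variables and hands them to inline asm through "+r" constraints, so the test values are known to live in, e.g., %r13. A stripped-down illustration of the binding pattern (the arithmetic is mine):

    #include <stdio.h>

    int
    main(void)
    {
    #if defined(__GNUC__) && defined(__x86_64__)
        /* Bind the local to %r13; "+r" marks it read-write for the asm. */
        register long r13 __asm("r13") = 13;

        __asm volatile("addq $1, %0" : "+r"(r13));
        printf("%ld\n", r13);           /* prints 14 */
    #endif
        return 0;
    }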
/src/sys/arch/amd64/amd64/ |
spl.S
    103  * %r13 address to return to
    110  pushq %r13
    204  jmp *%r13            /* back to Xspllower/Xdoreti */
    218  jmp *%r13            /* back to Xspllower/Xdoreti */
    245  jmp *%r13            /* back to Xspllower */
    262  jmp *%r13            /* back to Xdoreti */
    266  jmp *%r13            /* back to Xdoreti */
    307  * r13 - address to resume loop at
    319  pushq %r13
    322  leaq 1f(%rip),%r13   /* address to resume loop at * [all...]
/src/sys/arch/i386/stand/efiboot/bootx64/ |
startprog64.S
    109  movq %rdi, %r13
    110  subq %rsi, %r13
    119  cmpq %r12, %r13    /* overlapping? */
    148  mov %rdi, %r13     /* target for first word */
    154  mov %r12, (%r13)   /* write first word */
    185  cmpq %r12, %r13    /* overlapping? */
/src/sys/arch/amd64/include/ |
i82093reg.h
     74  movq IOAPIC_SC_DATA(%rdi),%r13  ;\
     76  movl (%r13),%r12d               ;\
     79  movl %r12d,(%r13)               ;\
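The i82093reg.h macro fetches the mapped data window from the softc into %r13, reads the current register value through it, modifies %r12d and stores it back. The same read-modify-write over the standard 82093AA select/window pair, roughly, in C (helper names are mine):

    #include <stdint.h>

    #define IOAPIC_REGSEL   0x00    /* register-select window */
    #define IOAPIC_WINDOW   0x10    /* data window */

    static inline uint32_t
    ioapic_read(volatile uint8_t *base, uint32_t reg)
    {
        *(volatile uint32_t *)(base + IOAPIC_REGSEL) = reg;
        return *(volatile uint32_t *)(base + IOAPIC_WINDOW);
    }

    static inline void
    ioapic_write(volatile uint8_t *base, uint32_t reg, uint32_t val)
    {
        *(volatile uint32_t *)(base + IOAPIC_REGSEL) = reg;
        *(volatile uint32_t *)(base + IOAPIC_WINDOW) = val;
    }

    /* Read-modify-write, e.g. to set the mask bit in a redirection entry. */
    static inline void
    ioapic_setbits(volatile uint8_t *base, uint32_t reg, uint32_t bits)
    {
        ioapic_write(base, reg, ioapic_read(base, reg) | bits);
    }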
/src/sys/external/isc/libsodium/dist/src/libsodium/crypto_scalarmult/curve25519/sandy2x/ |
fe51_mul.S
     28  movq %r13,16(%rsp)
     58  mov %rdx,%r13
     74  adc %rdx,%r13
     91  adc %rdx,%r13
    125  adc %rdx,%r13
    137  adc %rdx,%r13
    148  shld $13,%r12,%r13
    153  add %r13,%r14
    187  movq 16(%rsp),%r13
fe51_nsquare.S
     30  movq %r13,16(%rsp)
     59  mov %rax,%r13
     72  add %rax,%r13
    112  add %rax,%r13
    114  shld $13,%r13,%r14
    127  and %rdx,%r13
    128  add %r12,%r13
    139  add %r13,%r9
    164  movq 16(%rsp),%r13
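In fe51_mul.S and fe51_nsquare.S, %r13 carries half of a 128-bit limb product; shld $13 turns the high:low register pair into the product shifted right by 51 (the carry into the next radix-2^51 limb), while an and against the 51-bit mask keeps the low part. A compact C sketch of that carry chain, assuming the usual 5x51-bit curve25519 representation (names are mine):

    #include <stdint.h>

    typedef unsigned __int128 u128;

    #define MASK51  ((1ULL << 51) - 1)

    /*
     * Keep the low 51 bits of each 128-bit accumulator, push the rest into
     * the next limb, and fold the final carry back with * 19, using
     * 2^255 = 19 (mod 2^255 - 19).
     */
    static void
    fe51_carry_sketch(uint64_t h[5], const u128 t[5])
    {
        u128 acc = t[0];

        for (int i = 0; i < 5; i++) {
            u128 carry;

            h[i] = (uint64_t)acc & MASK51;      /* low 51 bits stay put   */
            carry = acc >> 51;                  /* what shld $13 extracts */
            if (i < 4)
                acc = t[i + 1] + carry;
            else
                h[0] += (uint64_t)carry * 19;   /* wrap-around reduction  */
        }
    }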
/src/lib/libc/arch/sh3/gen/ |
_setjmp.S
     60  mov.l r13, @-r4
     78  mov.l @r4+, r13
/src/lib/libc/arch/x86_64/gen/ |
_setjmp.S
     60  movq %r13,(_JB_R13 * 8)(%rdi)
     74  movq (_JB_R13 * 8)(%rdi),%r13
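Both _setjmp.S files save the callee-saved register file, r13/%r13 included, into the jmp_buf so a later _longjmp can rebuild the caller's frame. The user-visible contract is the usual pattern:

    #include <setjmp.h>
    #include <stdio.h>

    static jmp_buf env;

    static void
    fail(void)
    {
        _longjmp(env, 1);       /* transfers control back to the _setjmp call */
    }

    int
    main(void)
    {
        volatile int attempts = 0;  /* volatile: modified before the longjmp */

        if (_setjmp(env) == 0) {
            attempts++;
            fail();
        }
        printf("resumed after %d attempt(s)\n", attempts);
        return 0;
    }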
/src/sys/arch/acorn32/stand/boot32/ |
rmheader.S
    106  stmfd r13!, {r14}
    111  ldmfd r13!, {pc}
/src/sys/arch/arm/arm/ |
fiq_subr.S
     77  stmia r0, {r8-r13}
     97  ldmia r0, {r8-r13}
/src/sys/arch/usermode/usermode/ |
cpufunc.S
     38  movq %r13,32(%rax)
     58  movq 32(%rax),%r13
/src/sys/external/bsd/gnu-efi/dist/lib/x86_64/ |
setjmp.S
     17  movq %r13,0x20(%rdi)
     36  movq 0x20(%rdi), %r13
/src/sys/arch/powerpc/powerpc/ |
lock_stubs.S
     80  stptrcx. %r13,0,%r3
     95  cmpptr %r10,%r13
    128  ori %r7,%r13,RW_WRITE_LOCKED
    154  ori %r7,%r13,RW_WRITE_LOCKED
    186  ori %r9,%r13,RW_WRITE_LOCKED
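lock_stubs.S takes the write side of a rwlock with a load-reserved/store-conditional loop: check that the owner word is free, OR RW_WRITE_LOCKED into the new owner (%r13, presumably curlwp), and stptrcx. it back, retrying if the reservation is lost. The same acquire expressed with a C11 compare-and-swap; the field layout and flag value are simplified stand-ins for the real encoding in sys/rwlock.h:

    #include <stdatomic.h>
    #include <stdbool.h>
    #include <stdint.h>

    #define RW_WRITE_FLAG   0x01UL          /* illustrative, not the real bit */

    typedef struct {
        _Atomic uintptr_t rw_owner;         /* 0 while the lock is free */
    } rwlock_sketch_t;

    /*
     * Write-acquire attempt: install (owner | write flag) only if the word
     * is still 0 -- the CAS analogue of the larx/stcx. retry loop.
     */
    static bool
    rw_write_tryenter_sketch(rwlock_sketch_t *rw, uintptr_t owner)
    {
        uintptr_t expected = 0;

        return atomic_compare_exchange_strong_explicit(&rw->rw_owner,
            &expected, owner | RW_WRITE_FLAG,
            memory_order_acquire, memory_order_relaxed);
    }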