/src/sys/arch/usermode/usermode/
  cpufunc.S:
    33  movq %rdi,%rax
    34  movq %rbx,(%rax)
    35  movq %rsp,8(%rax)
    36  movq %rbp,16(%rax)
    37  movq %r12,24(%rax)
    38  movq %r13,32(%rax)
    39  movq %r14,40(%rax)
    40  movq %r15,48(%rax)
    42  movq %rdx,56(%rax)
    53  movq %rdi,%rax
    [all...]
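The usermode cpufunc.S hit above stores %rbx, %rsp, %rbp and %r12-%r15 (plus one extra value from %rdx) at consecutive 8-byte offsets from the buffer passed in %rdi, i.e. it fills in a callee-saved register save area. A hypothetical C view of the layout those offsets imply (struct and field names are mine, not NetBSD's):

    #include <stdint.h>

    /* Mirrors the offsets used above: 0, 8, ..., 56 bytes from the
     * pointer handed over in %rdi (copied to %rax first). */
    struct saved_regs {
            uint64_t sr_rbx;        /*  0: movq %rbx,(%rax)   */
            uint64_t sr_rsp;        /*  8: movq %rsp,8(%rax)  */
            uint64_t sr_rbp;        /* 16: movq %rbp,16(%rax) */
            uint64_t sr_r12;        /* 24 */
            uint64_t sr_r13;        /* 32 */
            uint64_t sr_r14;        /* 40 */
            uint64_t sr_r15;        /* 48 */
            uint64_t sr_extra;      /* 56: movq %rdx,56(%rax) */
    };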
/src/sys/external/bsd/compiler_rt/dist/lib/builtins/x86_64/
  chkstk2.S:
    8   // _chkstk (_alloca) routine - probe stack between %rsp and (%rsp-%rax) in 4k increments,
    9   // then decrement %rsp by %rax. Preserves all registers except %rsp and flags.
    16  mov %rcx,%rax // x64 _alloca is a normal function with parameter in rcx
    20  cmp $0x1000,%rax
    26  sub $0x1000,%rax
    27  cmp $0x1000,%rax
    30  sub %rax,%rcx
    33  lea 8(%rsp),%rax // load pointer to the return address into rax
    35  mov -8(%rax),%rcx // restore rcx
    [all...]
  chkstk.S:
    12  // themselves. It also does not clobber %rax so we can reuse it when
    21  push %rax
    22  cmp $0x1000,%rax
    28  sub $0x1000,%rax
    29  cmp $0x1000,%rax
    32  sub %rax,%rcx
    34  pop %rax
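The comment at the top of chkstk2.S describes the whole technique: before %rsp is moved down by a large %rax, every 4 KiB page between %rsp and %rsp-%rax is touched from the top down so the guard page is hit in order. A minimal C sketch of that probing idea (hypothetical helper name; the real routine has to be assembly because it may only clobber %rsp and flags):

    #include <stddef.h>

    #define PROBE_STRIDE 0x1000u    /* 4 KiB, the increment used in chkstk2.S */

    /* Touch one byte per page between sp and sp - size, top down, before
     * the caller actually lowers the stack pointer.  Illustrative only. */
    static void stack_probe(volatile char *sp, size_t size)
    {
            while (size > PROBE_STRIDE) {
                    sp -= PROBE_STRIDE;
                    (void)*sp;              /* probe this page */
                    size -= PROBE_STRIDE;
            }
            sp -= size;
            (void)*sp;                      /* probe the final partial page */
    }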
/src/tests/kernel/arch/x86_64/
  threadspfunc.S:
    51  movq %rsp,%rax
    52  addq $-8,%rax
/src/common/lib/libc/arch/x86_64/string/
  memchr.S:
    65  movq (%rdi),%rax /* value to check */
    67  xorq %rsi,%rax /* now looking for zeros */
    69  mov %rax,%rcx
    70  subq %r8,%rax /* x - 0x01 */
    72  andq %r9,%rax /* (x - 0x01) & 0x80 */
    73  andq %rcx,%rax /* ((x - 0x01) & 0x80) & ~x */
    77  bsf %rax,%rax
    79  lea -8(%rax,%rdi),%rax
    [all...]
  strlen.S:
    125 movq %rdi,%rax /* Buffer, %rdi unchanged */
    131 movq (%rax),%rdx /* get bytes to check */
    133 addq $8,%rax
    145 sub %rdi,%rax /* length to next word */
    148 lea -8(%rax,%rdx),%rax
    159 movq (%rax),%rdx /* first data in high bytes */
    173 mov %rdi,%rax
    175 cmpb $0,(%rax)
    177 inc %rax
    [all...]
  strchr.S:
    44   * Registers %rdx, %rcx, %r8-%r11 and %rax are also usable
    67  movq (%rdi),%rax /* bytes to check (x) */
    70  mov %rax,%r10
    71  mov %rax,%r11 /* for 'char' check */
    75  subq %r8,%rax /* x - 0x10 */
    85  andq %r9,%rax /* (x - 0x10) & 0x80 */
    88  andq %r10,%rax /* (x - 0x10) & 0x80 & ~x */
    96  lea -8(%r11,%rdi),%rax
    102 bsf %rax,%rax /* count to NUL */
    [all...]
  strrchr.S:
    16  xorq %rax,%rax
    27  cmoveq %rdi,%rax
    68  leaq -8(%rdi),%rax
    75  leaq -7(%rdi),%rax
    82  leaq -6(%rdi),%rax
    89  leaq -5(%rdi),%rax
    96  leaq -4(%rdi),%rax
    103 leaq -3(%rdi),%rax
    110 leaq -2(%rdi),%rax
    [all...]
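The memchr/strlen/strchr hits above all lean on the same word-at-a-time trick their comments spell out: optionally XOR the word with the wanted byte repeated eight times so matches become zero bytes, then compute (x - 0x01…01) & ~x & 0x80…80, which is non-zero exactly when some byte of x is zero, and use bsf plus an lea to turn the bit position back into a byte address. A rough C sketch of that test, with illustrative names rather than NetBSD's (the real routines also handle unaligned heads, tails and stray matches past the buffer):

    #include <stddef.h>
    #include <stdint.h>

    #define ONES_8  0x0101010101010101ULL
    #define HIGHS_8 0x8080808080808080ULL

    /* Non-zero iff some byte of x is zero -- the "(x - 0x01) & 0x80 & ~x"
     * test quoted in the memchr.S comments. */
    static inline uint64_t has_zero_byte(uint64_t x)
    {
            return (x - ONES_8) & ~x & HIGHS_8;
    }

    /* Word-at-a-time byte search over an aligned buffer, memchr style. */
    static const void *find_byte(const uint64_t *p, unsigned char c, size_t len)
    {
            uint64_t needle = c * ONES_8;   /* wanted byte repeated 8 times */

            for (; len >= 8; len -= 8, p++) {
                    uint64_t hit = has_zero_byte(*p ^ needle);
                    if (hit != 0)           /* bsf + lea in the assembly */
                            return (const char *)p + (__builtin_ctzll(hit) >> 3);
            }
            return NULL;                    /* leftover bytes: byte loop */
    }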
/src/sys/external/isc/libsodium/dist/src/libsodium/crypto_scalarmult/curve25519/sandy2x/
  fe51_mul.S:
    36  imulq $19,%rdx,%rax
    37  movq %rax,64(%rsp)
    39  mov %rax,%r8
    42  imulq $19,%rdx,%rax
    43  movq %rax,72(%rsp)
    45  add %rax,%r8
    47  movq 0(%rsi),%rax
    49  add %rax,%r8
    51  movq 0(%rsi),%rax
    53  mov %rax,%r1
    [all...]
  fe51_nsquare.S:
    38  movq 24(%rsi),%rax
    41  movq %rax,24(%rdi)
    48  mov %rcx,%rax
    51  mov %rax,%r9
    53  mov %rcx,%rax
    55  mov %rax,%r11
    57  mov %rcx,%rax
    59  mov %rax,%r13
    61  mov %rcx,%rax
    63  mov %rax,%r1
    [all...]
/src/sys/external/bsd/gnu-efi/dist/lib/x86_64/
  efi_stub.S:
    108 mov 56+8(%rsp), %rax
    109 mov %rax, 40(%rsp)
    121 mov 56+16(%rsp), %rax
    122 mov %rax, 48(%rsp)
    123 mov 56+8(%rsp), %rax
    124 mov %rax, 40(%rsp)
    136 mov 72+24(%rsp), %rax
    137 mov %rax, 56(%rsp)
    138 mov 72+16(%rsp), %rax
    139 mov %rax, 48(%rsp)
    [all...]
/src/tests/lib/libnvmm/
  h_mem_assist_asm.S:
    57  movq $0x1000,%rax
    60  movq $0x1000,(%rax)
    63  movq $0x2000,(%rax,%r11,8)
    76  movq $0x1000,%rax
    78  movq $0x1000,(%rax)
    80  orb %bl,(%rax)
    82  orw %cx,(%rax)
    85  orq (%rax),%rcx
    86  movq %rcx,(%rax)
    93  movq $0x1000,%rax
    [all...]
/src/sys/arch/amd64/acpi/
  acpi_wakeup_low.S:
    78  movq ACPI_SUSPEND_CR8(%r8),%rax
    79  movq %rax,%cr8
    80  movq ACPI_SUSPEND_CR4(%r8),%rax
    81  movq %rax,%cr4
    82  movq ACPI_SUSPEND_CR3(%r8),%rax
    83  movq %rax,%cr3
    84  movq ACPI_SUSPEND_CR2(%r8),%rax
    85  movq %rax,%cr2
    86  movq ACPI_SUSPEND_CR0(%r8),%rax
    87  movq %rax,%cr0
    [all...]
/src/common/lib/libc/arch/x86_64/gen/
  byte_swap_8.S:
    16  movq %rdi,%rax
/src/lib/libc/arch/x86_64/stdlib/
  labs.S:
    27  movq %rdi,%rax
    28  testq %rax,%rax
    30  negq %rax
  div.S:
    18  orq %rdx,%rax
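The labs.S hit above is the whole routine: copy the argument into the return register, test its sign, and negate only when it is negative. An equivalent C sketch of that test-then-negate pattern (illustrative name, not the libc source):

    /* labs-style absolute value: negate only when the sign test fires
     * (matches the movq/testq/negq sequence above).  LONG_MIN has no
     * positive counterpart, same caveat as the C library labs(3). */
    long my_labs(long v)
    {
            if (v < 0)      /* testq %rax,%rax + conditional jump */
                    v = -v; /* negq %rax */
            return v;
    }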
/src/sys/dev/nvmm/x86/
  nvmm_x86_svmfunc.S:
    67  pushq %rax
    70  popq %rax ;\
    77  pushq %rax
    80  popq %rax ;\
    82  movq CPUVAR(GDT),%rax ;\
    83  andq $~0x0200,4(%rax,%rdx, 1) ;\
    88  pushq %rax
    91  popq %rax ;\
    95   * All GPRs except RAX and RSP, which are taken care of in VMCB.
    145 movq $GSEL(GUDATA_SEL, SEL_UPL),%rax
    [all...]
  nvmm_x86_vmxfunc.S:
    54  xorq %rax,%rax
    57  movq $-1,%rax
    68  xorq %rax,%rax
    71  movq $-1,%rax
    95  pushq %rax
    98  popq %rax
    102 pushq %rax
    105 popq %rax ;\
    [all...]
/src/tests/lib/libc/arch/x86_64/
  return_one.S:
    8   movq $0x1, %rax
/src/sys/arch/amd64/amd64/
  copy.S:
    134 movq %rdi,%rax
    135 subq %rsi,%rax
    136 cmpq %rcx,%rax /* overlapping? */
    148 xorq %rax,%rax
    174 xorq %rax,%rax
    182 movq %rdx,%rax /* save transfer length (bytes) */
    192 movq %rax,%rcx /* length */
    212 movq %rdx,%rax
    [all...]
  spl.S:
    102  * %rax intrsource
    116 movq IS_LWP(%rax),%rdi /* switch to handler LWP */
    137 pushq %rax
    143 popq %rax
    147 pushq %rax
    153 popq %rax
    166 movl IS_MAXLEVEL(%rax),%esi /* ipl to run at */
    213  * %rax prevlwp from cpu_switchto()
    217 CLI(ax) /* %rax not used by Xspllower/Xdoreti */
    282 movq CPUVAR(IPENDING),%rax
    [all...]
  cpufunc.S:
    113 movq $INVPCID_ADDRESS,%rax
    114 invpcid (%rsp),%rax
    124 movq %rdi,%rax
    125 lgdt (%rax)
    153 movq %cr4, %rax
    154 testq $CR4_PGE, %rax
    156 movq %rax, %rdx
    159 movq %rax, %cr4
    168 xorq %rax,%rax
    [all...]
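The copy.S hit at lines 134-136 above is the standard overlap test before a forward copy: compute dst - src and compare it, as an unsigned value, against the byte count; if the difference is smaller than the count, a forward copy would overwrite source bytes it has not read yet, so the copy must run backwards. A hedged C sketch of that decision (illustrative helper, not the actual routine, which does the moves with movsq/movsb and adds fault handling):

    #include <stddef.h>
    #include <stdint.h>

    /* Pick the copy direction the way copy.S does: the unsigned value
     * dst - src is below len exactly when a forward copy would clobber
     * bytes of src before reading them. */
    static void overlap_aware_copy(void *dst, const void *src, size_t len)
    {
            unsigned char *d = dst;
            const unsigned char *s = src;

            if ((uintptr_t)dst - (uintptr_t)src < len) {
                    /* overlapping with dst above src: copy backwards */
                    d += len;
                    s += len;
                    while (len--)
                            *--d = *--s;
            } else {
                    /* disjoint, or dst below src: forward copy is safe */
                    while (len--)
                            *d++ = *s++;
            }
    }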
/src/sys/lib/libkern/arch/x86_64/
  scanc.S:
    45  xorq %rax,%rax
    49  testb %dl,(%rax,%rdi)
/src/sys/external/bsd/compiler_rt/dist/lib/xray/
  xray_trampoline_x86_64.S:
    35  movq %rax, 88(%rsp)
    60  movq 88(%rsp), %rax
    87  callq *%rax
    111 movq ASM_SYMBOL(_ZN6__xray19XRayPatchedFunctionE)(%rip), %rax
    112 testq %rax, %rax
    143 movq %rax, 8(%rsp)
    145 movq ASM_SYMBOL(_ZN6__xray19XRayPatchedFunctionE)(%rip), %rax
    146 testq %rax,%rax
    [all...]
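The XRay trampoline hits show the file's central pattern: spill %rax and the argument registers around the instrumentation point, load the __xray::XRayPatchedFunction handler pointer, skip everything if no handler is installed, and otherwise call through %rax. A rough C rendering of that null-checked indirect call (the type and variable names here are illustrative stand-ins; the real trampoline is assembly precisely so the register spills stay explicit):

    #include <stddef.h>
    #include <stdint.h>

    /* Stand-in for the handler slot the assembly loads with
     * movq ...XRayPatchedFunction(%rip),%rax. */
    typedef void (*xray_handler_t)(int32_t func_id, int entry_type);
    static xray_handler_t patched_function;

    static void xray_entry_hook(int32_t func_id)
    {
            xray_handler_t h = patched_function;    /* movq ...,%rax   */

            if (h == NULL)                          /* testq %rax,%rax */
                    return;
            h(func_id, 0);                          /* callq *%rax     */
    }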
/src/lib/libc/arch/x86_64/sys/
  cerror.S:
    52  movl %r12d,(%rax)
    53  movq $-1,%rax