
Lines Matching refs:movq

57 	movq	$-1,%rax
71 movq $-1,%rax
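
Source lines 57 and 71 are the -1 failure return of thin VMX instruction wrappers: the VMX instructions signal failure through CF (VMfailInvalid) and ZF (VMfailValid) rather than through a return value, so the wrapper translates the flags into -1. A minimal sketch of one such wrapper; the function name, operand, and labels are assumptions, not taken from this file:

	/* Sketch only: returns 0 on success, -1 if vmxon failed. */
	ENTRY(vmx_vmxon)
		vmxon	(%rdi)		/* %rdi: pointer to the PA of the VMXON region (assumed) */
		jc	.Lvmxon_fail	/* CF=1: VMfailInvalid */
		jz	.Lvmxon_fail	/* ZF=1: VMfailValid */
		xorq	%rax,%rax	/* success */
		ret
	.Lvmxon_fail:
		movq	$-1,%rax	/* failure */
		ret
	END(vmx_vmxon)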
113 movq %rcx,(NVMM_X64_GPR_RCX * 8)(reg) ;\
114 movq %rdx,(NVMM_X64_GPR_RDX * 8)(reg) ;\
115 movq %rbx,(NVMM_X64_GPR_RBX * 8)(reg) ;\
116 movq %rbp,(NVMM_X64_GPR_RBP * 8)(reg) ;\
117 movq %rsi,(NVMM_X64_GPR_RSI * 8)(reg) ;\
118 movq %rdi,(NVMM_X64_GPR_RDI * 8)(reg) ;\
119 movq %r8,(NVMM_X64_GPR_R8 * 8)(reg) ;\
120 movq %r9,(NVMM_X64_GPR_R9 * 8)(reg) ;\
121 movq %r10,(NVMM_X64_GPR_R10 * 8)(reg) ;\
122 movq %r11,(NVMM_X64_GPR_R11 * 8)(reg) ;\
123 movq %r12,(NVMM_X64_GPR_R12 * 8)(reg) ;\
124 movq %r13,(NVMM_X64_GPR_R13 * 8)(reg) ;\
125 movq %r14,(NVMM_X64_GPR_R14 * 8)(reg) ;\
126 movq %r15,(NVMM_X64_GPR_R15 * 8)(reg)
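
Source lines 113-126, with their `;\` continuations, are the body of a guest-register save macro: each GPR lands in an 8-byte slot of an array addressed by `reg`, indexed by its NVMM_X64_GPR_* constant. Reassembled as a macro (the name is an assumption; note that %rax is absent here and is stored separately on the exit path, see source line 220 below):

	/* Sketch: reconstructed from the matches at source lines 113-126. */
	#define	GUEST_SAVE_GPRS(reg)				\
		movq	%rcx,(NVMM_X64_GPR_RCX * 8)(reg)	;\
		movq	%rdx,(NVMM_X64_GPR_RDX * 8)(reg)	;\
		movq	%rbx,(NVMM_X64_GPR_RBX * 8)(reg)	;\
		movq	%rbp,(NVMM_X64_GPR_RBP * 8)(reg)	;\
		movq	%rsi,(NVMM_X64_GPR_RSI * 8)(reg)	;\
		movq	%rdi,(NVMM_X64_GPR_RDI * 8)(reg)	;\
		movq	%r8,(NVMM_X64_GPR_R8 * 8)(reg)		;\
		movq	%r9,(NVMM_X64_GPR_R9 * 8)(reg)		;\
		movq	%r10,(NVMM_X64_GPR_R10 * 8)(reg)	;\
		movq	%r11,(NVMM_X64_GPR_R11 * 8)(reg)	;\
		movq	%r12,(NVMM_X64_GPR_R12 * 8)(reg)	;\
		movq	%r13,(NVMM_X64_GPR_R13 * 8)(reg)	;\
		movq	%r14,(NVMM_X64_GPR_R14 * 8)(reg)	;\
		movq	%r15,(NVMM_X64_GPR_R15 * 8)(reg)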
129 movq (NVMM_X64_GPR_RCX * 8)(reg),%rcx ;\
130 movq (NVMM_X64_GPR_RDX * 8)(reg),%rdx ;\
131 movq (NVMM_X64_GPR_RBX * 8)(reg),%rbx ;\
132 movq (NVMM_X64_GPR_RBP * 8)(reg),%rbp ;\
133 movq (NVMM_X64_GPR_RSI * 8)(reg),%rsi ;\
134 movq (NVMM_X64_GPR_RDI * 8)(reg),%rdi ;\
135 movq (NVMM_X64_GPR_R8 * 8)(reg),%r8 ;\
136 movq (NVMM_X64_GPR_R9 * 8)(reg),%r9 ;\
137 movq (NVMM_X64_GPR_R10 * 8)(reg),%r10 ;\
138 movq (NVMM_X64_GPR_R11 * 8)(reg),%r11 ;\
139 movq (NVMM_X64_GPR_R12 * 8)(reg),%r12 ;\
140 movq (NVMM_X64_GPR_R13 * 8)(reg),%r13 ;\
141 movq (NVMM_X64_GPR_R14 * 8)(reg),%r14 ;\
142 movq (NVMM_X64_GPR_R15 * 8)(reg),%r15 ;\
143 movq (NVMM_X64_GPR_RAX * 8)(reg),%rax
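
Source lines 129-143 are the matching restore macro. %rax is loaded last: until that point the base register (here %rax itself, per source line 156) still holds the pointer into the state array. A sketch of how the restore pairs with a guest entry; the macro name is an assumption:

	/* Sketch only: typical pairing around a VMX guest entry. */
	movq	%rdi,%rax		/* %rdi: pointer to the guest GPR array (assumed) */
	GUEST_RESTORE_GPRS(%rax)	/* loads every GPR, %rax itself last */
	vmlaunch			/* or vmresume on subsequent entries */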
156 movq %rdi,%rax
160 movq $VMCS_HOST_RSP,%rdi
161 movq %rsp,%rsi
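
Source lines 156-161 (and their twin at 189-194 on the resume path) stage the VMCS_HOST_RSP field encoding and the live stack pointer for a vmwrite, so that after a VM exit the host continues on this exact stack. In AT&T syntax the value comes first and the field second; a sketch, with the failure labels assumed (see the end of this listing):

	/* Sketch: write the current %rsp into the VMCS host-state area. */
	movq	$VMCS_HOST_RSP,%rdi	/* VMCS field encoding */
	movq	%rsp,%rsi		/* value: current host stack pointer */
	vmwrite	%rsi,%rdi		/* AT&T order: vmwrite value,field */
	jc	.Lfail_invalid		/* labels assumed */
	jz	.Lfail_valid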
174 movq $-1,%rax
189 movq %rdi,%rax
193 movq $VMCS_HOST_RSP,%rdi
194 movq %rsp,%rsi
207 movq $-1,%rax
217 movq 8(%rsp),%rax
220 movq %rbx,(NVMM_X64_GPR_RAX * 8)(%rax)
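
Source lines 217-220 are the tail of the VM-exit path: the pointer to the GPR array is recovered from the stack, and the guest's %rax, which the save macro cannot handle because %rax is needed as the base register, is written to its slot from %rbx. One way the two-line gap between 217 and 220 plausibly fills in; the push/pop and the macro invocation are assumptions:

	/* Sketch of the exit path implied by source lines 217-220. */
	pushq	%rax			/* stash guest %rax; state pointer now at 8(%rsp) */
	movq	8(%rsp),%rax		/* recover pointer to the GPR array */
	GUEST_SAVE_GPRS(%rax)		/* save %rcx..%r15 and %rbx */
	popq	%rbx			/* %rbx is free now; fetch the stashed guest %rax */
	movq	%rbx,(NVMM_X64_GPR_RAX * 8)(%rax)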
234 movq $.Lvmx_validstr,%rdi
239 movq $.Lvmx_invalidstr,%rdi
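
The final matches load a "valid" and an "invalid" diagnostic string into %rdi, the first SysV ABI argument register. That mirrors the two VMX failure modes: ZF=1 is VMfailValid (a current VMCS exists and holds an error code), CF=1 is VMfailInvalid (no current VMCS). A sketch of the dispatch; the label layout and call target are assumptions:

	/* Sketch: report why a VMX instruction failed. */
	.Lfail_valid:				/* ZF=1: error code readable from the VMCS */
		movq	$.Lvmx_validstr,%rdi
		call	_C_LABEL(panic)		/* panic(fmt), per kernel convention (assumed) */
	.Lfail_invalid:				/* CF=1: no current VMCS */
		movq	$.Lvmx_invalidstr,%rdi
		call	_C_LABEL(panic)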