/*	$NetBSD: nvmm_x86_vmxfunc.S,v 1.6 2020/09/05 07:22:26 maxv Exp $	*/

/*
 * Copyright (c) 2018-2020 Maxime Villard, m00nbsd.net
 * All rights reserved.
 *
 * This code is part of the NVMM hypervisor.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

/* Override user-land alignment before including asm.h */
#define	ALIGN_DATA	.align	8
#define	ALIGN_TEXT	.align	16,0x90
#define	_ALIGN_TEXT	ALIGN_TEXT

#define	_LOCORE
#include "assym.h"
#include <machine/asm.h>
#include <machine/segments.h>
#include <x86/specialreg.h>

#define	ASM_NVMM
#include <dev/nvmm/x86/nvmm_x86.h>

	.text

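/*
 * Low-level VMX transitions for NVMM: thin wrappers around VMXON/VMXOFF,
 * plus the VMLAUNCH/VMRESUME entry paths and the #VMEXIT landing point.
 * The wrappers return 0 on success and -1 on failure; VMX instructions
 * report failure through RFLAGS (CF for VMfailInvalid, ZF for VMfailValid),
 * hence the jz/jc checks below.
 */
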
/*
 * %rdi = *pa
 */
ENTRY(_vmx_vmxon)
	vmxon	(%rdi)
	jz	.Lfail_vmxon
	jc	.Lfail_vmxon
	xorq	%rax,%rax
	retq
.Lfail_vmxon:
	movq	$-1,%rax
	retq
END(_vmx_vmxon)

/*
 * no arg
 */
ENTRY(_vmx_vmxoff)
	vmxoff
	jz	.Lfail_vmxoff
	jc	.Lfail_vmxoff
	xorq	%rax,%rax
	retq
.Lfail_vmxoff:
	movq	$-1,%rax
	retq
END(_vmx_vmxoff)

/* Redefined here: the VMCS field encoding of the Host RSP field. */
#define	VMCS_HOST_RSP	0x00006C14

#define	HOST_SAVE_GPRS		\
	pushq	%rbx		;\
	pushq	%rbp		;\
	pushq	%r12		;\
	pushq	%r13		;\
	pushq	%r14		;\
	pushq	%r15

#define	HOST_RESTORE_GPRS	\
	popq	%r15		;\
	popq	%r14		;\
	popq	%r13		;\
	popq	%r12		;\
	popq	%rbp		;\
	popq	%rbx

#define	HOST_SAVE_RAX		\
	pushq	%rax

#define	HOST_RESTORE_RAX	\
	popq	%rax

#define	HOST_SAVE_LDT		\
	sldtw	%ax		;\
	pushq	%rax

#define	HOST_RESTORE_LDT	\
	popq	%rax		;\
	lldtw	%ax

/*
 * We don't save RAX (done manually), but we do restore it.
 */
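/*
 * Guest RSP and RIP are not handled by these macros: they live in the
 * VMCS and are switched by the CPU itself on VM-entry and #VMEXIT.
 */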

#define	GUEST_SAVE_GPRS(reg)				\
	movq	%rcx,(NVMM_X64_GPR_RCX * 8)(reg)	;\
	movq	%rdx,(NVMM_X64_GPR_RDX * 8)(reg)	;\
	movq	%rbx,(NVMM_X64_GPR_RBX * 8)(reg)	;\
	movq	%rbp,(NVMM_X64_GPR_RBP * 8)(reg)	;\
	movq	%rsi,(NVMM_X64_GPR_RSI * 8)(reg)	;\
	movq	%rdi,(NVMM_X64_GPR_RDI * 8)(reg)	;\
	movq	%r8,(NVMM_X64_GPR_R8 * 8)(reg)		;\
	movq	%r9,(NVMM_X64_GPR_R9 * 8)(reg)		;\
	movq	%r10,(NVMM_X64_GPR_R10 * 8)(reg)	;\
	movq	%r11,(NVMM_X64_GPR_R11 * 8)(reg)	;\
	movq	%r12,(NVMM_X64_GPR_R12 * 8)(reg)	;\
	movq	%r13,(NVMM_X64_GPR_R13 * 8)(reg)	;\
	movq	%r14,(NVMM_X64_GPR_R14 * 8)(reg)	;\
	movq	%r15,(NVMM_X64_GPR_R15 * 8)(reg)

#define	GUEST_RESTORE_GPRS(reg)				\
	movq	(NVMM_X64_GPR_RCX * 8)(reg),%rcx	;\
	movq	(NVMM_X64_GPR_RDX * 8)(reg),%rdx	;\
	movq	(NVMM_X64_GPR_RBX * 8)(reg),%rbx	;\
	movq	(NVMM_X64_GPR_RBP * 8)(reg),%rbp	;\
	movq	(NVMM_X64_GPR_RSI * 8)(reg),%rsi	;\
	movq	(NVMM_X64_GPR_RDI * 8)(reg),%rdi	;\
	movq	(NVMM_X64_GPR_R8 * 8)(reg),%r8		;\
	movq	(NVMM_X64_GPR_R9 * 8)(reg),%r9		;\
	movq	(NVMM_X64_GPR_R10 * 8)(reg),%r10	;\
	movq	(NVMM_X64_GPR_R11 * 8)(reg),%r11	;\
	movq	(NVMM_X64_GPR_R12 * 8)(reg),%r12	;\
	movq	(NVMM_X64_GPR_R13 * 8)(reg),%r13	;\
	movq	(NVMM_X64_GPR_R14 * 8)(reg),%r14	;\
	movq	(NVMM_X64_GPR_R15 * 8)(reg),%r15	;\
	movq	(NVMM_X64_GPR_RAX * 8)(reg),%rax

/*
 * %rdi = VA of guest GPR state
 */
ENTRY(vmx_vmlaunch)
	/* Save the Host GPRs. */
	HOST_SAVE_GPRS

	/* Save the Host LDT. */
	HOST_SAVE_LDT

	/* Save the Host RAX. */
	movq	%rdi,%rax
	pushq	%rax

	/* Save the Host RSP. */
	movq	$VMCS_HOST_RSP,%rdi
	movq	%rsp,%rsi
	vmwrite	%rsi,%rdi

	/* Restore the Guest GPRs. */
	GUEST_RESTORE_GPRS(%rax)

	/* Run the VM. */
	vmlaunch
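	/*
	 * If vmlaunch succeeded we do not get here: the guest runs until a
	 * #VMEXIT, at which point the CPU reloads the host RSP written above
	 * and resumes at vmx_resume_rip. Falling through to the code below
	 * therefore means the launch failed.
	 */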

	/* Failure. */
	addq	$8,%rsp
	HOST_RESTORE_LDT
	HOST_RESTORE_GPRS
	movq	$-1,%rax
	retq
END(vmx_vmlaunch)

/*
 * %rdi = VA of guest GPR state
 */
ENTRY(vmx_vmresume)
	/* Save the Host GPRs. */
	HOST_SAVE_GPRS

	/* Save the Host LDT. */
	HOST_SAVE_LDT

	/* Save the Host RAX. */
	movq	%rdi,%rax
	pushq	%rax

	/* Save the Host RSP. */
	movq	$VMCS_HOST_RSP,%rdi
	movq	%rsp,%rsi
	vmwrite	%rsi,%rdi

	/* Restore the Guest GPRs. */
	GUEST_RESTORE_GPRS(%rax)

	/* Run the VM. */
	vmresume
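	/*
	 * As with vmlaunch above, success does not return here; a #VMEXIT
	 * lands in vmx_resume_rip. Reaching the code below means vmresume
	 * failed.
	 */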

	/* Failure. */
	addq	$8,%rsp
	HOST_RESTORE_LDT
	HOST_RESTORE_GPRS
	movq	$-1,%rax
	retq
END(vmx_vmresume)

/*
 * The CPU jumps here after a #VMEXIT.
 */
ENTRY(vmx_resume_rip)
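	/*
	 * The host RSP was just reloaded from the VMCS, so the VA of the
	 * guest GPR area pushed in vmx_vmlaunch/vmx_vmresume sits at
	 * (%rsp), and at 8(%rsp) once the guest RAX has been pushed below.
	 */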
	/* Save the Guest GPRs. RAX done manually. */
	pushq	%rax
	movq	8(%rsp),%rax
	GUEST_SAVE_GPRS(%rax)
	popq	%rbx
	movq	%rbx,(NVMM_X64_GPR_RAX * 8)(%rax)
	addq	$8,%rsp

	/* Restore the Host LDT. */
	HOST_RESTORE_LDT

	/* Restore the Host GPRs. */
	HOST_RESTORE_GPRS

	xorq	%rax,%rax
	retq
END(vmx_resume_rip)

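/*
 * Panic helpers, called when a VMX instruction fails: "valid" means the
 * failure left an error code in the current VMCS (ZF set), "invalid"
 * means there was no valid current VMCS (CF set).
 */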
ENTRY(vmx_insn_failvalid)
	movq	$.Lvmx_validstr,%rdi
	call	_C_LABEL(panic)
END(vmx_insn_failvalid)

ENTRY(vmx_insn_failinvalid)
	movq	$.Lvmx_invalidstr,%rdi
	call	_C_LABEL(panic)
END(vmx_insn_failinvalid)

	.section ".rodata"

.Lvmx_validstr:
	.string	"VMX fail valid\0"
.Lvmx_invalidstr:
	.string	"VMX fail invalid\0"