/*	$NetBSD: nvmm_x86_vmxfunc.S,v 1.3 2019/04/27 08:16:19 maxv Exp $	*/

/*
 * Copyright (c) 2018 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Maxime Villard.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/* Override user-land alignment before including asm.h */
#define ALIGN_DATA	.align	8
#define ALIGN_TEXT	.align	16,0x90
#define _ALIGN_TEXT	ALIGN_TEXT

#define _LOCORE
#include "assym.h"
#include <machine/asm.h>
#include <machine/segments.h>
#include <x86/specialreg.h>

#define ASM_NVMM
#include <dev/nvmm/x86/nvmm_x86.h>

	.text

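/*
 * Error reporting convention of the VMX instructions: CF set means
 * VMfailInvalid, ZF set means VMfailValid (with an error number in the
 * VM-instruction error field of the current VMCS). The routines below
 * check both conditions and report failure as a -1 return value, 0 on
 * success.
 */
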
/*
 * %rdi = *pa
 */
ENTRY(_vmx_vmxon)
	vmxon	(%rdi)
	jz	.Lfail_vmxon
	jc	.Lfail_vmxon
	xorq	%rax,%rax
	retq
.Lfail_vmxon:
	movq	$-1,%rax
	retq
END(_vmx_vmxon)

/*
 * no arg
 */
ENTRY(_vmx_vmxoff)
	vmxoff
	jz	.Lfail_vmxoff
	jc	.Lfail_vmxoff
	xorq	%rax,%rax
	retq
.Lfail_vmxoff:
	movq	$-1,%rax
	retq
END(_vmx_vmxoff)

/* VMCS field encoding of the host RSP, redefined here locally. */
#define VMCS_HOST_RSP	0x00006C14
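
/*
 * The host RSP field is rewritten immediately before each VM entry, after
 * the guest GPR state pointer has been pushed: on VM exit the CPU reloads
 * %rsp from this field, so vmx_resume_rip finds that pointer right at the
 * top of its stack.
 */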

#define HOST_SAVE_GPRS		\
	pushq	%rbx		;\
	pushq	%rbp		;\
	pushq	%r12		;\
	pushq	%r13		;\
	pushq	%r14		;\
	pushq	%r15

#define HOST_RESTORE_GPRS	\
	popq	%r15		;\
	popq	%r14		;\
	popq	%r13		;\
	popq	%r12		;\
	popq	%rbp		;\
	popq	%rbx

#define HOST_SAVE_RAX		\
	pushq	%rax

#define HOST_RESTORE_RAX	\
	popq	%rax

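/*
 * VM exits load LDTR with a null selector, so the host LDT selector must be
 * saved by hand before VM entry and reloaded by hand afterwards.
 */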
#define HOST_SAVE_LDT		\
	sldtw	%ax		;\
	pushw	%ax

#define HOST_RESTORE_LDT	\
	popw	%ax		;\
	lldtw	%ax

/*
 * We don't save RAX (done manually), but we do restore it.
 */

#define GUEST_SAVE_GPRS(reg)				\
	movq	%rcx,(NVMM_X64_GPR_RCX * 8)(reg)	;\
	movq	%rdx,(NVMM_X64_GPR_RDX * 8)(reg)	;\
	movq	%rbx,(NVMM_X64_GPR_RBX * 8)(reg)	;\
	movq	%rbp,(NVMM_X64_GPR_RBP * 8)(reg)	;\
	movq	%rsi,(NVMM_X64_GPR_RSI * 8)(reg)	;\
	movq	%rdi,(NVMM_X64_GPR_RDI * 8)(reg)	;\
	movq	%r8,(NVMM_X64_GPR_R8 * 8)(reg)		;\
	movq	%r9,(NVMM_X64_GPR_R9 * 8)(reg)		;\
	movq	%r10,(NVMM_X64_GPR_R10 * 8)(reg)	;\
	movq	%r11,(NVMM_X64_GPR_R11 * 8)(reg)	;\
	movq	%r12,(NVMM_X64_GPR_R12 * 8)(reg)	;\
	movq	%r13,(NVMM_X64_GPR_R13 * 8)(reg)	;\
	movq	%r14,(NVMM_X64_GPR_R14 * 8)(reg)	;\
	movq	%r15,(NVMM_X64_GPR_R15 * 8)(reg)

#define GUEST_RESTORE_GPRS(reg)				\
	movq	(NVMM_X64_GPR_RCX * 8)(reg),%rcx	;\
	movq	(NVMM_X64_GPR_RDX * 8)(reg),%rdx	;\
	movq	(NVMM_X64_GPR_RBX * 8)(reg),%rbx	;\
	movq	(NVMM_X64_GPR_RBP * 8)(reg),%rbp	;\
	movq	(NVMM_X64_GPR_RSI * 8)(reg),%rsi	;\
	movq	(NVMM_X64_GPR_RDI * 8)(reg),%rdi	;\
	movq	(NVMM_X64_GPR_R8 * 8)(reg),%r8		;\
	movq	(NVMM_X64_GPR_R9 * 8)(reg),%r9		;\
	movq	(NVMM_X64_GPR_R10 * 8)(reg),%r10	;\
	movq	(NVMM_X64_GPR_R11 * 8)(reg),%r11	;\
	movq	(NVMM_X64_GPR_R12 * 8)(reg),%r12	;\
	movq	(NVMM_X64_GPR_R13 * 8)(reg),%r13	;\
	movq	(NVMM_X64_GPR_R14 * 8)(reg),%r14	;\
	movq	(NVMM_X64_GPR_R15 * 8)(reg),%r15	;\
	movq	(NVMM_X64_GPR_RAX * 8)(reg),%rax
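
/*
 * Guest %rip and %rsp are not handled by these macros: VM entry loads them
 * from the guest-state area of the VMCS, and VM exit saves them back there.
 */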

/*
 * %rdi = VA of guest GPR state
 */
ENTRY(vmx_vmlaunch)
	/* Save the Host GPRs. */
	HOST_SAVE_GPRS

	/* Disable Host interrupts. */
	cli

	/* Save the Host LDT. */
	HOST_SAVE_LDT

	/* Save the Host RAX. */
	movq	%rdi,%rax
	pushq	%rax

	/* Save the Host RSP. */
	movq	$VMCS_HOST_RSP,%rdi
	movq	%rsp,%rsi
	vmwrite	%rsi,%rdi

	/* Restore the Guest GPRs. */
	GUEST_RESTORE_GPRS(%rax)

	/* Run the VM. */
	vmlaunch
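	/*
	 * A successful VM entry does not fall through: the next VM exit
	 * resumes at vmx_resume_rip, which the VMCS host RIP field is
	 * expected to point at (set up by the C code). Reaching the code
	 * below therefore means the VM entry itself failed.
	 */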

	/* Failure. */
	addq	$8,%rsp
	HOST_RESTORE_LDT
	sti
	HOST_RESTORE_GPRS
	movq	$-1,%rax
	retq
END(vmx_vmlaunch)

/*
 * %rdi = VA of guest GPR state
 */
ENTRY(vmx_vmresume)
	/* Save the Host GPRs. */
	HOST_SAVE_GPRS

	/* Disable Host interrupts. */
	cli

	/* Save the Host LDT. */
	HOST_SAVE_LDT

	/* Save the Host RAX. */
	movq	%rdi,%rax
	pushq	%rax

	/* Save the Host RSP. */
	movq	$VMCS_HOST_RSP,%rdi
	movq	%rsp,%rsi
	vmwrite	%rsi,%rdi

	/* Restore the Guest GPRs. */
	GUEST_RESTORE_GPRS(%rax)

	/* Run the VM. */
	vmresume
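	/*
	 * As with vmlaunch above, falling through here means the VM entry
	 * failed.
	 */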

	/* Failure. */
	addq	$8,%rsp
	HOST_RESTORE_LDT
	sti
	HOST_RESTORE_GPRS
	movq	$-1,%rax
	retq
END(vmx_vmresume)

/*
 * The CPU jumps here after a #VMEXIT.
 */
ENTRY(vmx_resume_rip)
	/* Save the Guest GPRs. RAX done manually. */
	pushq	%rax
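	/*
	 * 8(%rsp) skips the guest %rax pushed just above; it is the guest
	 * GPR state pointer that vmx_vmlaunch/vmx_vmresume pushed before
	 * VM entry.
	 */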
	movq	8(%rsp),%rax
	GUEST_SAVE_GPRS(%rax)
	popq	%rbx
	movq	%rbx,(NVMM_X64_GPR_RAX * 8)(%rax)
	addq	$8,%rsp

	/* Restore the Host LDT. */
	HOST_RESTORE_LDT

	/* Enable Host interrupts. */
	sti

	/* Restore the Host GPRs. */
	HOST_RESTORE_GPRS

	xorq	%rax,%rax
	retq
END(vmx_resume_rip)

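/*
 * Panic helpers for the two VMX failure modes: "fail valid" (ZF set, an
 * error number is available in the VM-instruction error field) and "fail
 * invalid" (CF set, no current VMCS).
 */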
ENTRY(vmx_insn_failvalid)
	movq	$.Lvmx_validstr,%rdi
	call	_C_LABEL(panic)
END(vmx_insn_failvalid)

ENTRY(vmx_insn_failinvalid)
	movq	$.Lvmx_invalidstr,%rdi
	call	_C_LABEL(panic)
END(vmx_insn_failinvalid)

	.section ".rodata"

.Lvmx_validstr:
	.string	"VMX fail valid\0"
.Lvmx_invalidstr:
	.string	"VMX fail invalid\0"