/*	$NetBSD: nvmm_x86_svmfunc.S,v 1.1 2018/11/07 07:43:08 maxv Exp $	*/

/*
 * Copyright (c) 2018 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Maxime Villard.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */

/* Override user-land alignment before including asm.h */
#define	ALIGN_DATA	.align	8
#define	ALIGN_TEXT	.align	16,0x90
#define	_ALIGN_TEXT	ALIGN_TEXT

#define _LOCORE
#include "assym.h"
#include <machine/asm.h>
#include <machine/segments.h>
#include <x86/specialreg.h>

#define ASM_NVMM
#include <dev/nvmm/x86/nvmm_x86.h>

	.text

#define HOST_SAVE_GPRS		\
	pushq	%rbx		;\
	pushq	%rbp		;\
	pushq	%r12		;\
	pushq	%r13		;\
	pushq	%r14		;\
	pushq	%r15

#define HOST_RESTORE_GPRS	\
	popq	%r15		;\
	popq	%r14		;\
	popq	%r13		;\
	popq	%r12		;\
	popq	%rbp		;\
	popq	%rbx

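/*
 * rdmsr/wrmsr move the MSR selected by %ecx through %edx:%eax, so both
 * halves are pushed and popped around the guest run.
 */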
#define HOST_SAVE_MSR(msr)	\
	movq	$msr,%rcx	;\
	rdmsr			;\
	pushq	%rdx		;\
	pushq	%rax

#define HOST_RESTORE_MSR(msr)	\
	popq	%rax		;\
	popq	%rdx		;\
	movq	$msr,%rcx	;\
	wrmsr

#define HOST_SAVE_SEGREG(sreg)	\
	movw	sreg,%ax	;\
	pushw	%ax

#define HOST_RESTORE_SEGREG(sreg)\
	popw	%ax		;\
	movw	%ax,sreg

#define HOST_SAVE_TR		\
	strw	%ax		;\
	pushw	%ax

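/*
 * ltr faults on a busy TSS descriptor, so the restore path first clears the
 * busy bit (bit 9 of the descriptor's second 32-bit word, i.e. type bit 1)
 * in the GDT entry before reloading %tr.
 */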
#define HOST_RESTORE_TR				\
	popw	%ax				;\
	movzwq	%ax,%rdx			;\
	movq	CPUVAR(GDT),%rax		;\
	andq	$~0x0200,4(%rax,%rdx, 1)	;\
	ltrw	%dx

#define HOST_SAVE_LDT		\
	sldtw	%ax		;\
	pushw	%ax

#define HOST_RESTORE_LDT	\
	popw	%ax		;\
	lldtw	%ax

/*
 * All GPRs except RAX and RSP, which are taken care of in VMCB.
 */
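/*
 * vmrun loads guest RAX and RSP from the VMCB state-save area and #VMEXIT
 * writes them back there, which is why they are skipped here.
 */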

#define GUEST_SAVE_GPRS(reg)				\
	movq	%rbx,(NVMM_X64_GPR_RBX * 8)(reg)	;\
	movq	%rcx,(NVMM_X64_GPR_RCX * 8)(reg)	;\
	movq	%rdx,(NVMM_X64_GPR_RDX * 8)(reg)	;\
	movq	%r8,(NVMM_X64_GPR_R8 * 8)(reg)		;\
	movq	%r9,(NVMM_X64_GPR_R9 * 8)(reg)		;\
	movq	%r10,(NVMM_X64_GPR_R10 * 8)(reg)	;\
	movq	%r11,(NVMM_X64_GPR_R11 * 8)(reg)	;\
	movq	%r12,(NVMM_X64_GPR_R12 * 8)(reg)	;\
	movq	%r13,(NVMM_X64_GPR_R13 * 8)(reg)	;\
	movq	%r14,(NVMM_X64_GPR_R14 * 8)(reg)	;\
	movq	%r15,(NVMM_X64_GPR_R15 * 8)(reg)	;\
	movq	%rbp,(NVMM_X64_GPR_RBP * 8)(reg)	;\
	movq	%rdi,(NVMM_X64_GPR_RDI * 8)(reg)	;\
	movq	%rsi,(NVMM_X64_GPR_RSI * 8)(reg)

#define GUEST_RESTORE_GPRS(reg)				\
	movq	(NVMM_X64_GPR_RBX * 8)(reg),%rbx	;\
	movq	(NVMM_X64_GPR_RCX * 8)(reg),%rcx	;\
	movq	(NVMM_X64_GPR_RDX * 8)(reg),%rdx	;\
	movq	(NVMM_X64_GPR_R8 * 8)(reg),%r8		;\
	movq	(NVMM_X64_GPR_R9 * 8)(reg),%r9		;\
	movq	(NVMM_X64_GPR_R10 * 8)(reg),%r10	;\
	movq	(NVMM_X64_GPR_R11 * 8)(reg),%r11	;\
	movq	(NVMM_X64_GPR_R12 * 8)(reg),%r12	;\
	movq	(NVMM_X64_GPR_R13 * 8)(reg),%r13	;\
	movq	(NVMM_X64_GPR_R14 * 8)(reg),%r14	;\
	movq	(NVMM_X64_GPR_R15 * 8)(reg),%r15	;\
	movq	(NVMM_X64_GPR_RBP * 8)(reg),%rbp	;\
	movq	(NVMM_X64_GPR_RDI * 8)(reg),%rdi	;\
	movq	(NVMM_X64_GPR_RSI * 8)(reg),%rsi

/*
 * %rdi = PA of VMCB
 * %rsi = VA of guest GPR state
 */
ENTRY(svm_vmrun)
	/* Save the Host GPRs. */
	HOST_SAVE_GPRS

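	/*
	 * clgi clears the SVM global interrupt flag (GIF): interrupts and
	 * NMIs are held off until the matching stgi at the end.
	 */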
	/* Disable Host interrupts. */
	clgi

	/* Save the Host TR. */
	HOST_SAVE_TR

	/* Save the variable Host MSRs. */
	HOST_SAVE_MSR(MSR_KERNELGSBASE)
	HOST_SAVE_MSR(MSR_GSBASE)
	HOST_SAVE_MSR(MSR_FSBASE)

	/* Reset the Host Segregs. */
	movq	$GSEL(GUDATA_SEL, SEL_UPL),%rax
	movw	%ax,%ds
	movw	%ax,%es
	xorq	%rax,%rax
	movw	%ax,%fs
	movw	%ax,%gs

	/* Save some Host Segregs. */
	HOST_SAVE_SEGREG(%fs)
	HOST_SAVE_SEGREG(%gs)

	/* Save the Host LDT. */
	HOST_SAVE_LDT

	/* Prepare RAX. */
	pushq	%rsi
	pushq	%rdi

	/* Restore the Guest GPRs. */
	movq	%rsi,%rax
	GUEST_RESTORE_GPRS(%rax)

	/* Set RAX. */
	popq	%rax

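	/*
	 * vmload/vmsave swap the extra guest state that vmrun does not handle
	 * itself: FS, GS, TR, LDTR (with hidden state), KernelGSBase,
	 * STAR/LSTAR/CSTAR/SFMASK and the SYSENTER MSRs.
	 */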
	/* Run the VM. */
	vmload	%rax
	vmrun	%rax
	vmsave	%rax

	/* Get RAX. */
	popq	%rax

	/* Save the Guest GPRs. */
	GUEST_SAVE_GPRS(%rax)

	/* Restore the Host LDT. */
	HOST_RESTORE_LDT

	/* Restore the Host Segregs. */
	HOST_RESTORE_SEGREG(%gs)
	HOST_RESTORE_SEGREG(%fs)

	/* Restore the variable Host MSRs. */
	HOST_RESTORE_MSR(MSR_FSBASE)
	HOST_RESTORE_MSR(MSR_GSBASE)
	HOST_RESTORE_MSR(MSR_KERNELGSBASE)

	/* Restore the Host TR. */
	HOST_RESTORE_TR

	/* Enable Host interrupts. */
	stgi

	/* Restore the Host GPRs. */
	HOST_RESTORE_GPRS

	xorq	%rax,%rax
	retq
END(svm_vmrun)
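
/*
 * For reference, a minimal sketch of the C-side declaration this entry point
 * is expected to match under the SysV AMD64 calling convention (the actual
 * prototype lives in the nvmm SVM C code, not in this file):
 *
 *	int svm_vmrun(paddr_t vmcb_pa, uint64_t *gprs);
 *
 * vmcb_pa arrives in %rdi, the guest GPR array in %rsi, and the xorq/retq
 * above returns 0 in %rax.
 */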