/*	$NetBSD: nvmm_x86_svmfunc.S,v 1.6 2020/09/05 07:22:26 maxv Exp $	*/

/*
 * Copyright (c) 2018-2020 Maxime Villard, m00nbsd.net
 * All rights reserved.
 *
 * This code is part of the NVMM hypervisor.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
 * BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
 * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED
 * AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
 * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */

/* Override user-land alignment before including asm.h */
#define	ALIGN_DATA	.align	8
#define	ALIGN_TEXT	.align	16,0x90
#define	_ALIGN_TEXT	ALIGN_TEXT

#define	_LOCORE
#include "assym.h"
#include <machine/asm.h>
#include <machine/segments.h>
#include <x86/specialreg.h>

#define	ASM_NVMM
#include <dev/nvmm/x86/nvmm_x86.h>

	.text

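/*
 * The host GPRs saved here are exactly the callee-saved registers of the
 * SysV AMD64 ABI; the caller-saved ones do not need to survive the call
 * into the guest.
 */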
#define HOST_SAVE_GPRS		\
	pushq	%rbx		;\
	pushq	%rbp		;\
	pushq	%r12		;\
	pushq	%r13		;\
	pushq	%r14		;\
	pushq	%r15

#define HOST_RESTORE_GPRS	\
	popq	%r15		;\
	popq	%r14		;\
	popq	%r13		;\
	popq	%r12		;\
	popq	%rbp		;\
	popq	%rbx

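/*
 * rdmsr and wrmsr move the MSR value through %edx:%eax, so both halves
 * are pushed and popped around the guest run.
 */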
#define HOST_SAVE_MSR(msr)	\
	movq	$msr,%rcx	;\
	rdmsr			;\
	pushq	%rdx		;\
	pushq	%rax

#define HOST_RESTORE_MSR(msr)	\
	popq	%rax		;\
	popq	%rdx		;\
	movq	$msr,%rcx	;\
	wrmsr

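/*
 * ltr only accepts an available (non-busy) TSS, and the descriptor of the
 * running task is marked busy; clear the busy bit (bit 9 of the second
 * descriptor word) in the GDT entry before reloading TR.
 */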
#define HOST_SAVE_TR		\
	strw	%ax		;\
	pushq	%rax

#define HOST_RESTORE_TR				\
	popq	%rax				;\
	movzwq	%ax,%rdx			;\
	movq	CPUVAR(GDT),%rax		;\
	andq	$~0x0200,4(%rax,%rdx,1)		;\
	ltrw	%dx

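/*
 * LDTR is among the state that vmload replaces with the guest's, so the
 * host value has to be carried across the run by hand.
 */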
#define HOST_SAVE_LDT		\
	sldtw	%ax		;\
	pushq	%rax

#define HOST_RESTORE_LDT	\
	popq	%rax		;\
	lldtw	%ax

/*
 * All GPRs except RAX and RSP, which are taken care of in VMCB.
 */

#define GUEST_SAVE_GPRS(reg)				\
	movq	%rcx,(NVMM_X64_GPR_RCX * 8)(reg)	;\
	movq	%rdx,(NVMM_X64_GPR_RDX * 8)(reg)	;\
	movq	%rbx,(NVMM_X64_GPR_RBX * 8)(reg)	;\
	movq	%rbp,(NVMM_X64_GPR_RBP * 8)(reg)	;\
	movq	%rsi,(NVMM_X64_GPR_RSI * 8)(reg)	;\
	movq	%rdi,(NVMM_X64_GPR_RDI * 8)(reg)	;\
	movq	%r8,(NVMM_X64_GPR_R8 * 8)(reg)		;\
	movq	%r9,(NVMM_X64_GPR_R9 * 8)(reg)		;\
	movq	%r10,(NVMM_X64_GPR_R10 * 8)(reg)	;\
	movq	%r11,(NVMM_X64_GPR_R11 * 8)(reg)	;\
	movq	%r12,(NVMM_X64_GPR_R12 * 8)(reg)	;\
	movq	%r13,(NVMM_X64_GPR_R13 * 8)(reg)	;\
	movq	%r14,(NVMM_X64_GPR_R14 * 8)(reg)	;\
	movq	%r15,(NVMM_X64_GPR_R15 * 8)(reg)

#define GUEST_RESTORE_GPRS(reg)				\
	movq	(NVMM_X64_GPR_RCX * 8)(reg),%rcx	;\
	movq	(NVMM_X64_GPR_RDX * 8)(reg),%rdx	;\
	movq	(NVMM_X64_GPR_RBX * 8)(reg),%rbx	;\
	movq	(NVMM_X64_GPR_RBP * 8)(reg),%rbp	;\
	movq	(NVMM_X64_GPR_RSI * 8)(reg),%rsi	;\
	movq	(NVMM_X64_GPR_RDI * 8)(reg),%rdi	;\
	movq	(NVMM_X64_GPR_R8 * 8)(reg),%r8		;\
	movq	(NVMM_X64_GPR_R9 * 8)(reg),%r9		;\
	movq	(NVMM_X64_GPR_R10 * 8)(reg),%r10	;\
	movq	(NVMM_X64_GPR_R11 * 8)(reg),%r11	;\
	movq	(NVMM_X64_GPR_R12 * 8)(reg),%r12	;\
	movq	(NVMM_X64_GPR_R13 * 8)(reg),%r13	;\
	movq	(NVMM_X64_GPR_R14 * 8)(reg),%r14	;\
	movq	(NVMM_X64_GPR_R15 * 8)(reg),%r15

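/*
 * On the C side this entry point is declared as, roughly, something like
 * "int svm_vmrun(paddr_t, uint64_t *)" (naming assumed here); it runs the
 * guest until the next #VMEXIT and always returns 0.
 */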
/*
 * %rdi = PA of VMCB
 * %rsi = VA of guest GPR state
 */
ENTRY(svm_vmrun)
	/* Save the Host GPRs. */
	HOST_SAVE_GPRS

	/* Save the Host TR. */
	HOST_SAVE_TR

	/* Save the Host GSBASE. */
	HOST_SAVE_MSR(MSR_GSBASE)

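	/*
	 * VMRUN saves, and #VMEXIT restores, the host DS and ES selectors,
	 * so give them sane values before entering the guest.
	 */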
	/* Reset DS and ES. */
	movq	$GSEL(GUDATA_SEL, SEL_UPL),%rax
	movw	%ax,%ds
	movw	%ax,%es

	/* Save the Host LDT. */
	HOST_SAVE_LDT

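	/*
	 * Stack the two arguments: %rdi (the VMCB PA) ends up on top so it
	 * can be popped straight into %rax for vmload/vmrun/vmsave, with
	 * %rsi (the guest GPR area) below it, out of reach of the guest
	 * register loads.
	 */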
	/* Prepare RAX. */
	pushq	%rsi
	pushq	%rdi

	/* Restore the Guest GPRs. */
	movq	%rsi,%rax
	GUEST_RESTORE_GPRS(%rax)

	/* Set RAX. */
	popq	%rax

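	/*
	 * vmload pulls the remaining guest state (FS, GS, TR, LDTR,
	 * KernelGSBase and the SYSCALL/SYSENTER MSRs) in from the VMCB,
	 * vmrun executes the guest until the next #VMEXIT, and vmsave
	 * writes the same set of state back out. #VMEXIT restores %rax to
	 * the VMCB PA saved by vmrun, which is what makes the vmsave
	 * operand valid here.
	 */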
	/* Run the VM. */
	vmload	%rax
	vmrun	%rax
	vmsave	%rax

	/* Get RAX (the VA of the guest GPR area, pushed as %rsi above). */
	popq	%rax

	/* Save the Guest GPRs. */
	GUEST_SAVE_GPRS(%rax)

	/* Restore the Host LDT. */
	HOST_RESTORE_LDT

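	/*
	 * The guest's FS and GS came in via vmload; clear the selectors.
	 * The selector loads may clobber the GS base, which is why the
	 * host GSBASE is restored from the stacked MSR value only after.
	 */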
	/* Reset FS and GS. */
	xorq	%rax,%rax
	movw	%ax,%fs
	movw	%ax,%gs

	/* Restore the Host GSBASE. */
	HOST_RESTORE_MSR(MSR_GSBASE)

	/* Restore the Host TR. */
	HOST_RESTORE_TR

	/* Restore the Host GPRs. */
	HOST_RESTORE_GPRS

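	/* Always return 0; the exit reason is read out of the VMCB. */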
	xorq	%rax,%rax
	retq
END(svm_vmrun)