/*	$NetBSD: nvmm_x86_svmfunc.S,v 1.1 2018/11/07 07:43:08 maxv Exp $	*/

/*
 * Copyright (c) 2018 The NetBSD Foundation, Inc.
 * All rights reserved.
 *
 * This code is derived from software contributed to The NetBSD Foundation
 * by Maxime Villard.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
 * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 */
/* Override user-land alignment before including asm.h */
#define ALIGN_DATA	.align	8
#define ALIGN_TEXT	.align	16,0x90
#define _ALIGN_TEXT	ALIGN_TEXT

#define _LOCORE
#include "assym.h"
#include <machine/asm.h>
#include <machine/segments.h>
#include <x86/specialreg.h>

/* Request the assembler-visible subset of nvmm_x86.h (NVMM_X64_GPR_*). */
#define ASM_NVMM
#include <dev/nvmm/x86/nvmm_x86.h>

	.text

/*
 * Push the callee-saved host GPRs. The caller-saved registers need no
 * saving per the C ABI; the guest values loaded into them below never
 * have to survive the call.
 */
#define HOST_SAVE_GPRS \
	pushq	%rbx			;\
	pushq	%rbp			;\
	pushq	%r12			;\
	pushq	%r13			;\
	pushq	%r14			;\
	pushq	%r15

/* Pop the callee-saved host GPRs, in the reverse order of HOST_SAVE_GPRS. */
#define HOST_RESTORE_GPRS \
	popq	%r15			;\
	popq	%r14			;\
	popq	%r13			;\
	popq	%r12			;\
	popq	%rbp			;\
	popq	%rbx

/*
 * Read MSR 'msr' and push its value on the stack, high half (%rdx) first.
 * Clobbers %rax, %rcx, %rdx.
 */
#define HOST_SAVE_MSR(msr) \
	movq	$msr,%rcx		;\
	rdmsr				;\
	pushq	%rdx			;\
	pushq	%rax

/*
 * Pop a value pushed by HOST_SAVE_MSR and write it back into MSR 'msr'.
 * Clobbers %rax, %rcx, %rdx.
 */
#define HOST_RESTORE_MSR(msr) \
	popq	%rax			;\
	popq	%rdx			;\
	movq	$msr,%rcx		;\
	wrmsr

/* Push the selector currently held in segment register 'sreg'. Clobbers %ax. */
#define HOST_SAVE_SEGREG(sreg) \
	movw	sreg,%ax		;\
	pushw	%ax

/* Pop a selector saved by HOST_SAVE_SEGREG back into 'sreg'. Clobbers %ax. */
#define HOST_RESTORE_SEGREG(sreg)\
	popw	%ax			;\
	movw	%ax,sreg

/* Push the host task register selector. Clobbers %ax. */
#define HOST_SAVE_TR \
	strw	%ax			;\
	pushw	%ax

/*
 * Pop the selector saved by HOST_SAVE_TR and reload the task register.
 * The busy bit (0x0200 in the second 32-bit word of the TSS descriptor,
 * at GDT base + selector + 4) must be cleared first: ltr requires an
 * available (non-busy) TSS descriptor and sets the bit itself.
 * Clobbers %rax, %rdx.
 */
#define HOST_RESTORE_TR \
	popw	%ax			;\
	movzwq	%ax,%rdx		;\
	movq	CPUVAR(GDT),%rax	;\
	andq	$~0x0200,4(%rax,%rdx, 1) ;\
	ltrw	%dx

/* Push the host LDT selector. Clobbers %ax. */
#define HOST_SAVE_LDT \
	sldtw	%ax			;\
	pushw	%ax

/* Pop the selector saved by HOST_SAVE_LDT and reload the LDT. Clobbers %ax. */
#define HOST_RESTORE_LDT \
	popw	%ax			;\
	lldtw	%ax

/*
 * All GPRs except RAX and RSP, which are taken care of in VMCB.
 */

/* Store the guest GPRs into the state area pointed to by 'reg'. */
#define GUEST_SAVE_GPRS(reg) \
	movq	%rbx,(NVMM_X64_GPR_RBX * 8)(reg)	;\
	movq	%rcx,(NVMM_X64_GPR_RCX * 8)(reg)	;\
	movq	%rdx,(NVMM_X64_GPR_RDX * 8)(reg)	;\
	movq	%r8,(NVMM_X64_GPR_R8 * 8)(reg)		;\
	movq	%r9,(NVMM_X64_GPR_R9 * 8)(reg)		;\
	movq	%r10,(NVMM_X64_GPR_R10 * 8)(reg)	;\
	movq	%r11,(NVMM_X64_GPR_R11 * 8)(reg)	;\
	movq	%r12,(NVMM_X64_GPR_R12 * 8)(reg)	;\
	movq	%r13,(NVMM_X64_GPR_R13 * 8)(reg)	;\
	movq	%r14,(NVMM_X64_GPR_R14 * 8)(reg)	;\
	movq	%r15,(NVMM_X64_GPR_R15 * 8)(reg)	;\
	movq	%rbp,(NVMM_X64_GPR_RBP * 8)(reg)	;\
	movq	%rdi,(NVMM_X64_GPR_RDI * 8)(reg)	;\
	movq	%rsi,(NVMM_X64_GPR_RSI * 8)(reg)

/* Load the guest GPRs from the state area pointed to by 'reg'. */
#define GUEST_RESTORE_GPRS(reg) \
	movq	(NVMM_X64_GPR_RBX * 8)(reg),%rbx	;\
	movq	(NVMM_X64_GPR_RCX * 8)(reg),%rcx	;\
	movq	(NVMM_X64_GPR_RDX * 8)(reg),%rdx	;\
	movq	(NVMM_X64_GPR_R8 * 8)(reg),%r8		;\
	movq	(NVMM_X64_GPR_R9 * 8)(reg),%r9		;\
	movq	(NVMM_X64_GPR_R10 * 8)(reg),%r10	;\
	movq	(NVMM_X64_GPR_R11 * 8)(reg),%r11	;\
	movq	(NVMM_X64_GPR_R12 * 8)(reg),%r12	;\
	movq	(NVMM_X64_GPR_R13 * 8)(reg),%r13	;\
	movq	(NVMM_X64_GPR_R14 * 8)(reg),%r14	;\
	movq	(NVMM_X64_GPR_R15 * 8)(reg),%r15	;\
	movq	(NVMM_X64_GPR_RBP * 8)(reg),%rbp	;\
	movq	(NVMM_X64_GPR_RDI * 8)(reg),%rdi	;\
	movq	(NVMM_X64_GPR_RSI * 8)(reg),%rsi

/*
 * Perform one VMRUN of the guest described by the VMCB, returning to the
 * host on #VMEXIT. Presumably C-callable as
 * svm_vmrun(paddr_t vmcb_pa, uint64_t *gprs) — TODO confirm the exact
 * prototype against the C caller.
 *
 * %rdi = PA of VMCB
 * %rsi = VA of guest GPR state
 *
 * Returns 0 in %rax.
 */
ENTRY(svm_vmrun)
	/* Save the Host GPRs (callee-saved set). */
	HOST_SAVE_GPRS

	/* Disable Host interrupts while host state is partially swapped out. */
	clgi

	/* Save the Host TR. */
	HOST_SAVE_TR

	/* Save the variable Host MSRs. */
	HOST_SAVE_MSR(MSR_KERNELGSBASE)
	HOST_SAVE_MSR(MSR_GSBASE)
	HOST_SAVE_MSR(MSR_FSBASE)

	/* Reset the Host Segregs. */
	movq	$GSEL(GUDATA_SEL, SEL_UPL),%rax
	movw	%ax,%ds
	movw	%ax,%es
	xorq	%rax,%rax
	movw	%ax,%fs
	movw	%ax,%gs

	/* Save some Host Segregs. */
	HOST_SAVE_SEGREG(%fs)
	HOST_SAVE_SEGREG(%gs)

	/* Save the Host LDT. */
	HOST_SAVE_LDT

	/* Prepare RAX: keep the GPR state VA and the VMCB PA on the stack. */
	pushq	%rsi
	pushq	%rdi

	/* Restore the Guest GPRs. */
	movq	%rsi,%rax
	GUEST_RESTORE_GPRS(%rax)

	/* Set RAX = PA of VMCB, the implicit operand of vmload/vmrun/vmsave. */
	popq	%rax

	/* Run the VM: vmrun returns here on #VMEXIT. */
	vmload	%rax
	vmrun	%rax
	vmsave	%rax

	/* Get RAX = VA of the guest GPR state. */
	popq	%rax

	/* Save the Guest GPRs. */
	GUEST_SAVE_GPRS(%rax)

	/* Restore the Host LDT. */
	HOST_RESTORE_LDT

	/* Restore the Host Segregs, in reverse order of the saves. */
	HOST_RESTORE_SEGREG(%gs)
	HOST_RESTORE_SEGREG(%fs)

	/* Restore the variable Host MSRs, in reverse order of the saves. */
	HOST_RESTORE_MSR(MSR_FSBASE)
	HOST_RESTORE_MSR(MSR_GSBASE)
	HOST_RESTORE_MSR(MSR_KERNELGSBASE)

	/* Restore the Host TR. */
	HOST_RESTORE_TR

	/* Enable Host interrupts. */
	stgi

	/* Restore the Host GPRs. */
	HOST_RESTORE_GPRS

	/* Success. */
	xorq	%rax,%rax
	retq
END(svm_vmrun)