nvmm_x86_svmfunc.S revision 1.5 1 1.5 maxv /* $NetBSD: nvmm_x86_svmfunc.S,v 1.5 2020/08/11 15:48:42 maxv Exp $ */
2 1.1 maxv
3 1.1 maxv /*
4 1.4 maxv * Copyright (c) 2018-2020 The NetBSD Foundation, Inc.
5 1.1 maxv * All rights reserved.
6 1.1 maxv *
7 1.1 maxv * This code is derived from software contributed to The NetBSD Foundation
8 1.1 maxv * by Maxime Villard.
9 1.1 maxv *
10 1.1 maxv * Redistribution and use in source and binary forms, with or without
11 1.1 maxv * modification, are permitted provided that the following conditions
12 1.1 maxv * are met:
13 1.1 maxv * 1. Redistributions of source code must retain the above copyright
14 1.1 maxv * notice, this list of conditions and the following disclaimer.
15 1.1 maxv * 2. Redistributions in binary form must reproduce the above copyright
16 1.1 maxv * notice, this list of conditions and the following disclaimer in the
17 1.1 maxv * documentation and/or other materials provided with the distribution.
18 1.1 maxv *
19 1.1 maxv * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
20 1.1 maxv * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
21 1.1 maxv * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
22 1.1 maxv * PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
23 1.1 maxv * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
24 1.1 maxv * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
25 1.1 maxv * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
26 1.1 maxv * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
27 1.1 maxv * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
28 1.1 maxv * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
29 1.1 maxv * POSSIBILITY OF SUCH DAMAGE.
30 1.1 maxv */
31 1.1 maxv
32 1.1 maxv /* Override user-land alignment before including asm.h */
33 1.1 maxv #define ALIGN_DATA .align 8
34 1.1 maxv #define ALIGN_TEXT .align 16,0x90
35 1.1 maxv #define _ALIGN_TEXT ALIGN_TEXT
36 1.1 maxv
37 1.1 maxv #define _LOCORE
38 1.1 maxv #include "assym.h"
39 1.1 maxv #include <machine/asm.h>
40 1.1 maxv #include <machine/segments.h>
41 1.1 maxv #include <x86/specialreg.h>
42 1.1 maxv
43 1.1 maxv #define ASM_NVMM
44 1.1 maxv #include <dev/nvmm/x86/nvmm_x86.h>
45 1.1 maxv
46 1.1 maxv .text
47 1.1 maxv
/*
 * Push the host callee-saved GPRs.  The caller-saved registers need no
 * preservation across svm_vmrun() per the SysV AMD64 ABI; %rax and %rsp
 * are handled separately (via the VMCB and the stack itself).
 * Must be paired with HOST_RESTORE_GPRS (pops in reverse order).
 */
#define HOST_SAVE_GPRS \
	pushq	%rbx			;\
	pushq	%rbp			;\
	pushq	%r12			;\
	pushq	%r13			;\
	pushq	%r14			;\
	pushq	%r15
55 1.1 maxv
/*
 * Pop the host callee-saved GPRs, in exact reverse order of
 * HOST_SAVE_GPRS.
 */
#define HOST_RESTORE_GPRS \
	popq	%r15			;\
	popq	%r14			;\
	popq	%r13			;\
	popq	%r12			;\
	popq	%rbp			;\
	popq	%rbx
63 1.1 maxv
/*
 * Read MSR 'msr' and push its value on the stack (16 bytes: RDMSR
 * returns the value split across EDX:EAX, and both halves are pushed).
 * Clobbers %rax, %rcx, %rdx.  Paired with HOST_RESTORE_MSR.
 */
#define HOST_SAVE_MSR(msr) \
	movq	$msr,%rcx		;\
	rdmsr				;\
	pushq	%rdx			;\
	pushq	%rax
69 1.1 maxv
/*
 * Pop the EDX:EAX pair pushed by HOST_SAVE_MSR and write it back into
 * MSR 'msr'.  Clobbers %rax, %rcx, %rdx.
 */
#define HOST_RESTORE_MSR(msr) \
	popq	%rax			;\
	popq	%rdx			;\
	movq	$msr,%rcx		;\
	wrmsr
75 1.1 maxv
/*
 * Save the host TR selector on the stack.  STR only writes %ax; the
 * upper bits of the pushed %rax are garbage, which is fine because
 * HOST_RESTORE_TR zero-extends the low 16 bits.  A full 8-byte push
 * keeps the stack aligned.  Clobbers %rax.
 */
#define HOST_SAVE_TR \
	strw	%ax			;\
	pushq	%rax
79 1.1 maxv
/*
 * Reload the host TR from the selector pushed by HOST_SAVE_TR.  LTR
 * faults on a TSS descriptor already marked busy, and the host TSS is
 * busy since TR was loaded from it, so first clear the busy bit
 * (bit 9, 0x0200, of the second dword) in this CPU's GDT entry.
 * Clobbers %rax, %rdx.
 */
#define HOST_RESTORE_TR \
	popq	%rax			;\
	movzwq	%ax,%rdx		;\
	movq	CPUVAR(GDT),%rax	;\
	andq	$~0x0200,4(%rax,%rdx, 1) ;\
	ltrw	%dx
86 1.1 maxv
/*
 * Save the host LDT selector on the stack.  As with HOST_SAVE_TR, only
 * the low 16 bits of the pushed %rax are meaningful.  Clobbers %rax.
 */
#define HOST_SAVE_LDT \
	sldtw	%ax			;\
	pushq	%rax
90 1.1 maxv
/*
 * Reload the host LDT from the selector pushed by HOST_SAVE_LDT.
 * LLDT only reads %ax; no busy-bit dance is needed for LDT descriptors.
 * Clobbers %rax.
 */
#define HOST_RESTORE_LDT \
	popq	%rax			;\
	lldtw	%ax
94 1.1 maxv
/*
 * All GPRs except RAX and RSP, which are taken care of in VMCB.
 */

/*
 * Store the guest GPRs into the state area pointed to by 'reg', at the
 * NVMM_X64_GPR_* slot offsets (8 bytes per slot).  'reg' itself must
 * not be one of the registers being stored (the caller uses %rax,
 * which is excluded anyway).
 */
#define GUEST_SAVE_GPRS(reg) \
	movq	%rcx,(NVMM_X64_GPR_RCX * 8)(reg) ;\
	movq	%rdx,(NVMM_X64_GPR_RDX * 8)(reg) ;\
	movq	%rbx,(NVMM_X64_GPR_RBX * 8)(reg) ;\
	movq	%rbp,(NVMM_X64_GPR_RBP * 8)(reg) ;\
	movq	%rsi,(NVMM_X64_GPR_RSI * 8)(reg) ;\
	movq	%rdi,(NVMM_X64_GPR_RDI * 8)(reg) ;\
	movq	%r8,(NVMM_X64_GPR_R8 * 8)(reg)	;\
	movq	%r9,(NVMM_X64_GPR_R9 * 8)(reg)	;\
	movq	%r10,(NVMM_X64_GPR_R10 * 8)(reg) ;\
	movq	%r11,(NVMM_X64_GPR_R11 * 8)(reg) ;\
	movq	%r12,(NVMM_X64_GPR_R12 * 8)(reg) ;\
	movq	%r13,(NVMM_X64_GPR_R13 * 8)(reg) ;\
	movq	%r14,(NVMM_X64_GPR_R14 * 8)(reg) ;\
	movq	%r15,(NVMM_X64_GPR_R15 * 8)(reg)
114 1.1 maxv
/*
 * Load the guest GPRs from the state area pointed to by 'reg', mirror
 * of GUEST_SAVE_GPRS.  'reg' must not be one of the registers being
 * loaded, since it is still needed as the base for every load.
 */
#define GUEST_RESTORE_GPRS(reg) \
	movq	(NVMM_X64_GPR_RCX * 8)(reg),%rcx ;\
	movq	(NVMM_X64_GPR_RDX * 8)(reg),%rdx ;\
	movq	(NVMM_X64_GPR_RBX * 8)(reg),%rbx ;\
	movq	(NVMM_X64_GPR_RBP * 8)(reg),%rbp ;\
	movq	(NVMM_X64_GPR_RSI * 8)(reg),%rsi ;\
	movq	(NVMM_X64_GPR_RDI * 8)(reg),%rdi ;\
	movq	(NVMM_X64_GPR_R8 * 8)(reg),%r8	;\
	movq	(NVMM_X64_GPR_R9 * 8)(reg),%r9	;\
	movq	(NVMM_X64_GPR_R10 * 8)(reg),%r10 ;\
	movq	(NVMM_X64_GPR_R11 * 8)(reg),%r11 ;\
	movq	(NVMM_X64_GPR_R12 * 8)(reg),%r12 ;\
	movq	(NVMM_X64_GPR_R13 * 8)(reg),%r13 ;\
	movq	(NVMM_X64_GPR_R14 * 8)(reg),%r14 ;\
	movq	(NVMM_X64_GPR_R15 * 8)(reg),%r15
130 1.1 maxv
/*
 * svm_vmrun: perform one host->guest->host world switch via VMRUN.
 *
 * %rdi = PA of VMCB
 * %rsi = VA of guest GPR state
 *
 * Saves the host context that VMRUN/#VMEXIT does not preserve for us,
 * installs the guest GPRs, executes the guest until the next #VMEXIT,
 * saves the guest GPRs back, and restores the host context.
 * Always returns 0 in %rax.
 */
ENTRY(svm_vmrun)
	/* Save the Host GPRs (callee-saved set only, see HOST_SAVE_GPRS). */
	HOST_SAVE_GPRS

	/* Save the Host TR. */
	HOST_SAVE_TR

	/*
	 * Save the Host GSBASE.  VMLOAD below replaces GS.base with the
	 * guest value from the VMCB (AMD APM vol. 2), so it must be
	 * restored by hand after the #VMEXIT.
	 */
	HOST_SAVE_MSR(MSR_GSBASE)

	/* Reset DS and ES to the user data selector. */
	movq	$GSEL(GUDATA_SEL, SEL_UPL),%rax
	movw	%ax,%ds
	movw	%ax,%es

	/* Save the Host LDT. */
	HOST_SAVE_LDT

	/*
	 * Prepare RAX: stash both arguments.  %rdi (VMCB PA) is popped
	 * right below for the vmload/vmrun/vmsave triple; %rsi (guest
	 * GPR area) is popped after the #VMEXIT.
	 */
	pushq	%rsi
	pushq	%rdi

	/*
	 * Restore the Guest GPRs.  Guest %rax and %rsp come from the
	 * VMCB instead (see comment above GUEST_SAVE_GPRS).
	 */
	movq	%rsi,%rax
	GUEST_RESTORE_GPRS(%rax)

	/* Set RAX = PA of the VMCB (first pop, pushed from %rdi). */
	popq	%rax

	/*
	 * Run the VM.  VMLOAD installs the remaining guest state held in
	 * the VMCB (FS/GS bases, TR, LDTR, etc.); VMRUN enters the guest
	 * and returns here at the next #VMEXIT; VMSAVE writes that same
	 * state back into the VMCB.
	 */
	vmload	%rax
	vmrun	%rax
	vmsave	%rax

	/* Get RAX = VA of the guest GPR area (second pop, from %rsi). */
	popq	%rax

	/* Save the Guest GPRs. */
	GUEST_SAVE_GPRS(%rax)

	/* Restore the Host LDT. */
	HOST_RESTORE_LDT

	/* Reset FS and GS with null selectors before reloading GSBASE. */
	xorq	%rax,%rax
	movw	%ax,%fs
	movw	%ax,%gs

	/* Restore the Host GSBASE (must come after the %gs write above). */
	HOST_RESTORE_MSR(MSR_GSBASE)

	/* Restore the Host TR. */
	HOST_RESTORE_TR

	/* Restore the Host GPRs. */
	HOST_RESTORE_GPRS

	/* Return 0 to the caller. */
	xorq	%rax,%rax
	retq
END(svm_vmrun)
195