/* nvmm_x86_svmfunc.S, NetBSD revision 1.3 — x86 SVM world-switch code */
      1 /*	$NetBSD: nvmm_x86_svmfunc.S,v 1.3 2019/04/24 18:45:15 maxv Exp $	*/
      2 
      3 /*
      4  * Copyright (c) 2018 The NetBSD Foundation, Inc.
      5  * All rights reserved.
      6  *
      7  * This code is derived from software contributed to The NetBSD Foundation
      8  * by Maxime Villard.
      9  *
     10  * Redistribution and use in source and binary forms, with or without
     11  * modification, are permitted provided that the following conditions
     12  * are met:
     13  * 1. Redistributions of source code must retain the above copyright
     14  *    notice, this list of conditions and the following disclaimer.
     15  * 2. Redistributions in binary form must reproduce the above copyright
     16  *    notice, this list of conditions and the following disclaimer in the
     17  *    documentation and/or other materials provided with the distribution.
     18  *
     19  * THIS SOFTWARE IS PROVIDED BY THE NETBSD FOUNDATION, INC. AND CONTRIBUTORS
     20  * ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
     21  * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
     22  * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL THE FOUNDATION OR CONTRIBUTORS
     23  * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
     24  * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
     25  * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
     26  * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
     27  * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
     28  * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
     29  * POSSIBILITY OF SUCH DAMAGE.
     30  */
     31 
     32 /* Override user-land alignment before including asm.h */
     33 #define	ALIGN_DATA	.align	8
     34 #define ALIGN_TEXT	.align 16,0x90
     35 #define _ALIGN_TEXT	ALIGN_TEXT
     36 
     37 #define _LOCORE
     38 #include "assym.h"
     39 #include <machine/asm.h>
     40 #include <machine/segments.h>
     41 #include <x86/specialreg.h>
     42 
     43 #define ASM_NVMM
     44 #include <dev/nvmm/x86/nvmm_x86.h>
     45 
     46 	.text
     47 
/*
 * Save/restore the host callee-saved GPRs (SysV AMD64: %rbx, %rbp,
 * %r12-%r15) on the stack.  The two macros mirror each other exactly,
 * so RESTORE pops in the reverse order of SAVE's pushes.  The volatile
 * registers need no saving across the guest run.
 */
#define HOST_SAVE_GPRS		\
	pushq	%rbx		;\
	pushq	%rbp		;\
	pushq	%r12		;\
	pushq	%r13		;\
	pushq	%r14		;\
	pushq	%r15

#define HOST_RESTORE_GPRS	\
	popq	%r15		;\
	popq	%r14		;\
	popq	%r13		;\
	popq	%r12		;\
	popq	%rbp		;\
	popq	%rbx

/*
 * Save/restore one MSR on the stack.  RDMSR/WRMSR take the MSR index
 * in %ecx and split the 64-bit value across %edx:%eax; both halves are
 * pushed (high half first) and popped back symmetrically.
 * Clobbers %rax, %rcx, %rdx.
 */
#define HOST_SAVE_MSR(msr)	\
	movq	$msr,%rcx	;\
	rdmsr			;\
	pushq	%rdx		;\
	pushq	%rax

#define HOST_RESTORE_MSR(msr)	\
	popq	%rax		;\
	popq	%rdx		;\
	movq	$msr,%rcx	;\
	wrmsr

/*
 * Save/restore the host Task Register selector (STR/LTR).  LTR faults
 * if the referenced TSS descriptor is marked busy — and the live host
 * TSS is always busy — so RESTORE first clears the busy bit (0x0200 in
 * the second dword of the 8-byte descriptor) in this CPU's GDT before
 * reloading TR.  Clobbers %rax, %rdx.
 */
#define HOST_SAVE_TR		\
	strw	%ax		;\
	pushw	%ax

#define HOST_RESTORE_TR				\
	popw	%ax				;\
	movzwq	%ax,%rdx			;\
	movq	CPUVAR(GDT),%rax		;\
	andq	$~0x0200,4(%rax,%rdx, 1)	;\
	ltrw	%dx

/*
 * Save/restore the host LDT selector (SLDT/LLDT).  Clobbers %ax.
 */
#define HOST_SAVE_LDT		\
	sldtw	%ax		;\
	pushw	%ax

#define HOST_RESTORE_LDT	\
	popw	%ax		;\
	lldtw	%ax
     95 /*
     96  * All GPRs except RAX and RSP, which are taken care of in VMCB.
     97  */
     98 
     99 #define GUEST_SAVE_GPRS(reg)				\
    100 	movq	%rcx,(NVMM_X64_GPR_RCX * 8)(reg)	;\
    101 	movq	%rdx,(NVMM_X64_GPR_RDX * 8)(reg)	;\
    102 	movq	%rbx,(NVMM_X64_GPR_RBX * 8)(reg)	;\
    103 	movq	%rbp,(NVMM_X64_GPR_RBP * 8)(reg)	;\
    104 	movq	%rsi,(NVMM_X64_GPR_RSI * 8)(reg)	;\
    105 	movq	%rdi,(NVMM_X64_GPR_RDI * 8)(reg)	;\
    106 	movq	%r8,(NVMM_X64_GPR_R8 * 8)(reg)		;\
    107 	movq	%r9,(NVMM_X64_GPR_R9 * 8)(reg)		;\
    108 	movq	%r10,(NVMM_X64_GPR_R10 * 8)(reg)	;\
    109 	movq	%r11,(NVMM_X64_GPR_R11 * 8)(reg)	;\
    110 	movq	%r12,(NVMM_X64_GPR_R12 * 8)(reg)	;\
    111 	movq	%r13,(NVMM_X64_GPR_R13 * 8)(reg)	;\
    112 	movq	%r14,(NVMM_X64_GPR_R14 * 8)(reg)	;\
    113 	movq	%r15,(NVMM_X64_GPR_R15 * 8)(reg)
    114 
    115 #define GUEST_RESTORE_GPRS(reg)				\
    116 	movq	(NVMM_X64_GPR_RCX * 8)(reg),%rcx	;\
    117 	movq	(NVMM_X64_GPR_RDX * 8)(reg),%rdx	;\
    118 	movq	(NVMM_X64_GPR_RBX * 8)(reg),%rbx	;\
    119 	movq	(NVMM_X64_GPR_RBP * 8)(reg),%rbp	;\
    120 	movq	(NVMM_X64_GPR_RSI * 8)(reg),%rsi	;\
    121 	movq	(NVMM_X64_GPR_RDI * 8)(reg),%rdi	;\
    122 	movq	(NVMM_X64_GPR_R8 * 8)(reg),%r8		;\
    123 	movq	(NVMM_X64_GPR_R9 * 8)(reg),%r9		;\
    124 	movq	(NVMM_X64_GPR_R10 * 8)(reg),%r10	;\
    125 	movq	(NVMM_X64_GPR_R11 * 8)(reg),%r11	;\
    126 	movq	(NVMM_X64_GPR_R12 * 8)(reg),%r12	;\
    127 	movq	(NVMM_X64_GPR_R13 * 8)(reg),%r13	;\
    128 	movq	(NVMM_X64_GPR_R14 * 8)(reg),%r14	;\
    129 	movq	(NVMM_X64_GPR_R15 * 8)(reg),%r15
    130 
    131 /*
    132  * %rdi = PA of VMCB
    133  * %rsi = VA of guest GPR state
    134  */
    135 ENTRY(svm_vmrun)
    136 	/* Save the Host GPRs. */
    137 	HOST_SAVE_GPRS
    138 
    139 	/* Disable Host interrupts. */
    140 	clgi
    141 
    142 	/* Save the Host TR. */
    143 	HOST_SAVE_TR
    144 
    145 	/* Save the Host GSBASE. */
    146 	HOST_SAVE_MSR(MSR_GSBASE)
    147 
    148 	/* Reset DS and ES. */
    149 	movq	$GSEL(GUDATA_SEL, SEL_UPL),%rax
    150 	movw	%ax,%ds
    151 	movw	%ax,%es
    152 
    153 	/* Save the Host LDT. */
    154 	HOST_SAVE_LDT
    155 
    156 	/* Prepare RAX. */
    157 	pushq	%rsi
    158 	pushq	%rdi
    159 
    160 	/* Restore the Guest GPRs. */
    161 	movq	%rsi,%rax
    162 	GUEST_RESTORE_GPRS(%rax)
    163 
    164 	/* Set RAX. */
    165 	popq	%rax
    166 
    167 	/* Run the VM. */
    168 	vmload	%rax
    169 	vmrun	%rax
    170 	vmsave	%rax
    171 
    172 	/* Get RAX. */
    173 	popq	%rax
    174 
    175 	/* Save the Guest GPRs. */
    176 	GUEST_SAVE_GPRS(%rax)
    177 
    178 	/* Restore the Host LDT. */
    179 	HOST_RESTORE_LDT
    180 
    181 	/* Reset FS and GS. */
    182 	xorq	%rax,%rax
    183 	movw	%ax,%fs
    184 	movw	%ax,%gs
    185 
    186 	/* Restore the Host GSBASE. */
    187 	HOST_RESTORE_MSR(MSR_GSBASE)
    188 
    189 	/* Restore the Host TR. */
    190 	HOST_RESTORE_TR
    191 
    192 	/* Enable Host interrupts. */
    193 	stgi
    194 
    195 	/* Restore the Host GPRs. */
    196 	HOST_RESTORE_GPRS
    197 
    198 	xorq	%rax,%rax
    199 	retq
    200 END(svm_vmrun)
    201