/*-
 * SPDX-License-Identifier: BSD-2-Clause-FreeBSD
 *
 * Copyright (c) 2013, Anish Gupta (akgupt3@gmail.com)
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#include <machine/asmacros.h>

#include "svm_assym.h"

/*
 * Be friendly to DTrace FBT's prologue/epilogue pattern matching.
 *
 * These macros are also responsible for saving/restoring the host %rbp
 * across VMRUN.
 */
#define	VENTER	push %rbp ; mov %rsp,%rbp
#define	VLEAVE	pop %rbp
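
/*
 * FBT identifies probe points by recognizing the conventional
 * frame-pointer prologue and epilogue, which is why the macros above
 * expand to exactly the "push %rbp ; mov %rsp,%rbp" / "pop %rbp"
 * sequences.
 */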

/*
 * svm_launch(uint64_t vmcb, struct svm_regctx *gctx, struct pcpu *pcpu)
 * %rdi: physical address of VMCB
 * %rsi: pointer to guest context
 * %rdx: pointer to the pcpu data
 */
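/*
 * Per the System V AMD64 calling convention the three arguments arrive
 * in %rdi, %rsi and %rdx, and %rbx, %rbp, %rsp and %r12-%r15 must be
 * preserved for the caller; everything else may be clobbered freely.
 */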
ENTRY(svm_launch)
	VENTER

	/* save pointer to the pcpu data */
	push	%rdx
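	/*
	 * The pointer is needed after the guest runs: vmload below
	 * replaces the host's %GS.base with the guest's, and #VMEXIT does
	 * not restore it, so it is rebuilt from this saved value via
	 * wrmsr on the way out.
	 */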

	/*
	 * Host register state saved across a VMRUN.
	 *
	 * All callee-saved registers except:
	 * %rsp: because it is preserved by the processor across VMRUN.
	 * %rbp: because it is saved/restored by the function prologue/epilogue.
	 */
	push	%rbx
	push	%r12
	push	%r13
	push	%r14
	push	%r15

	/* Save the physical address of the VMCB in %rax */
	movq	%rdi, %rax

	push	%rsi		/* push guest context pointer on the stack */

	/*
	 * Restore guest state.
	 */
	movq	SCTX_R8(%rsi), %r8
	movq	SCTX_R9(%rsi), %r9
	movq	SCTX_R10(%rsi), %r10
	movq	SCTX_R11(%rsi), %r11
	movq	SCTX_R12(%rsi), %r12
	movq	SCTX_R13(%rsi), %r13
	movq	SCTX_R14(%rsi), %r14
	movq	SCTX_R15(%rsi), %r15
	movq	SCTX_RBP(%rsi), %rbp
	movq	SCTX_RBX(%rsi), %rbx
	movq	SCTX_RCX(%rsi), %rcx
	movq	SCTX_RDX(%rsi), %rdx
	movq	SCTX_RDI(%rsi), %rdi
	movq	SCTX_RSI(%rsi), %rsi	/* %rsi is the base of these loads; restore it last */

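	/*
	 * %rax holds the physical address of the VMCB and is the implicit
	 * operand of all three instructions below: vmload pulls in the
	 * guest state that vmrun itself does not load (FS/GS/TR/LDTR and
	 * the syscall/sysenter MSRs), vmrun enters the guest and returns
	 * here on the next #VMEXIT, and vmsave writes the vmload-managed
	 * state back to the VMCB.
	 */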
	vmload	%rax
	vmrun	%rax
	vmsave	%rax

	pop	%rax		/* pop guest context pointer from the stack */
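	/*
	 * The guest's %rax, %rsp and %rflags were saved into the VMCB by
	 * the #VMEXIT itself, and host %rax (the VMCB address) was
	 * restored from the host save area, which is why those registers
	 * do not appear in struct svm_regctx.
	 */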

	/*
	 * Save guest state.
	 */
	movq	%r8, SCTX_R8(%rax)
	movq	%r9, SCTX_R9(%rax)
	movq	%r10, SCTX_R10(%rax)
	movq	%r11, SCTX_R11(%rax)
	movq	%r12, SCTX_R12(%rax)
	movq	%r13, SCTX_R13(%rax)
	movq	%r14, SCTX_R14(%rax)
	movq	%r15, SCTX_R15(%rax)
	movq	%rbp, SCTX_RBP(%rax)
	movq	%rbx, SCTX_RBX(%rax)
	movq	%rcx, SCTX_RCX(%rax)
	movq	%rdx, SCTX_RDX(%rax)
	movq	%rdi, SCTX_RDI(%rax)
	movq	%rsi, SCTX_RSI(%rax)

	/*
	 * To prevent malicious branch target predictions from
	 * affecting the host, overwrite all entries in the RSB upon
	 * exiting a guest.
	 */
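	/*
	 * Each call pushes an entry onto the RSB; two calls per iteration
	 * times 16 iterations overwrite all 32 entries.  The "pause ;
	 * call 1b" pairs are never executed architecturally and exist
	 * only to capture rogue speculative returns, and the return
	 * addresses piled up on the stack by the calls are discarded by
	 * restoring %rsp from %rax.
	 */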
	mov	$16, %ecx	/* 16 iterations, two calls per loop */
	mov	%rsp, %rax
0:	call	2f		/* create an RSB entry. */
1:	pause
	call	1b		/* capture rogue speculation. */
2:	call	2f		/* create an RSB entry. */
1:	pause
	call	1b		/* capture rogue speculation. */
2:	sub	$1, %ecx
	jnz	0b
	mov	%rax, %rsp

	/* Restore host state */
	pop	%r15
	pop	%r14
	pop	%r13
	pop	%r12
	pop	%rbx

	/* Restore %GS.base to point to the host's pcpu data */
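	/*
	 * wrmsr writes %edx:%eax to the MSR indexed by %ecx, so the pcpu
	 * pointer pushed at entry is popped and split into its low and
	 * high halves before being written to MSR_GSBASE.
	 */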
	pop	%rdx
	mov	%edx, %eax
	shr	$32, %rdx
	mov	$MSR_GSBASE, %rcx
	wrmsr

	/*
	 * Zero the remaining registers that still hold guest contents so
	 * that stale guest state can't be misused.
	 */
	xor	%rbp, %rbp
	xor	%rdi, %rdi
	xor	%rsi, %rsi
	xor	%r8, %r8
	xor	%r9, %r9
	xor	%r10, %r10
	xor	%r11, %r11

	VLEAVE
	ret
END(svm_launch)