1 /*-
2 * Copyright (c) 1993 The Regents of the University of California.
3 * All rights reserved.
4 *
5 * Redistribution and use in source and binary forms, with or without
6 * modification, are permitted provided that the following conditions
7 * are met:
8 * 1. Redistributions of source code must retain the above copyright
9 * notice, this list of conditions and the following disclaimer.
10 * 2. Redistributions in binary form must reproduce the above copyright
11 * notice, this list of conditions and the following disclaimer in the
12 * documentation and/or other materials provided with the distribution.
13 * 3. Neither the name of the University nor the names of its contributors
14 * may be used to endorse or promote products derived from this software
15 * without specific prior written permission.
16 *
17 * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
18 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
19 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
20 * ARE DISCLAIMED. IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
21 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
22 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
23 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
24 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
25 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
26 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
27 * SUCH DAMAGE.
28 *
29 * $FreeBSD$
30 */
31
32 #include <machine/asmacros.h>
33 #include <machine/cputypes.h>
34 #include <machine/pmap.h>
35 #include <machine/specialreg.h>
36
37 #include "assym.inc"
38
39 #define IDXSHIFT 10
40
41 .text
42
/*
 * void sse2_pagezero(void *page)
 *
 * Zero a 4096-byte page using non-temporal (movnti) stores so that the
 * zeroed page does not displace useful data from the caches.
 */
ENTRY(sse2_pagezero)
	pushl	%ebx
	movl	8(%esp),%ecx		/* %ecx = page start address */
	movl	%ecx,%eax
	addl	$4096,%eax		/* %eax = first address past the page */
	xor	%ebx,%ebx		/* %ebx = 0, the value stored */
	jmp	1f
	/*
	 * The loop takes 14 bytes.  Ensure that it doesn't cross a 16-byte
	 * cache line.
	 */
	.p2align 4,0x90
1:
	movnti	%ebx,(%ecx)		/* 8 zero bytes per iteration, */
	movnti	%ebx,4(%ecx)		/*  bypassing the cache */
	addl	$8,%ecx
	cmpl	%ecx,%eax
	jne	1b
	sfence				/* order the movnti stores before return */
	popl	%ebx
	ret
END(sse2_pagezero)
65
/*
 * void i686_pagezero(void *page)
 *
 * Zero a 4096-byte page (1024 dwords), scanning first so that dwords
 * which are already zero are not written again: a fully-zero page is
 * left untouched, avoiding dirtying cache lines needlessly.
 */
ENTRY(i686_pagezero)
	pushl	%edi
	pushl	%ebx

	movl	12(%esp),%edi		/* %edi = page start address */
	movl	$1024,%ecx		/* %ecx = dwords left to examine */

	ALIGN_TEXT
1:
	xorl	%eax,%eax
	repe
	scasl				/* skip over dwords that are already 0 */
	jnz	2f			/* found a non-zero dword */

	/* the rest of the page was already zero; done */
	popl	%ebx
	popl	%edi
	ret

	ALIGN_TEXT

2:
	incl	%ecx			/* re-count the dword scasl stepped past */
	subl	$4,%edi			/* back %edi up to that non-zero dword */

	movl	%ecx,%edx		/* %edx = dwords remaining in the page */
	cmpl	$16,%ecx

	jge	3f			/* >= 16 dwords left: zero all of them */

	/*
	 * Fewer than 16 dwords remain: zero only to the end of the
	 * current 64-byte line (%ecx = 16 - dword offset within line).
	 */
	movl	%edi,%ebx
	andl	$0x3f,%ebx
	shrl	%ebx
	shrl	%ebx			/* %ebx = dword index within 64-byte line */
	movl	$16,%ecx
	subl	%ebx,%ecx

3:
	subl	%ecx,%edx		/* %edx = dwords left after this chunk */
	rep
	stosl				/* zero %ecx dwords at %edi */

	movl	%edx,%ecx
	testl	%edx,%edx
	jnz	1b			/* rescan whatever remains */

	popl	%ebx
	popl	%edi
	ret
END(i686_pagezero)
115
/*
 * void fillw(int pat, void *base, size_t cnt)
 *
 * Store the 16-bit pattern `pat' into `cnt' consecutive words at `base'.
 */
ENTRY(fillw)
	pushl	%edi
	movl	16(%esp),%ecx		/* %ecx = cnt, number of words */
	movl	12(%esp),%edi		/* %edi = base, fill destination */
	movl	8(%esp),%eax		/* %ax  = pat, the fill pattern */
	rep
	stosw				/* store %ax at (%edi), %ecx times */
	popl	%edi
	ret
END(fillw)
127
128 /*
129 * memmove(dst, src, cnt) (return dst)
130 * ws@tools.de (Wolfgang Solfrank, TooLs GmbH) +49-228-985800
131 */
ENTRY(memmove)
	pushl	%ebp
	movl	%esp,%ebp
	pushl	%esi
	pushl	%edi
	movl	8(%ebp),%edi		/* %edi = dst */
	movl	12(%ebp),%esi		/* %esi = src */
1:
	movl	16(%ebp),%ecx		/* %ecx = cnt */

	movl	%edi,%eax
	subl	%esi,%eax
	cmpl	%ecx,%eax		/* overlapping && src < dst? */
	jb	1f			/* then copy backwards */

	/* forward copy: dwords first, then any leftover bytes */
	shrl	$2,%ecx			/* copy by 32-bit words */
	rep
	movsl
	movl	16(%ebp),%ecx
	andl	$3,%ecx			/* any bytes left? */
	rep
	movsb
	popl	%edi
	popl	%esi
	movl	8(%ebp),%eax		/* return dst for memmove */
	popl	%ebp
	ret

	ALIGN_TEXT
1:
	/*
	 * dst overlaps src from above: copy downwards from the last byte
	 * so source bytes are read before they are overwritten.
	 */
	addl	%ecx,%edi		/* copy backwards */
	addl	%ecx,%esi
	decl	%edi			/* point at the last byte of each */
	decl	%esi
	andl	$3,%ecx			/* any fractional bytes? */
	std				/* string ops now run downwards */
	rep
	movsb
	movl	16(%ebp),%ecx		/* copy remainder by 32-bit words */
	shrl	$2,%ecx
	subl	$3,%esi			/* step back to the last dword's start */
	subl	$3,%edi
	rep
	movsl
	popl	%edi
	popl	%esi
	cld				/* restore the default direction flag */
	movl	8(%ebp),%eax		/* return dst for memmove */
	popl	%ebp
	ret
END(memmove)
183
184 /*
185 * Note: memcpy does not support overlapping copies
186 */
/*
 * void *memcpy(void *dst, const void *src, size_t cnt) — returns dst.
 * Forward-only copy; overlapping regions are not supported (see note above).
 */
ENTRY(memcpy)
	pushl	%edi
	pushl	%esi
	movl	12(%esp),%edi		/* %edi = dst */
	movl	16(%esp),%esi		/* %esi = src */
	movl	20(%esp),%ecx		/* %ecx = cnt */
	movl	%edi,%eax		/* return value = dst */
	shrl	$2,%ecx			/* copy by 32-bit words */
	rep
	movsl
	movl	20(%esp),%ecx
	andl	$3,%ecx			/* any bytes left? */
	rep
	movsb
	popl	%esi
	popl	%edi
	ret
END(memcpy)
205
206 /*
207 * Handling of special 386 registers and descriptor tables etc
208 */
209 /* void lgdt(struct region_descriptor *rdp); */
/*
 * void lgdt(struct region_descriptor *rdp)
 *
 * Load a new GDT, then reload every segment register — including %cs,
 * which can only be reloaded via a far return — so that no selector
 * cached from the old GDT remains in use.
 */
ENTRY(lgdt)
	/* reload the descriptor table */
	movl	4(%esp),%eax		/* %eax = rdp */
	lgdt	(%eax)

	/* flush the prefetch q */
	jmp	1f
	nop
1:
	/* reload "stale" selectors */
	movl	$KDSEL,%eax		/* kernel data selector */
	movl	%eax,%ds
	movl	%eax,%es
	movl	%eax,%gs
	movl	%eax,%ss
	movl	$KPSEL,%eax		/* KPSEL selector for %fs (presumably
					   per-CPU data — confirm in segments.h) */
	movl	%eax,%fs

	/* reload code selector by turning return into intersegmental return */
	movl	(%esp),%eax		/* %eax = caller's return address */
	pushl	%eax
	movl	$KCSEL,4(%esp)		/* build far pointer KCSEL:retaddr */
	lret				/* far return reloads %cs */
END(lgdt)
234
235 /* ssdtosd(*ssdp,*sdp) */
/*
 * void ssdtosd(struct soft_segment_descriptor *ssdp,
 *     struct segment_descriptor *sdp)
 *
 * Pack a 12-byte machine-independent "soft" segment descriptor into the
 * 8-byte hardware descriptor layout by shuffling its three dwords into
 * the split base/limit/attribute fields the CPU expects.
 * NOTE(review): exact field placement follows the soft_segment_descriptor
 * layout in <machine/segments.h>; confirm against that header.
 */
ENTRY(ssdtosd)
	pushl	%ebx
	movl	8(%esp),%ecx		/* %ecx = ssdp (source, 3 dwords) */
	movl	8(%ecx),%ebx
	shll	$16,%ebx
	movl	(%ecx),%edx
	roll	$16,%edx
	movb	%dh,%bl
	movb	%dl,%bh
	rorl	$8,%ebx
	movl	4(%ecx),%eax
	movw	%ax,%dx
	andl	$0xf0000,%eax		/* keep only bits 16-19 */
	orl	%eax,%ebx
	movl	12(%esp),%ecx		/* %ecx = sdp (destination, 2 dwords) */
	movl	%edx,(%ecx)		/* low dword of hardware descriptor */
	movl	%ebx,4(%ecx)		/* high dword of hardware descriptor */
	popl	%ebx
	ret
END(ssdtosd)
256
257 /* void reset_dbregs() */
/*
 * void reset_dbregs(void)
 *
 * Clear all hardware debug registers.  %dr7 is cleared first so every
 * breakpoint is disabled before the address/status registers change.
 */
ENTRY(reset_dbregs)
	movl	$0,%eax
	movl	%eax,%dr7	/* disable all breakpoints first */
	movl	%eax,%dr0	/* clear breakpoint addresses */
	movl	%eax,%dr1
	movl	%eax,%dr2
	movl	%eax,%dr3
	movl	%eax,%dr6	/* clear debug status */
	ret
END(reset_dbregs)
268
269 /*****************************************************************************/
270 /* setjump, longjump */
271 /*****************************************************************************/
272
/*
 * int setjmp(jmp_buf buf)
 *
 * Save the callee-saved registers, stack pointer and return address
 * into buf.  Returns 0 on the direct call; longjmp() on the same buf
 * resumes here returning 1.
 */
ENTRY(setjmp)
	movl	4(%esp),%eax	/* %eax = buf */
	movl	%ebx,(%eax)	/* save ebx */
	movl	%esp,4(%eax)	/* save esp */
	movl	%ebp,8(%eax)	/* save ebp */
	movl	%esi,12(%eax)	/* save esi */
	movl	%edi,16(%eax)	/* save edi */
	movl	(%esp),%edx	/* get rta */
	movl	%edx,20(%eax)	/* save eip */
	xorl	%eax,%eax	/* return(0); */
	ret
END(setjmp)
285
/*
 * void longjmp(jmp_buf buf)
 *
 * Restore the context saved by setjmp(buf) and resume execution at the
 * saved return address, making that setjmp() call return 1.
 */
ENTRY(longjmp)
	movl	4(%esp),%eax	/* %eax = buf */
	movl	(%eax),%ebx	/* restore ebx */
	movl	4(%eax),%esp	/* restore esp */
	movl	8(%eax),%ebp	/* restore ebp */
	movl	12(%eax),%esi	/* restore esi */
	movl	16(%eax),%edi	/* restore edi */
	movl	20(%eax),%edx	/* get rta */
	movl	%edx,(%esp)	/* put in return frame */
	xorl	%eax,%eax	/* return(1); */
	incl	%eax
	ret
END(longjmp)
299
300 /*
301 * Support for reading MSRs in the safe manner. (Instead of panic on #gp,
302 * return an error.)
303 */
ENTRY(rdmsr_safe)
/* int rdmsr_safe(u_int msr, uint64_t *data) */
	movl	PCPU(CURPCB),%ecx
	movl	$msr_onfault,PCB_ONFAULT(%ecx)	/* arm the #GP fault handler */

	movl	4(%esp),%ecx		/* %ecx = msr number */
	rdmsr				/* may fault; %edx:%eax = value */
	movl	8(%esp),%ecx		/* %ecx = data */
	movl	%eax,(%ecx)		/* store low 32 bits */
	movl	%edx,4(%ecx)		/* store high 32 bits */
	xorl	%eax,%eax		/* return 0: success */

	movl	PCPU(CURPCB),%ecx
	movl	%eax,PCB_ONFAULT(%ecx)	/* disarm the fault handler */

	ret
END(rdmsr_safe)
320
321 /*
322 * Support for writing MSRs in the safe manner. (Instead of panic on #gp,
323 * return an error.)
324 */
ENTRY(wrmsr_safe)
/* int wrmsr_safe(u_int msr, uint64_t data) */
	movl	PCPU(CURPCB),%ecx
	movl	$msr_onfault,PCB_ONFAULT(%ecx)	/* arm the #GP fault handler */

	movl	4(%esp),%ecx		/* %ecx = msr number */
	movl	8(%esp),%eax		/* %eax = low 32 bits of data */
	movl	12(%esp),%edx		/* %edx = high 32 bits of data */
	wrmsr				/* may fault */
	xorl	%eax,%eax		/* return 0: success */

	movl	PCPU(CURPCB),%ecx
	movl	%eax,PCB_ONFAULT(%ecx)	/* disarm the fault handler */

	ret
END(wrmsr_safe)
340
341 /*
342 * MSR operations fault handler
343 */
	ALIGN_TEXT
/*
 * Landed on via pcb_onfault when rdmsr/wrmsr raises #GP in
 * rdmsr_safe()/wrmsr_safe(); disarms the handler and makes the
 * interrupted function return EFAULT.
 */
msr_onfault:
	movl	PCPU(CURPCB),%ecx
	movl	$0,PCB_ONFAULT(%ecx)	/* the expected fault happened; disarm */
	movl	$EFAULT,%eax		/* report failure to the caller */
	ret
350
/*
 * Macros to stuff the CPU's Return Stack Buffer (RSB).  Each
 * call/label pair pushes one benign return address into the RSB
 * ("call" over a nop) and then discards the architectural return
 * address with "addl $4,%esp".  .altmacro is required for the %(ll)
 * numeric argument expansion used to generate unique labels.
 */
.altmacro
/* Emit the label "rsb_seq_<l>:". */
.macro rsb_seq_label l
rsb_seq_\l:
.endm
/* Emit "call rsb_seq_<l>". */
.macro rsb_call_label l
	call	rsb_seq_\l
.endm
/* Emit \count call/pop pairs, filling \count RSB entries. */
.macro rsb_seq count
	ll=1
	.rept	\count
	rsb_call_label	%(ll)
	nop
	rsb_seq_label	%(ll)
	addl	$4,%esp
	ll=ll+1
	.endr
.endm
368
/*
 * void rsb_flush(void)
 *
 * Overwrite all 32 entries of the Return Stack Buffer with benign
 * return targets so an attacker-primed RSB cannot steer speculative
 * returns (tail-called from handle_ibrs_entry).
 */
ENTRY(rsb_flush)
	rsb_seq	32
	ret
END(rsb_flush)
372
/*
 * Kernel-entry speculation hardening: when hw_ibrs_ibpb_active is set,
 * turn on the IBRS and STIBP bits in IA32_SPEC_CTRL, record that fact
 * per-CPU for handle_ibrs_exit, and flush the return stack buffer.
 */
ENTRY(handle_ibrs_entry)
	cmpb	$0,hw_ibrs_ibpb_active	/* mitigation enabled? */
	je	1f
	movl	$MSR_IA32_SPEC_CTRL,%ecx
	rdmsr				/* read-modify-write the MSR */
	orl	$(IA32_SPEC_CTRL_IBRS|IA32_SPEC_CTRL_STIBP),%eax
	/*
	 * NOTE(review): the >>32 term parallels the amd64 version; if both
	 * flag bits live in the low 32 bits this OR into %edx is a no-op.
	 */
	orl	$(IA32_SPEC_CTRL_IBRS|IA32_SPEC_CTRL_STIBP)>>32,%edx
	wrmsr
	movb	$1,PCPU(IBPB_SET)	/* tell handle_ibrs_exit to undo this */
	/*
	 * i386 does not implement SMEP.
	 */
1:	jmp	rsb_flush		/* tail call; rsb_flush's ret returns */
END(handle_ibrs_entry)
387
/*
 * Counterpart of handle_ibrs_entry: if the per-CPU IBPB_SET flag shows
 * the entry path set IBRS/STIBP, clear those bits in IA32_SPEC_CTRL
 * again and reset the flag.
 */
ENTRY(handle_ibrs_exit)
	cmpb	$0,PCPU(IBPB_SET)	/* did the entry path set the bits? */
	je	1f
	movl	$MSR_IA32_SPEC_CTRL,%ecx
	rdmsr				/* read-modify-write the MSR */
	andl	$~(IA32_SPEC_CTRL_IBRS|IA32_SPEC_CTRL_STIBP),%eax
	/* NOTE(review): parallels amd64; likely a no-op on i386 — confirm. */
	andl	$~((IA32_SPEC_CTRL_IBRS|IA32_SPEC_CTRL_STIBP)>>32),%edx
	wrmsr
	movb	$0,PCPU(IBPB_SET)
1:	ret
END(handle_ibrs_exit)
399
/* No-op MDS handler: used when no buffer flushing is performed. */
ENTRY(mds_handler_void)
	ret
END(mds_handler_void)
403
/*
 * MDS handler using VERW: on CPUs with the MD_CLEAR microcode update,
 * VERW with a memory operand flushes the affected microarchitectural
 * buffers as a side effect.  Any valid selector works; %ds is used.
 */
ENTRY(mds_handler_verw)
	subl	$4, %esp
	movw	%ds, (%esp)	/* place a valid selector in memory */
	verw	(%esp)		/* side effect: flush CPU buffers */
	addl	$4, %esp
	ret
END(mds_handler_verw)
411
/*
 * MDS buffer flush for Ivy Bridge-class CPUs: two cached loads from
 * the per-CPU scratch buffer, then 40 non-temporal 16-byte stores
 * into it.  %xmm0 is saved/restored via PCPU(MDS_TMP), and CR0.TS is
 * cleared around the SSE use and restored afterwards.
 * NOTE(review): sequence follows Intel's per-microarchitecture MDS
 * mitigation guidance; MDS_BUF layout is defined elsewhere.
 */
ENTRY(mds_handler_ivb)
	movl	%cr0, %eax
	testb	$CR0_TS, %al		/* CR0.TS set? */
	je	1f
	clts				/* clear TS so %xmm0 can be used */
1:	movl	PCPU(MDS_BUF), %edx
	movdqa	%xmm0, PCPU(MDS_TMP)	/* preserve caller's %xmm0 */
	pxor	%xmm0, %xmm0

	lfence
	orpd	(%edx), %xmm0
	orpd	(%edx), %xmm0
	mfence
	movl	$40, %ecx		/* 40 * 16 = 640 bytes of stores */
	addl	$16, %edx
2:	movntdq	%xmm0, (%edx)
	addl	$16, %edx
	decl	%ecx
	jnz	2b
	mfence

	movdqa	PCPU(MDS_TMP),%xmm0	/* restore caller's %xmm0 */
	testb	$CR0_TS, %al		/* %eax still holds the entry %cr0 */
	je	3f
	movl	%eax, %cr0		/* restore CR0.TS */
3:	ret
END(mds_handler_ivb)
439
/*
 * MDS buffer flush for Broadwell-class CPUs: 40 non-temporal 16-byte
 * stores into the per-CPU scratch buffer followed by a 1536-byte
 * in-place "rep movsb" copy within it.  Clobbers %ebx, %esi and %edi
 * — callers must treat them as scratch.  %xmm0 and CR0.TS are
 * saved/restored.
 * NOTE(review): sequence follows Intel's MDS mitigation guidance.
 */
ENTRY(mds_handler_bdw)
	movl	%cr0, %eax
	testb	$CR0_TS, %al		/* CR0.TS set? */
	je	1f
	clts				/* clear TS so %xmm0 can be used */
1:	movl	PCPU(MDS_BUF), %ebx
	movdqa	%xmm0, PCPU(MDS_TMP)	/* preserve caller's %xmm0 */
	pxor	%xmm0, %xmm0

	movl	%ebx, %edi		/* movsb destination = buffer start */
	movl	%ebx, %esi		/* movsb source = buffer start */
	movl	$40, %ecx		/* 40 * 16 = 640 bytes of stores */
2:	movntdq	%xmm0, (%ebx)
	addl	$16, %ebx
	decl	%ecx
	jnz	2b
	mfence
	movl	$1536, %ecx
	rep; movsb			/* in-place copy within the buffer */
	lfence

	movdqa	PCPU(MDS_TMP),%xmm0	/* restore caller's %xmm0 */
	testb	$CR0_TS, %al		/* %eax still holds the entry %cr0 */
	je	3f
	movl	%eax, %cr0		/* restore CR0.TS */
3:	ret
END(mds_handler_bdw)
467
/*
 * MDS buffer flush for Skylake-class CPUs (SSE variant): prime with
 * two loads from MDS_BUF64, clflushopt the 12 cache lines at the tail
 * of the 6144-byte per-CPU buffer, then overwrite the whole buffer
 * with "rep stosb".  Clobbers %eax/%ecx/%edx/%edi/%esi (scratch, as
 * in mds_handler_bdw).
 *
 * Fix: the original kept the saved %cr0 in %eax, but %eax is zeroed
 * below (clflushopt loop counter and stosb pattern), so the final
 * "testb $CR0_TS, %al" always saw 0 and CR0.TS was never restored
 * after clts.  Keep the saved %cr0 in %esi instead.
 */
ENTRY(mds_handler_skl_sse)
	movl	%cr0, %esi		/* %esi = saved %cr0; %eax dies below */
	testl	$CR0_TS, %esi		/* CR0.TS set? */
	je	1f
	clts				/* clear TS so %xmm0 can be used */
1:	movl	PCPU(MDS_BUF), %edi
	movl	PCPU(MDS_BUF64), %edx
	movdqa	%xmm0, PCPU(MDS_TMP)	/* preserve caller's %xmm0 */
	pxor	%xmm0, %xmm0

	lfence
	orpd	(%edx), %xmm0
	orpd	(%edx), %xmm0
	xorl	%eax, %eax
2:	clflushopt	5376(%edi, %eax, 8)	/* flush 12 lines at buffer tail */
	addl	$8, %eax
	cmpl	$8 * 12, %eax
	jb	2b
	sfence
	movl	$6144, %ecx
	xorl	%eax, %eax
	rep; stosb			/* zero the whole scratch buffer */
	mfence

	movdqa	PCPU(MDS_TMP), %xmm0	/* restore caller's %xmm0 */
	testl	$CR0_TS, %esi
	je	3f
	movl	%esi, %cr0		/* restore CR0.TS */
3:	ret
END(mds_handler_skl_sse)
498
/*
 * MDS buffer flush for Skylake-class CPUs (AVX variant); identical
 * structure to mds_handler_skl_sse but uses %ymm0.
 *
 * Fix: as in the SSE variant, the saved %cr0 was kept in %eax, which
 * is zeroed below, so CR0.TS was never restored after clts.  Keep the
 * saved %cr0 in %esi instead.
 */
ENTRY(mds_handler_skl_avx)
	movl	%cr0, %esi		/* %esi = saved %cr0; %eax dies below */
	testl	$CR0_TS, %esi		/* CR0.TS set? */
	je	1f
	clts				/* clear TS so %ymm0 can be used */
1:	movl	PCPU(MDS_BUF), %edi
	movl	PCPU(MDS_BUF64), %edx
	vmovdqa	%ymm0, PCPU(MDS_TMP)	/* preserve caller's %ymm0 */
	vpxor	%ymm0, %ymm0, %ymm0

	lfence
	vorpd	(%edx), %ymm0, %ymm0
	vorpd	(%edx), %ymm0, %ymm0
	xorl	%eax, %eax
2:	clflushopt	5376(%edi, %eax, 8)	/* flush 12 lines at buffer tail */
	addl	$8, %eax
	cmpl	$8 * 12, %eax
	jb	2b
	sfence
	movl	$6144, %ecx
	xorl	%eax, %eax
	rep; stosb			/* zero the whole scratch buffer */
	mfence

	vmovdqa	PCPU(MDS_TMP), %ymm0	/* restore caller's %ymm0 */
	testl	$CR0_TS, %esi
	je	3f
	movl	%esi, %cr0		/* restore CR0.TS */
3:	ret
END(mds_handler_skl_avx)
529
/*
 * MDS buffer flush for Skylake-class CPUs (AVX-512 variant); identical
 * structure to mds_handler_skl_sse but uses %zmm0.
 *
 * Fix: as in the SSE variant, the saved %cr0 was kept in %eax, which
 * is zeroed below, so CR0.TS was never restored after clts.  Keep the
 * saved %cr0 in %esi instead.
 */
ENTRY(mds_handler_skl_avx512)
	movl	%cr0, %esi		/* %esi = saved %cr0; %eax dies below */
	testl	$CR0_TS, %esi		/* CR0.TS set? */
	je	1f
	clts				/* clear TS so %zmm0 can be used */
1:	movl	PCPU(MDS_BUF), %edi
	movl	PCPU(MDS_BUF64), %edx
	vmovdqa64	%zmm0, PCPU(MDS_TMP)	/* preserve caller's %zmm0 */
	vpxord	%zmm0, %zmm0, %zmm0

	lfence
	vorpd	(%edx), %zmm0, %zmm0
	vorpd	(%edx), %zmm0, %zmm0
	xorl	%eax, %eax
2:	clflushopt	5376(%edi, %eax, 8)	/* flush 12 lines at buffer tail */
	addl	$8, %eax
	cmpl	$8 * 12, %eax
	jb	2b
	sfence
	movl	$6144, %ecx
	xorl	%eax, %eax
	rep; stosb			/* zero the whole scratch buffer */
	mfence

	vmovdqa64	PCPU(MDS_TMP), %zmm0	/* restore caller's %zmm0 */
	testl	$CR0_TS, %esi
	je	3f
	movl	%esi, %cr0		/* restore CR0.TS */
3:	ret
END(mds_handler_skl_avx512)
560
/*
 * MDS buffer flush for Silvermont-class Atom CPUs: 16 non-temporal
 * 16-byte stores (256 bytes) into the per-CPU scratch buffer — a
 * smaller footprint than the other handlers, per Intel's
 * per-microarchitecture MDS guidance.  %xmm0 and CR0.TS are
 * saved/restored.
 */
ENTRY(mds_handler_silvermont)
	movl	%cr0, %eax
	testb	$CR0_TS, %al		/* CR0.TS set? */
	je	1f
	clts				/* clear TS so %xmm0 can be used */
1:	movl	PCPU(MDS_BUF), %edx
	movdqa	%xmm0, PCPU(MDS_TMP)	/* preserve caller's %xmm0 */
	pxor	%xmm0, %xmm0

	movl	$16, %ecx		/* 16 * 16 = 256 bytes of stores */
2:	movntdq	%xmm0, (%edx)
	addl	$16, %edx
	decl	%ecx
	jnz	2b
	mfence

	movdqa	PCPU(MDS_TMP),%xmm0	/* restore caller's %xmm0 */
	testb	$CR0_TS, %al		/* %eax still holds the entry %cr0 */
	je	3f
	movl	%eax, %cr0		/* restore CR0.TS */
3:	ret
END(mds_handler_silvermont)
/* Cache object: 5cb6aa795b6f1aef1b958fdf89b0fc15 */