
FreeBSD/Linux Kernel Cross Reference
sys/amd64/amd64/support.S


/*-
 * Copyright (c) 2003 Peter Wemm.
 * Copyright (c) 1993 The Regents of the University of California.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. Neither the name of the University nor the names of its contributors
 *    may be used to endorse or promote products derived from this software
 *    without specific prior written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD: releng/12.0/sys/amd64/amd64/support.S 340688 2018-11-20 18:14:30Z mjg $
 */

#include "opt_ddb.h"

#include <machine/asmacros.h>
#include <machine/specialreg.h>
#include <machine/pmap.h>

#include "assym.inc"

        .text

/* Address: %rdi */
ENTRY(pagezero_std)
        PUSH_FRAME_POINTER
        movq    $PAGE_SIZE/8,%rcx
        xorl    %eax,%eax
        rep
        stosq
        POP_FRAME_POINTER
        ret
END(pagezero_std)

ENTRY(pagezero_erms)
        PUSH_FRAME_POINTER
        movq    $PAGE_SIZE,%rcx
        xorl    %eax,%eax
        rep
        stosb
        POP_FRAME_POINTER
        ret
END(pagezero_erms)
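/*
 * Illustrative C equivalents of the two variants above (a sketch, not
 * kernel code; names are hypothetical): pagezero_std stores PAGE_SIZE/8
 * 64-bit words, while pagezero_erms relies on Enhanced REP MOVSB/STOSB
 * (ERMS) hardware to make a byte-granular "rep stosb" at least as fast.
 *
 *	#include <stdint.h>
 *	#include <stddef.h>
 *
 *	#define PAGE_SIZE 4096
 *
 *	static void
 *	pagezero_std_c(void *page)
 *	{
 *		uint64_t *p = page;
 *
 *		for (size_t i = 0; i < PAGE_SIZE / 8; i++)
 *			p[i] = 0;		// matches "rep stosq"
 *	}
 *
 *	static void
 *	pagezero_erms_c(void *page)
 *	{
 *		unsigned char *p = page;
 *
 *		for (size_t i = 0; i < PAGE_SIZE; i++)
 *			p[i] = 0;		// matches "rep stosb"
 *	}
 */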

/*
 * pagecopy(%rdi=from, %rsi=to)
 */
ENTRY(pagecopy)
        PUSH_FRAME_POINTER
        movq    $PAGE_SIZE/8,%rcx
        movq    %rdi,%r9
        movq    %rsi,%rdi
        movq    %r9,%rsi
        rep
        movsq
        POP_FRAME_POINTER
        ret
END(pagecopy)

/* Address: %rdi */
ENTRY(sse2_pagezero)
        PUSH_FRAME_POINTER
        movq    $-PAGE_SIZE,%rdx
        subq    %rdx,%rdi
        xorl    %eax,%eax
        jmp     1f
        /*
         * The loop takes 29 bytes.  Ensure that it doesn't cross a 32-byte
         * cache line.
         */
        .p2align 5,0x90
1:
        movnti  %rax,(%rdi,%rdx)
        movnti  %rax,8(%rdi,%rdx)
        movnti  %rax,16(%rdi,%rdx)
        movnti  %rax,24(%rdi,%rdx)
        addq    $32,%rdx
        jne     1b
        sfence
        POP_FRAME_POINTER
        ret
END(sse2_pagezero)
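/*
 * A hedged C sketch of the non-temporal variant above, using the SSE2
 * _mm_stream_si64() intrinsic (64-bit targets) in place of movnti.
 * Non-temporal stores bypass the cache, so zeroing a page does not evict
 * useful L1/L2 contents; the trailing sfence orders the stores.
 *
 *	#include <immintrin.h>
 *
 *	#define PAGE_SIZE 4096
 *
 *	static void
 *	sse2_pagezero_c(void *page)
 *	{
 *		long long *p = page;
 *
 *		for (int i = 0; i < PAGE_SIZE / 8; i++)
 *			_mm_stream_si64(p + i, 0);	// movnti
 *		_mm_sfence();				// order the NT stores
 *	}
 */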
  102 
  103 /*
  104  * memcmpy(b1, b2, len)
  105  *         rdi,rsi,len
  106  */
  107 ENTRY(memcmp)
  108         PUSH_FRAME_POINTER
  109         cmpq    $16,%rdx
  110         jae     5f
  111 1:
  112         testq   %rdx,%rdx
  113         je      3f
  114         xorl    %ecx,%ecx
  115 2:
  116         movzbl  (%rdi,%rcx,1),%eax
  117         movzbl  (%rsi,%rcx,1),%r8d
  118         cmpb    %r8b,%al
  119         jne     4f
  120         addq    $1,%rcx
  121         cmpq    %rcx,%rdx
  122         jz      3f
  123         movzbl  (%rdi,%rcx,1),%eax
  124         movzbl  (%rsi,%rcx,1),%r8d
  125         cmpb    %r8b,%al
  126         jne     4f
  127         addq    $1,%rcx
  128         cmpq    %rcx,%rdx
  129         jz      3f
  130         movzbl  (%rdi,%rcx,1),%eax
  131         movzbl  (%rsi,%rcx,1),%r8d
  132         cmpb    %r8b,%al
  133         jne     4f
  134         addq    $1,%rcx
  135         cmpq    %rcx,%rdx
  136         jz      3f
  137         movzbl  (%rdi,%rcx,1),%eax
  138         movzbl  (%rsi,%rcx,1),%r8d
  139         cmpb    %r8b,%al
  140         jne     4f
  141         addq    $1,%rcx
  142         cmpq    %rcx,%rdx
  143         jne     2b
  144 3:
  145         xorl    %eax,%eax
  146         POP_FRAME_POINTER
  147         ret
  148 4:
  149         subl    %r8d,%eax
  150         POP_FRAME_POINTER
  151         ret
  152 5:
  153         cmpq    $32,%rdx
  154         jae     7f
  155 6:
  156         /*
  157          * 8 bytes
  158          */
  159         movq    (%rdi),%r8
  160         movq    (%rsi),%r9
  161         cmpq    %r8,%r9
  162         jne     1b
  163         leaq    8(%rdi),%rdi
  164         leaq    8(%rsi),%rsi
  165         subq    $8,%rdx
  166         cmpq    $8,%rdx
  167         jae     6b
  168         jl      1b
  169         jmp     3b
  170 7:
  171         /*
  172          * 32 bytes
  173          */
  174         movq    (%rsi),%r8
  175         movq    8(%rsi),%r9
  176         subq    (%rdi),%r8
  177         subq    8(%rdi),%r9
  178         or      %r8,%r9
  179         jnz     1b
  180 
  181         movq    16(%rsi),%r8
  182         movq    24(%rsi),%r9
  183         subq    16(%rdi),%r8
  184         subq    24(%rdi),%r9
  185         or      %r8,%r9
  186         jnz     1b
  187 
  188         leaq    32(%rdi),%rdi
  189         leaq    32(%rsi),%rsi
  190         subq    $32,%rdx
  191         cmpq    $32,%rdx
  192         jae     7b
  193         jnz     1b
  194         jmp     3b
  195 END(memcmp)
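/*
 * The structure of the memcmp above, as an illustrative C sketch (names
 * and layout are mine): short buffers (< 16 bytes) are compared a byte at
 * a time; longer buffers are compared 8 or 32 bytes at a time, and on a
 * mismatch the code falls back to the byte loop over the current chunk to
 * locate the differing byte and compute the signed result.
 *
 *	#include <stdint.h>
 *	#include <string.h>
 *
 *	static int
 *	memcmp_sketch(const unsigned char *a, const unsigned char *b,
 *	    size_t n)
 *	{
 *		while (n >= 8) {
 *			uint64_t x, y;
 *
 *			memcpy(&x, a, 8);
 *			memcpy(&y, b, 8);
 *			if (x != y)
 *				break;	// byte loop pinpoints the difference
 *			a += 8; b += 8; n -= 8;
 *		}
 *		for (size_t i = 0; i < n; i++)
 *			if (a[i] != b[i])
 *				return (a[i] - b[i]);
 *		return (0);
 *	}
 */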

/*
 * memmove(dst, src, cnt)
 *         rdi, rsi, rdx
 * Adapted from bcopy written by:
 *  ws@tools.de     (Wolfgang Solfrank, TooLs GmbH) +49-228-985800
 */

/*
 * Register state at entry is supposed to be as follows:
 * rdi - destination
 * rsi - source
 * rdx - count
 *
 * The macro possibly clobbers the above and: rcx, r8.
 * It does not clobber rax, r10 nor r11.
 */
.macro MEMMOVE erms overlap begin end
        \begin
.if \overlap == 1
        movq    %rdi,%r8
        subq    %rsi,%r8
        cmpq    %rcx,%r8        /* overlapping && src < dst? */
        jb      2f
.endif

        cmpq    $32,%rcx
        jb      1016f

        cmpq    $256,%rcx
        ja      1256f

1032:
        movq    (%rsi),%rdx
        movq    %rdx,(%rdi)
        movq    8(%rsi),%rdx
        movq    %rdx,8(%rdi)
        movq    16(%rsi),%rdx
        movq    %rdx,16(%rdi)
        movq    24(%rsi),%rdx
        movq    %rdx,24(%rdi)
        leaq    32(%rsi),%rsi
        leaq    32(%rdi),%rdi
        subq    $32,%rcx
        cmpq    $32,%rcx
        jae     1032b
        cmpb    $0,%cl
        jne     1016f
        \end
        ret
        ALIGN_TEXT
1016:
        cmpb    $16,%cl
        jl      1008f
        movq    (%rsi),%rdx
        movq    %rdx,(%rdi)
        movq    8(%rsi),%rdx
        movq    %rdx,8(%rdi)
        subb    $16,%cl
        jz      1000f
        leaq    16(%rsi),%rsi
        leaq    16(%rdi),%rdi
1008:
        cmpb    $8,%cl
        jl      1004f
        movq    (%rsi),%rdx
        movq    %rdx,(%rdi)
        subb    $8,%cl
        jz      1000f
        leaq    8(%rsi),%rsi
        leaq    8(%rdi),%rdi
1004:
        cmpb    $4,%cl
        jl      1002f
        movl    (%rsi),%edx
        movl    %edx,(%rdi)
        subb    $4,%cl
        jz      1000f
        leaq    4(%rsi),%rsi
        leaq    4(%rdi),%rdi
1002:
        cmpb    $2,%cl
        jl      1001f
        movw    (%rsi),%dx
        movw    %dx,(%rdi)
        subb    $2,%cl
        jz      1000f
        leaq    2(%rsi),%rsi
        leaq    2(%rdi),%rdi
1001:
        cmpb    $1,%cl
        jl      1000f
        movb    (%rsi),%dl
        movb    %dl,(%rdi)
1000:
        \end
        ret

        ALIGN_TEXT
1256:
.if \erms == 1
        rep
        movsb
.else
        shrq    $3,%rcx                         /* copy by 64-bit words */
        rep
        movsq
        movq    %rdx,%rcx
        andb    $7,%cl                          /* any bytes left? */
        jne     1004b
.endif
        \end
        ret

.if \overlap == 1
        /*
         * Copy backwards.
         */
        ALIGN_TEXT
2:
        addq    %rcx,%rdi
        addq    %rcx,%rsi

        cmpq    $32,%rcx
        jb      2016f

        cmpq    $256,%rcx
        ja      2256f

2032:
        movq    -8(%rsi),%rdx
        movq    %rdx,-8(%rdi)
        movq    -16(%rsi),%rdx
        movq    %rdx,-16(%rdi)
        movq    -24(%rsi),%rdx
        movq    %rdx,-24(%rdi)
        movq    -32(%rsi),%rdx
        movq    %rdx,-32(%rdi)
        leaq    -32(%rsi),%rsi
        leaq    -32(%rdi),%rdi
        subq    $32,%rcx
        cmpq    $32,%rcx
        jae     2032b
        cmpb    $0,%cl
        jne     2016f
        \end
        ret
        ALIGN_TEXT
2016:
        cmpb    $16,%cl
        jl      2008f
        movq    -8(%rsi),%rdx
        movq    %rdx,-8(%rdi)
        movq    -16(%rsi),%rdx
        movq    %rdx,-16(%rdi)
        subb    $16,%cl
        jz      2000f
        leaq    -16(%rsi),%rsi
        leaq    -16(%rdi),%rdi
2008:
        cmpb    $8,%cl
        jl      2004f
        movq    -8(%rsi),%rdx
        movq    %rdx,-8(%rdi)
        subb    $8,%cl
        jz      2000f
        leaq    -8(%rsi),%rsi
        leaq    -8(%rdi),%rdi
2004:
        cmpb    $4,%cl
        jl      2002f
        movl    -4(%rsi),%edx
        movl    %edx,-4(%rdi)
        subb    $4,%cl
        jz      2000f
        leaq    -4(%rsi),%rsi
        leaq    -4(%rdi),%rdi
2002:
        cmpb    $2,%cl
        jl      2001f
        movw    -2(%rsi),%dx
        movw    %dx,-2(%rdi)
        subb    $2,%cl
        jz      2000f
        leaq    -2(%rsi),%rsi
        leaq    -2(%rdi),%rdi
2001:
        cmpb    $1,%cl
        jl      2000f
        movb    -1(%rsi),%dl
        movb    %dl,-1(%rdi)
2000:
        \end
        ret
        ALIGN_TEXT
2256:
        decq    %rdi
        decq    %rsi
        std
.if \erms == 1
        rep
        movsb
.else
        andq    $7,%rcx                         /* any fractional bytes? */
        je      3f
        rep
        movsb
3:
        movq    %rdx,%rcx                       /* copy remainder by 64-bit words */
        shrq    $3,%rcx
        subq    $7,%rsi
        subq    $7,%rdi
        rep
        movsq
.endif
        cld
        \end
        ret
.endif
.endm
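/*
 * The overlap test at the top of MEMMOVE, sketched in C (illustrative
 * only): computing dst - src in unsigned modular arithmetic folds two
 * questions into one comparison -- the regions overlap with src below
 * dst exactly when that difference is smaller than the copy length, and
 * only then must the copy run backwards.
 *
 *	#include <stdint.h>
 *	#include <stddef.h>
 *
 *	static int
 *	must_copy_backwards(const void *dst, const void *src, size_t len)
 *	{
 *		return ((uintptr_t)dst - (uintptr_t)src < len);
 *	}
 */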

.macro MEMMOVE_BEGIN
        PUSH_FRAME_POINTER
        movq    %rdi,%rax
        movq    %rdx,%rcx
.endm

.macro MEMMOVE_END
        POP_FRAME_POINTER
.endm

ENTRY(memmove_std)
        MEMMOVE erms=0 overlap=1 begin=MEMMOVE_BEGIN end=MEMMOVE_END
END(memmove_std)

ENTRY(memmove_erms)
        MEMMOVE erms=1 overlap=1 begin=MEMMOVE_BEGIN end=MEMMOVE_END
END(memmove_erms)

/*
 * memcpy(dst, src, len)
 *        rdi, rsi, rdx
 *
 * Note: memcpy does not support overlapping copies
 */
ENTRY(memcpy_std)
        MEMMOVE erms=0 overlap=0 begin=MEMMOVE_BEGIN end=MEMMOVE_END
END(memcpy_std)

ENTRY(memcpy_erms)
        MEMMOVE erms=1 overlap=0 begin=MEMMOVE_BEGIN end=MEMMOVE_END
END(memcpy_erms)

/*
 * memset(dst, c,   len)
 *        rdi, rsi, rdx
 */
.macro MEMSET erms
        PUSH_FRAME_POINTER
        movq    %rdi,%rax
        movq    %rdx,%rcx
        movzbq  %sil,%r8
        movabs  $0x0101010101010101,%r10
        imulq   %r8,%r10

        cmpq    $32,%rcx
        jbe     101632f

        cmpq    $256,%rcx
        ja      1256f

103200:
        movq    %r10,(%rdi)
        movq    %r10,8(%rdi)
        movq    %r10,16(%rdi)
        movq    %r10,24(%rdi)
        leaq    32(%rdi),%rdi
        subq    $32,%rcx
        cmpq    $32,%rcx
        ja      103200b
        cmpb    $16,%cl
        ja      201632f
        movq    %r10,-16(%rdi,%rcx)
        movq    %r10,-8(%rdi,%rcx)
        POP_FRAME_POINTER
        ret
        ALIGN_TEXT
101632:
        cmpb    $16,%cl
        jl      100816f
201632:
        movq    %r10,(%rdi)
        movq    %r10,8(%rdi)
        movq    %r10,-16(%rdi,%rcx)
        movq    %r10,-8(%rdi,%rcx)
        POP_FRAME_POINTER
        ret
        ALIGN_TEXT
100816:
        cmpb    $8,%cl
        jl      100408f
        movq    %r10,(%rdi)
        movq    %r10,-8(%rdi,%rcx)
        POP_FRAME_POINTER
        ret
        ALIGN_TEXT
100408:
        cmpb    $4,%cl
        jl      100204f
        movl    %r10d,(%rdi)
        movl    %r10d,-4(%rdi,%rcx)
        POP_FRAME_POINTER
        ret
        ALIGN_TEXT
100204:
        cmpb    $2,%cl
        jl      100001f
        movw    %r10w,(%rdi)
        movw    %r10w,-2(%rdi,%rcx)
        POP_FRAME_POINTER
        ret
        ALIGN_TEXT
100001:
        cmpb    $0,%cl
        je      100000f
        movb    %r10b,(%rdi)
100000:
        POP_FRAME_POINTER
        ret
        ALIGN_TEXT
1256:
        movq    %rdi,%r9
        movq    %r10,%rax
        testl   $15,%edi
        jnz     3f
1:
.if \erms == 1
        rep
        stosb
        movq    %r9,%rax
.else
        movq    %rcx,%rdx
        shrq    $3,%rcx
        rep
        stosq
        movq    %r9,%rax
        andl    $7,%edx
        jnz     2f
        POP_FRAME_POINTER
        ret
2:
        movq    %r10,-8(%rdi,%rdx)
.endif
        POP_FRAME_POINTER
        ret
        ALIGN_TEXT
3:
        movq    %r10,(%rdi)
        movq    %r10,8(%rdi)
        movq    %rdi,%r8
        andq    $15,%r8
        leaq    -16(%rcx,%r8),%rcx
        neg     %r8
        leaq    16(%rdi,%r8),%rdi
        jmp     1b
.endm

ENTRY(memset_std)
        MEMSET erms=0
END(memset_std)

ENTRY(memset_erms)
        MEMSET erms=1
END(memset_erms)
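/*
 * The fill pattern in MEMSET above is built by broadcasting the low byte
 * of the second argument into all eight byte lanes with one multiply; a
 * C sketch of the same trick:
 *
 *	#include <stdint.h>
 *
 *	static uint64_t
 *	broadcast_byte(int c)
 *	{
 *		return ((uint64_t)(unsigned char)c * 0x0101010101010101ULL);
 *	}
 *
 * e.g. broadcast_byte(0xab) yields 0xabababababababab, which is then
 * stored 8, 16, or 32 bytes at a time.
 */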

/* fillw(pat, base, cnt) */
/*       %rdi,%rsi, %rdx */
ENTRY(fillw)
        PUSH_FRAME_POINTER
        movq    %rdi,%rax
        movq    %rsi,%rdi
        movq    %rdx,%rcx
        rep
        stosw
        POP_FRAME_POINTER
        ret
END(fillw)

/*****************************************************************************/
/* copyout and fubyte family                                                 */
/*****************************************************************************/
/*
 * Access user memory from inside the kernel.  These routines should be
 * the only places that do this.
 *
 * These routines set curpcb->pcb_onfault for the duration of their
 * execution.  If a protection violation occurs inside one of them, the
 * trap handler resumes at *curpcb->pcb_onfault instead of returning to
 * the faulting instruction.
 */
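/*
 * A simplified C sketch of the pcb_onfault protocol (illustrative; the
 * real logic lives in the trap handler): each accessor publishes a
 * recovery address before touching user memory, and the page-fault
 * handler, seeing pcb_onfault set, rewrites the trap frame to resume
 * there instead of panicking.
 *
 *	// In the fault path, roughly:
 *	if (curpcb->pcb_onfault != NULL) {
 *		frame->tf_rip = (long)curpcb->pcb_onfault;
 *		return;		// resume at copy_fault/fusufault
 *	}
 */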

.macro SMAP_DISABLE smap
.if     \smap
        stac
.endif
.endm


.macro SMAP_ENABLE smap
.if     \smap
        clac
.endif
.endm

.macro COPYINOUT_BEGIN
.endm

.macro COPYINOUT_END
        movq    %rax,PCB_ONFAULT(%r11)
        POP_FRAME_POINTER
.endm

.macro COPYINOUT_SMAP_END
        SMAP_ENABLE smap=1
        COPYINOUT_END
.endm

/*
 * copyout(from_kernel, to_user, len)
 *         %rdi,        %rsi,    %rdx
 */
.macro  COPYOUT smap erms
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%r11
        movq    $copy_fault,PCB_ONFAULT(%r11)

        /*
         * Check explicitly for non-user addresses.  If 486 write protection
         * is being used, this check is essential because we are in kernel
         * mode so the h/w does not provide any protection against writing
         * kernel addresses.
         */

        /*
         * First, prevent address wrapping.
         */
        movq    %rsi,%rax
        addq    %rdx,%rax
        jc      copy_fault
/*
 * XXX STOP USING VM_MAXUSER_ADDRESS.
 * It is an end address, not a max, so every time it is used correctly it
 * looks like there is an off by one error, and of course it caused an off
 * by one error in several places.
 */
        movq    $VM_MAXUSER_ADDRESS,%rcx
        cmpq    %rcx,%rax
        ja      copy_fault

        /*
         * Set return value to zero. Remaining failure mode goes through
         * copy_fault.
         */
        xorl    %eax,%eax

        /*
         * Set up arguments for MEMMOVE.
         */
        movq    %rdi,%r8
        movq    %rsi,%rdi
        movq    %r8,%rsi
        movq    %rdx,%rcx


        SMAP_DISABLE \smap
.if     \smap == 1
        MEMMOVE erms=\erms overlap=0 begin=COPYINOUT_BEGIN end=COPYINOUT_SMAP_END
.else
        MEMMOVE erms=\erms overlap=0 begin=COPYINOUT_BEGIN end=COPYINOUT_END
.endif
        /* NOTREACHED */
.endm

ENTRY(copyout_nosmap_std)
        COPYOUT smap=0 erms=0
END(copyout_nosmap_std)

ENTRY(copyout_smap_std)
        COPYOUT smap=1 erms=0
END(copyout_smap_std)

ENTRY(copyout_nosmap_erms)
        COPYOUT smap=0 erms=1
END(copyout_nosmap_erms)

ENTRY(copyout_smap_erms)
        COPYOUT smap=1 erms=1
END(copyout_smap_erms)
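/*
 * Typical use from C (a sketch; the wrapper name is hypothetical):
 * copyout() returns 0 on success or EFAULT, so callers just propagate
 * the error.
 *
 *	int
 *	give_value_to_user(void *uaddr)
 *	{
 *		int v = 42;
 *
 *		return (copyout(&v, uaddr, sizeof(v)));
 *	}
 */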

/*
 * copyin(from_user, to_kernel, len)
 *        %rdi,      %rsi,      %rdx
 */
.macro  COPYIN smap erms
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%r11
        movq    $copy_fault,PCB_ONFAULT(%r11)

        /*
         * make sure address is valid
         */
        movq    %rdi,%rax
        addq    %rdx,%rax
        jc      copy_fault
        movq    $VM_MAXUSER_ADDRESS,%rcx
        cmpq    %rcx,%rax
        ja      copy_fault

        xorl    %eax,%eax

        movq    %rdi,%r8
        movq    %rsi,%rdi
        movq    %r8,%rsi
        movq    %rdx,%rcx

        SMAP_DISABLE \smap
.if     \smap == 1
        MEMMOVE erms=\erms overlap=0 begin=COPYINOUT_BEGIN end=COPYINOUT_SMAP_END
.else
        MEMMOVE erms=\erms overlap=0 begin=COPYINOUT_BEGIN end=COPYINOUT_END
.endif
        /* NOTREACHED */
.endm

ENTRY(copyin_nosmap_std)
        COPYIN smap=0 erms=0
END(copyin_nosmap_std)

ENTRY(copyin_smap_std)
        COPYIN smap=1 erms=0
END(copyin_smap_std)

ENTRY(copyin_nosmap_erms)
        COPYIN smap=0 erms=1
END(copyin_nosmap_erms)

ENTRY(copyin_smap_erms)
        COPYIN smap=1 erms=1
END(copyin_smap_erms)
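/*
 * The two validity checks above, restated in C (a sketch): the add
 * detects uaddr + len wrapping past the top of the address space (the
 * "jc" path), and the compare rejects ranges extending past
 * VM_MAXUSER_ADDRESS into kernel space.
 *
 *	if ((uintptr_t)uaddr + len < (uintptr_t)uaddr ||	// carry set
 *	    (uintptr_t)uaddr + len > VM_MAXUSER_ADDRESS)	// kernel range
 *		return (EFAULT);	// via copy_fault
 */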

        ALIGN_TEXT
        /* Trap entry clears PSL.AC */
copy_fault:
        movq    $0,PCB_ONFAULT(%r11)
        movl    $EFAULT,%eax
        POP_FRAME_POINTER
        ret

/*
 * casueword32.  Compare and set user integer.  Returns -1 on fault,
 *        0 if access was successful.  Old value is written to *oldp.
 *        dst = %rdi, old = %esi, oldp = %rdx, new = %ecx
 */
ENTRY(casueword32_nosmap)
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%r8
        movq    $fusufault,PCB_ONFAULT(%r8)

        movq    $VM_MAXUSER_ADDRESS-4,%rax
        cmpq    %rax,%rdi                       /* verify address is valid */
        ja      fusufault

        movl    %esi,%eax                       /* old */
#ifdef SMP
        lock
#endif
        cmpxchgl %ecx,(%rdi)                    /* new = %ecx */

        /*
         * The old value is in %eax.  If the store succeeded it will be the
         * value we expected (old) from before the store, otherwise it will
         * be the current value.  Save %eax into %esi to prepare the return
         * value.
         */
        movl    %eax,%esi
        xorl    %eax,%eax
        movq    %rax,PCB_ONFAULT(%r8)

        /*
         * Access the oldp after the pcb_onfault is cleared, to correctly
         * catch corrupted pointer.
         */
        movl    %esi,(%rdx)                     /* oldp = %rdx */
        POP_FRAME_POINTER
        ret
END(casueword32_nosmap)

ENTRY(casueword32_smap)
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%r8
        movq    $fusufault,PCB_ONFAULT(%r8)

        movq    $VM_MAXUSER_ADDRESS-4,%rax
        cmpq    %rax,%rdi                       /* verify address is valid */
        ja      fusufault

        movl    %esi,%eax                       /* old */
        stac
#ifdef SMP
        lock
#endif
        cmpxchgl %ecx,(%rdi)                    /* new = %ecx */
        clac

        /*
         * The old value is in %eax.  If the store succeeded it will be the
         * value we expected (old) from before the store, otherwise it will
         * be the current value.  Save %eax into %esi to prepare the return
         * value.
         */
        movl    %eax,%esi
        xorl    %eax,%eax
        movq    %rax,PCB_ONFAULT(%r8)

        /*
         * Access the oldp after the pcb_onfault is cleared, to correctly
         * catch corrupted pointer.
         */
        movl    %esi,(%rdx)                     /* oldp = %rdx */
        POP_FRAME_POINTER
        ret
END(casueword32_smap)

/*
 * casueword.  Compare and set user long.  Returns -1 on fault,
 *        0 if access was successful.  Old value is written to *oldp.
 *        dst = %rdi, old = %rsi, oldp = %rdx, new = %rcx
 */
ENTRY(casueword_nosmap)
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%r8
        movq    $fusufault,PCB_ONFAULT(%r8)

        movq    $VM_MAXUSER_ADDRESS-4,%rax
        cmpq    %rax,%rdi                       /* verify address is valid */
        ja      fusufault

        movq    %rsi,%rax                       /* old */
#ifdef SMP
        lock
#endif
        cmpxchgq %rcx,(%rdi)                    /* new = %rcx */

        /*
         * The old value is in %rax.  If the store succeeded it will be the
         * value we expected (old) from before the store, otherwise it will
         * be the current value.
         */
        movq    %rax,%rsi
        xorl    %eax,%eax
        movq    %rax,PCB_ONFAULT(%r8)
        movq    %rsi,(%rdx)
        POP_FRAME_POINTER
        ret
END(casueword_nosmap)

ENTRY(casueword_smap)
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%r8
        movq    $fusufault,PCB_ONFAULT(%r8)

        movq    $VM_MAXUSER_ADDRESS-4,%rax
        cmpq    %rax,%rdi                       /* verify address is valid */
        ja      fusufault

        movq    %rsi,%rax                       /* old */
        stac
#ifdef SMP
        lock
#endif
        cmpxchgq %rcx,(%rdi)                    /* new = %rcx */
        clac

        /*
         * The old value is in %rax.  If the store succeeded it will be the
         * value we expected (old) from before the store, otherwise it will
         * be the current value.
         */
        movq    %rax,%rsi
        xorl    %eax,%eax
        movq    %rax,PCB_ONFAULT(%r8)
        movq    %rsi,(%rdx)
        POP_FRAME_POINTER
        ret
END(casueword_smap)
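/*
 * Usage sketch (compare kern_umtx.c for real callers): a return of -1
 * means the user address faulted; 0 means the access happened, and the
 * caller must compare *oldp against the expected value to learn whether
 * the CAS actually took effect.
 *
 *	u_long old;
 *
 *	if (casueword(uaddr, expect, &old, newval) == -1)
 *		return (EFAULT);	// faulted on the user address
 *	if (old != expect)
 *		...			// CAS lost; caller decides to retry
 */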

/*
 * Fetch (load) a 64-bit word, a 32-bit word, a 16-bit word, or an 8-bit
 * byte from user memory.
 * addr = %rdi, valp = %rsi
 */

ENTRY(fueword_nosmap)
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%rcx
        movq    $fusufault,PCB_ONFAULT(%rcx)

        movq    $VM_MAXUSER_ADDRESS-8,%rax
        cmpq    %rax,%rdi                       /* verify address is valid */
        ja      fusufault

        xorl    %eax,%eax
        movq    (%rdi),%r11
        movq    %rax,PCB_ONFAULT(%rcx)
        movq    %r11,(%rsi)
        POP_FRAME_POINTER
        ret
END(fueword_nosmap)

ENTRY(fueword_smap)
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%rcx
        movq    $fusufault,PCB_ONFAULT(%rcx)

        movq    $VM_MAXUSER_ADDRESS-8,%rax
        cmpq    %rax,%rdi                       /* verify address is valid */
        ja      fusufault

        xorl    %eax,%eax
        stac
        movq    (%rdi),%r11
        clac
        movq    %rax,PCB_ONFAULT(%rcx)
        movq    %r11,(%rsi)
        POP_FRAME_POINTER
        ret
END(fueword_smap)

ENTRY(fueword32_nosmap)
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%rcx
        movq    $fusufault,PCB_ONFAULT(%rcx)

        movq    $VM_MAXUSER_ADDRESS-4,%rax
        cmpq    %rax,%rdi                       /* verify address is valid */
        ja      fusufault

        xorl    %eax,%eax
        movl    (%rdi),%r11d
        movq    %rax,PCB_ONFAULT(%rcx)
        movl    %r11d,(%rsi)
        POP_FRAME_POINTER
        ret
END(fueword32_nosmap)

ENTRY(fueword32_smap)
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%rcx
        movq    $fusufault,PCB_ONFAULT(%rcx)

        movq    $VM_MAXUSER_ADDRESS-4,%rax
        cmpq    %rax,%rdi                       /* verify address is valid */
        ja      fusufault

        xorl    %eax,%eax
        stac
        movl    (%rdi),%r11d
        clac
        movq    %rax,PCB_ONFAULT(%rcx)
        movl    %r11d,(%rsi)
        POP_FRAME_POINTER
        ret
END(fueword32_smap)

ENTRY(fuword16_nosmap)
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%rcx
        movq    $fusufault,PCB_ONFAULT(%rcx)

        movq    $VM_MAXUSER_ADDRESS-2,%rax
        cmpq    %rax,%rdi
        ja      fusufault

        movzwl  (%rdi),%eax
        movq    $0,PCB_ONFAULT(%rcx)
        POP_FRAME_POINTER
        ret
END(fuword16_nosmap)

ENTRY(fuword16_smap)
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%rcx
        movq    $fusufault,PCB_ONFAULT(%rcx)

        movq    $VM_MAXUSER_ADDRESS-2,%rax
        cmpq    %rax,%rdi
        ja      fusufault

        stac
        movzwl  (%rdi),%eax
        clac
        movq    $0,PCB_ONFAULT(%rcx)
        POP_FRAME_POINTER
        ret
END(fuword16_smap)

ENTRY(fubyte_nosmap)
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%rcx
        movq    $fusufault,PCB_ONFAULT(%rcx)

        movq    $VM_MAXUSER_ADDRESS-1,%rax
        cmpq    %rax,%rdi
        ja      fusufault

        movzbl  (%rdi),%eax
        movq    $0,PCB_ONFAULT(%rcx)
        POP_FRAME_POINTER
        ret
END(fubyte_nosmap)

ENTRY(fubyte_smap)
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%rcx
        movq    $fusufault,PCB_ONFAULT(%rcx)

        movq    $VM_MAXUSER_ADDRESS-1,%rax
        cmpq    %rax,%rdi
        ja      fusufault

        stac
        movzbl  (%rdi),%eax
        clac
        movq    $0,PCB_ONFAULT(%rcx)
        POP_FRAME_POINTER
        ret
END(fubyte_smap)
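/*
 * Usage sketch: unlike the historical fuword(), fueword() separates the
 * fetched value from the error indication, so a legitimate -1 stored in
 * the user word is not mistaken for a fault.
 *
 *	long val;
 *
 *	if (fueword(uaddr, &val) == -1)
 *		return (EFAULT);	// bad user address
 *	// val now holds the 64-bit word read from user space
 */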

/*
 * Store a 64-bit word, a 32-bit word, a 16-bit word, or an 8-bit byte to
 * user memory.
 * addr = %rdi, value = %rsi
 */
ENTRY(suword_nosmap)
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%rcx
        movq    $fusufault,PCB_ONFAULT(%rcx)

        movq    $VM_MAXUSER_ADDRESS-8,%rax
        cmpq    %rax,%rdi                       /* verify address validity */
        ja      fusufault

        movq    %rsi,(%rdi)
        xorl    %eax,%eax
        movq    PCPU(CURPCB),%rcx
        movq    %rax,PCB_ONFAULT(%rcx)
        POP_FRAME_POINTER
        ret
END(suword_nosmap)

ENTRY(suword_smap)
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%rcx
        movq    $fusufault,PCB_ONFAULT(%rcx)

        movq    $VM_MAXUSER_ADDRESS-8,%rax
        cmpq    %rax,%rdi                       /* verify address validity */
        ja      fusufault

        stac
        movq    %rsi,(%rdi)
        clac
        xorl    %eax,%eax
        movq    PCPU(CURPCB),%rcx
        movq    %rax,PCB_ONFAULT(%rcx)
        POP_FRAME_POINTER
        ret
END(suword_smap)

ENTRY(suword32_nosmap)
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%rcx
        movq    $fusufault,PCB_ONFAULT(%rcx)

        movq    $VM_MAXUSER_ADDRESS-4,%rax
        cmpq    %rax,%rdi                       /* verify address validity */
        ja      fusufault

        movl    %esi,(%rdi)
        xorl    %eax,%eax
        movq    PCPU(CURPCB),%rcx
        movq    %rax,PCB_ONFAULT(%rcx)
        POP_FRAME_POINTER
        ret
END(suword32_nosmap)

ENTRY(suword32_smap)
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%rcx
        movq    $fusufault,PCB_ONFAULT(%rcx)

        movq    $VM_MAXUSER_ADDRESS-4,%rax
        cmpq    %rax,%rdi                       /* verify address validity */
        ja      fusufault

        stac
        movl    %esi,(%rdi)
        clac
        xorl    %eax,%eax
        movq    PCPU(CURPCB),%rcx
        movq    %rax,PCB_ONFAULT(%rcx)
        POP_FRAME_POINTER
        ret
END(suword32_smap)

ENTRY(suword16_nosmap)
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%rcx
        movq    $fusufault,PCB_ONFAULT(%rcx)

        movq    $VM_MAXUSER_ADDRESS-2,%rax
        cmpq    %rax,%rdi                       /* verify address validity */
        ja      fusufault

        movw    %si,(%rdi)
        xorl    %eax,%eax
        movq    PCPU(CURPCB),%rcx               /* restore trashed register */
        movq    %rax,PCB_ONFAULT(%rcx)
        POP_FRAME_POINTER
        ret
END(suword16_nosmap)

ENTRY(suword16_smap)
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%rcx
        movq    $fusufault,PCB_ONFAULT(%rcx)

        movq    $VM_MAXUSER_ADDRESS-2,%rax
        cmpq    %rax,%rdi                       /* verify address validity */
        ja      fusufault

        stac
        movw    %si,(%rdi)
        clac
        xorl    %eax,%eax
        movq    PCPU(CURPCB),%rcx               /* restore trashed register */
        movq    %rax,PCB_ONFAULT(%rcx)
        POP_FRAME_POINTER
        ret
END(suword16_smap)

ENTRY(subyte_nosmap)
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%rcx
        movq    $fusufault,PCB_ONFAULT(%rcx)

        movq    $VM_MAXUSER_ADDRESS-1,%rax
        cmpq    %rax,%rdi                       /* verify address validity */
        ja      fusufault

        movl    %esi,%eax
        movb    %al,(%rdi)
        xorl    %eax,%eax
        movq    PCPU(CURPCB),%rcx               /* restore trashed register */
        movq    %rax,PCB_ONFAULT(%rcx)
        POP_FRAME_POINTER
        ret
END(subyte_nosmap)

ENTRY(subyte_smap)
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%rcx
        movq    $fusufault,PCB_ONFAULT(%rcx)

        movq    $VM_MAXUSER_ADDRESS-1,%rax
        cmpq    %rax,%rdi                       /* verify address validity */
        ja      fusufault

        movl    %esi,%eax
        stac
        movb    %al,(%rdi)
        clac
        xorl    %eax,%eax
        movq    PCPU(CURPCB),%rcx               /* restore trashed register */
        movq    %rax,PCB_ONFAULT(%rcx)
        POP_FRAME_POINTER
        ret
END(subyte_smap)

        ALIGN_TEXT
        /* Fault entry clears PSL.AC */
fusufault:
        movq    PCPU(CURPCB),%rcx
        xorl    %eax,%eax
        movq    %rax,PCB_ONFAULT(%rcx)
        decq    %rax
        POP_FRAME_POINTER
        ret

/*
 * copyinstr(from, to, maxlen, int *lencopied)
 *           %rdi, %rsi, %rdx, %rcx
 *
 *      copy a string from 'from' to 'to', stop when a 0 character is reached.
 *      return ENAMETOOLONG if string is longer than maxlen, and
 *      EFAULT on protection violations. If lencopied is non-zero,
 *      return the actual length in *lencopied.
 */
.macro COPYINSTR smap
        PUSH_FRAME_POINTER
        movq    %rdx,%r8                        /* %r8 = maxlen */
        movq    PCPU(CURPCB),%r9
        movq    $cpystrflt,PCB_ONFAULT(%r9)

        movq    $VM_MAXUSER_ADDRESS,%rax

        /* make sure 'from' is within bounds */
        subq    %rdi,%rax
        jbe     cpystrflt

        SMAP_DISABLE \smap

        /* restrict maxlen to <= VM_MAXUSER_ADDRESS-from */
        cmpq    %rdx,%rax
        jb      8f
1:
        incq    %rdx
2:
        decq    %rdx
.if \smap == 0
        jz      copyinstr_toolong
.else
        jz      copyinstr_toolong_smap
.endif

        movb    (%rdi),%al
        movb    %al,(%rsi)
        incq    %rsi
        incq    %rdi
        testb   %al,%al
        jnz     2b

        SMAP_ENABLE \smap

        /* Success -- 0 byte reached */
        decq    %rdx
        xorl    %eax,%eax

        /* set *lencopied and return %eax */
        movq    %rax,PCB_ONFAULT(%r9)

        testq   %rcx,%rcx
        jz      3f
        subq    %rdx,%r8
        movq    %r8,(%rcx)
3:
        POP_FRAME_POINTER
        ret
        ALIGN_TEXT
8:
        movq    %rax,%rdx
        movq    %rax,%r8
        jmp 1b

.endm

ENTRY(copyinstr_nosmap)
        COPYINSTR smap=0
END(copyinstr_nosmap)

ENTRY(copyinstr_smap)
        COPYINSTR smap=1
END(copyinstr_smap)

cpystrflt:
        /* Fault entry clears PSL.AC */
        movl    $EFAULT,%eax
cpystrflt_x:
        /* set *lencopied and return %eax */
        movq    $0,PCB_ONFAULT(%r9)

        testq   %rcx,%rcx
        jz      1f
        subq    %rdx,%r8
        movq    %r8,(%rcx)
1:
        POP_FRAME_POINTER
        ret

copyinstr_toolong_smap:
        clac
copyinstr_toolong:
        /* rdx is zero - return ENAMETOOLONG or EFAULT */
        movq    $VM_MAXUSER_ADDRESS,%rax
        cmpq    %rax,%rdi
        jae     cpystrflt
        movl    $ENAMETOOLONG,%eax
        jmp     cpystrflt_x

/*
 * copystr(from, to, maxlen, int *lencopied)
 *         %rdi, %rsi, %rdx, %rcx
 */
ENTRY(copystr)
        PUSH_FRAME_POINTER
        movq    %rdx,%r8                        /* %r8 = maxlen */

        incq    %rdx
1:
        decq    %rdx
        jz      4f
        movb    (%rdi),%al
        movb    %al,(%rsi)
        incq    %rsi
        incq    %rdi
        testb   %al,%al
        jnz     1b

        /* Success -- 0 byte reached */
        decq    %rdx
        xorl    %eax,%eax
2:
        testq   %rcx,%rcx
        jz      3f
        /* set *lencopied and return %rax */
        subq    %rdx,%r8
        movq    %r8,(%rcx)
3:
        POP_FRAME_POINTER
        ret
4:
        /* rdx is zero -- return ENAMETOOLONG */
        movl    $ENAMETOOLONG,%eax
        jmp     2b
END(copystr)
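/*
 * Usage sketch (cf. namei()): fetch a NUL-terminated path from user
 * space with an upper bound, learning the copied length on success.
 *
 *	char path[MAXPATHLEN];
 *	size_t done;
 *	int error;
 *
 *	error = copyinstr(upath, path, sizeof(path), &done);
 *	if (error != 0)
 *		return (error);		// EFAULT or ENAMETOOLONG
 */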

/*
 * Handling of special amd64 registers and descriptor tables etc
 */
/* void lgdt(struct region_descriptor *rdp); */
ENTRY(lgdt)
        /* reload the descriptor table */
        lgdt    (%rdi)

        /* flush the prefetch queue */
        jmp     1f
        nop
1:
        movl    $KDSEL,%eax
        movl    %eax,%ds
        movl    %eax,%es
        movl    %eax,%fs        /* Beware, use wrmsr to set 64 bit base */
        movl    %eax,%gs
        movl    %eax,%ss

        /* reload code selector by turning return into intersegmental return */
        popq    %rax
        pushq   $KCSEL
        pushq   %rax
        MEXITCOUNT
        lretq
END(lgdt)

/*****************************************************************************/
/* setjump, longjump                                                         */
/*****************************************************************************/

ENTRY(setjmp)
        movq    %rbx,0(%rdi)                    /* save rbx */
        movq    %rsp,8(%rdi)                    /* save rsp */
        movq    %rbp,16(%rdi)                   /* save rbp */
        movq    %r12,24(%rdi)                   /* save r12 */
        movq    %r13,32(%rdi)                   /* save r13 */
        movq    %r14,40(%rdi)                   /* save r14 */
        movq    %r15,48(%rdi)                   /* save r15 */
        movq    0(%rsp),%rdx                    /* get rta */
        movq    %rdx,56(%rdi)                   /* save rip */
        xorl    %eax,%eax                       /* return(0); */
        ret
END(setjmp)

ENTRY(longjmp)
        movq    0(%rdi),%rbx                    /* restore rbx */
        movq    8(%rdi),%rsp                    /* restore rsp */
        movq    16(%rdi),%rbp                   /* restore rbp */
        movq    24(%rdi),%r12                   /* restore r12 */
        movq    32(%rdi),%r13                   /* restore r13 */
        movq    40(%rdi),%r14                   /* restore r14 */
        movq    48(%rdi),%r15                   /* restore r15 */
        movq    56(%rdi),%rdx                   /* get rta */
        movq    %rdx,0(%rsp)                    /* put in return frame */
        xorl    %eax,%eax                       /* return(1); */
        incl    %eax
        ret
END(longjmp)
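/*
 * Usage sketch (assuming the in-kernel prototypes from <sys/systm.h>;
 * hedged, not verbatim from any caller): the kernel setjmp()/longjmp()
 * pair saves only the callee-saved registers plus %rsp and the return
 * address, and is used e.g. by ddb to recover from faults while
 * examining memory.
 *
 *	static struct _jmp_buf jb;
 *
 *	if (setjmp(&jb) != 0)
 *		return;			// resumed here via longjmp()
 *	...				// code that may fault
 *	longjmp(&jb, 1);		// never returns; setjmp() yields 1
 */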

/*
 * Support for reading MSRs in the safe manner.  (Instead of panic on #gp,
 * return an error.)
 */
ENTRY(rdmsr_safe)
/* int rdmsr_safe(u_int msr, uint64_t *data) */
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%r8
        movq    $msr_onfault,PCB_ONFAULT(%r8)
        movl    %edi,%ecx
        rdmsr                   /* Read MSR pointed by %ecx. Returns
                                   high 32 bits in %edx, low 32 bits in %eax */
        salq    $32,%rdx        /* shift %rdx left by 32 bits */
        movl    %eax,%eax       /* zero-extend %eax -> %rax */
        orq     %rdx,%rax
        movq    %rax,(%rsi)
        xorq    %rax,%rax
        movq    %rax,PCB_ONFAULT(%r8)
        POP_FRAME_POINTER
        ret

/*
 * Support for writing MSRs in the safe manner.  (Instead of panic on #gp,
 * return an error.)
 */
ENTRY(wrmsr_safe)
/* int wrmsr_safe(u_int msr, uint64_t data) */
        PUSH_FRAME_POINTER
        movq    PCPU(CURPCB),%r8
        movq    $msr_onfault,PCB_ONFAULT(%r8)
        movl    %edi,%ecx
        movl    %esi,%eax
        sarq    $32,%rsi
        movl    %esi,%edx
        wrmsr                   /* Write MSR pointed by %ecx. Accepts
                                   high 32 bits in %edx, low 32 bits in %eax. */
        xorq    %rax,%rax
        movq    %rax,PCB_ONFAULT(%r8)
        POP_FRAME_POINTER
        ret

/*
 * MSR operations fault handler
 */
        ALIGN_TEXT
msr_onfault:
        movq    $0,PCB_ONFAULT(%r8)
        movl    $EFAULT,%eax
        POP_FRAME_POINTER
        ret
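/*
 * Usage sketch: probe an MSR that may not exist without panicking on
 * the resulting #GP; a nonzero return (EFAULT, via msr_onfault) means
 * the access faulted.
 *
 *	uint64_t val;
 *
 *	if (rdmsr_safe(msr, &val) != 0)
 *		return (EFAULT);	// MSR faulted; msr_onfault ran
 */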
 1441 
 1442 /*
 1443  * void pmap_pti_pcid_invalidate(uint64_t ucr3, uint64_t kcr3);
 1444  * Invalidates address space addressed by ucr3, then returns to kcr3.
 1445  * Done in assembler to ensure no other memory accesses happen while
 1446  * on ucr3.
 1447  */
 1448         ALIGN_TEXT
 1449 ENTRY(pmap_pti_pcid_invalidate)
 1450         pushfq
 1451         cli
 1452         movq    %rdi,%cr3       /* to user page table */
 1453         movq    %rsi,%cr3       /* back to kernel */
 1454         popfq
 1455         retq
 1456 
 1457 /*
 1458  * void pmap_pti_pcid_invlpg(uint64_t ucr3, uint64_t kcr3, vm_offset_t va);
 1459  * Invalidates virtual address va in address space ucr3, then returns to kcr3.
 1460  */
 1461         ALIGN_TEXT
 1462 ENTRY(pmap_pti_pcid_invlpg)
 1463         pushfq
 1464         cli
 1465         movq    %rdi,%cr3       /* to user page table */
 1466         invlpg  (%rdx)
 1467         movq    %rsi,%cr3       /* back to kernel */
 1468         popfq
 1469         retq
 1470 
 1471 /*
 1472  * void pmap_pti_pcid_invlrng(uint64_t ucr3, uint64_t kcr3, vm_offset_t sva,
 1473  *     vm_offset_t eva);
 1474  * Invalidates virtual addresses between sva and eva in address space ucr3,
 1475  * then returns to kcr3.
 1476  */
 1477         ALIGN_TEXT
 1478 ENTRY(pmap_pti_pcid_invlrng)
 1479         pushfq
 1480         cli
 1481         movq    %rdi,%cr3       /* to user page table */
 1482 1:      invlpg  (%rdx)
 1483         addq    $PAGE_SIZE,%rdx
 1484         cmpq    %rdx,%rcx
 1485         ja      1b
 1486         movq    %rsi,%cr3       /* back to kernel */
 1487         popfq
 1488         retq
 1489 
 1490         .altmacro
 1491         .macro  ibrs_seq_label l
 1492 handle_ibrs_\l:
 1493         .endm
 1494         .macro  ibrs_call_label l
 1495         call    handle_ibrs_\l
 1496         .endm
 1497         .macro  ibrs_seq count
 1498         ll=1
 1499         .rept   \count
 1500         ibrs_call_label %(ll)
 1501         nop
 1502         ibrs_seq_label %(ll)
 1503         addq    $8,%rsp
 1504         ll=ll+1
 1505         .endr
 1506         .endm
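/*
 * For reference, "ibrs_seq 2" expands (via .altmacro) to the following
 * return-stack-buffer stuffing sequence; each call pushes a return
 * address that is immediately discarded with "addq $8,%rsp", leaving the
 * RSB filled with benign targets:
 *
 *	call	handle_ibrs_1
 *	nop
 * handle_ibrs_1:
 *	addq	$8,%rsp
 *	call	handle_ibrs_2
 *	nop
 * handle_ibrs_2:
 *	addq	$8,%rsp
 */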

/* all callers already saved %rax, %rdx, and %rcx */
ENTRY(handle_ibrs_entry)
        cmpb    $0,hw_ibrs_active(%rip)
        je      1f
        movl    $MSR_IA32_SPEC_CTRL,%ecx
        rdmsr
        orl     $(IA32_SPEC_CTRL_IBRS|IA32_SPEC_CTRL_STIBP),%eax
        orl     $(IA32_SPEC_CTRL_IBRS|IA32_SPEC_CTRL_STIBP)>>32,%edx
        wrmsr
        movb    $1,PCPU(IBPB_SET)
        testl   $CPUID_STDEXT_SMEP,cpu_stdext_feature(%rip)
        jne     1f
        ibrs_seq 32
1:      ret
END(handle_ibrs_entry)

ENTRY(handle_ibrs_exit)
        cmpb    $0,PCPU(IBPB_SET)
        je      1f
        movl    $MSR_IA32_SPEC_CTRL,%ecx
        rdmsr
        andl    $~(IA32_SPEC_CTRL_IBRS|IA32_SPEC_CTRL_STIBP),%eax
        andl    $~((IA32_SPEC_CTRL_IBRS|IA32_SPEC_CTRL_STIBP)>>32),%edx
        wrmsr
        movb    $0,PCPU(IBPB_SET)
1:      ret
END(handle_ibrs_exit)

/* registers-neutral version, but needs stack */
ENTRY(handle_ibrs_exit_rs)
        cmpb    $0,PCPU(IBPB_SET)
        je      1f
        pushq   %rax
        pushq   %rdx
        pushq   %rcx
        movl    $MSR_IA32_SPEC_CTRL,%ecx
        rdmsr
        andl    $~(IA32_SPEC_CTRL_IBRS|IA32_SPEC_CTRL_STIBP),%eax
        andl    $~((IA32_SPEC_CTRL_IBRS|IA32_SPEC_CTRL_STIBP)>>32),%edx
        wrmsr
        popq    %rcx
        popq    %rdx
        popq    %rax
        movb    $0,PCPU(IBPB_SET)
1:      ret
END(handle_ibrs_exit_rs)

        .noaltmacro

/*
 * Flush L1D cache.  Load enough of the data from the kernel text
 * to flush existing L1D content.
 *
 * N.B. The function does not follow ABI calling conventions, it corrupts %rbx.
 * The vmm.ko caller expects that only %rax, %rdx, %rbx, %rcx, %r9, and %rflags
 * registers are clobbered.  The NMI handler caller only needs %r13 preserved.
 */
ENTRY(flush_l1d_sw)
#define L1D_FLUSH_SIZE  (64 * 1024)
        movq    $KERNBASE, %r9
        movq    $-L1D_FLUSH_SIZE, %rcx
        /*
         * pass 1: Preload TLB.
         * Kernel text is mapped using superpages.  TLB preload is
         * done for the benefit of older CPUs which split 2M page
         * into 4k TLB entries.
         */
1:      movb    L1D_FLUSH_SIZE(%r9, %rcx), %al
        addq    $PAGE_SIZE, %rcx
        jne     1b
        xorl    %eax, %eax
        cpuid
        movq    $-L1D_FLUSH_SIZE, %rcx
        /* pass 2: Read each cache line. */
2:      movb    L1D_FLUSH_SIZE(%r9, %rcx), %al
        addq    $64, %rcx
        jne     2b
        lfence
        ret
#undef  L1D_FLUSH_SIZE
END(flush_l1d_sw)
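/*
 * The two passes above, sketched in C (illustrative; "kernbase" stands
 * for the KERNBASE text mapping and "sink" absorbs the loads): pass 1
 * touches one byte per page so older CPUs populate their 4k TLB entries
 * up front, then pass 2 reads one byte per 64-byte line to pull 64KB
 * through the L1D.
 *
 *	volatile unsigned char sink;
 *
 *	for (size_t off = 0; off < 64 * 1024; off += PAGE_SIZE)
 *		sink = kernbase[off];	// pass 1: preload TLB
 *	// cpuid serializes here in the assembly version
 *	for (size_t off = 0; off < 64 * 1024; off += 64)
 *		sink = kernbase[off];	// pass 2: fill the L1D
 *	// lfence then orders the loads in the assembly version
 */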
