The Design and Implementation of the FreeBSD Operating System, Second Edition
Now available: The Design and Implementation of the FreeBSD Operating System (Second Edition)


[ source navigation ] [ diff markup ] [ identifier search ] [ freetext search ] [ file search ] [ list types ] [ track identifier ]

FreeBSD/Linux Kernel Cross Reference
sys/i386/i386/support.s

Version: -  FREEBSD  -  FREEBSD-13-STABLE  -  FREEBSD-13-0  -  FREEBSD-12-STABLE  -  FREEBSD-12-0  -  FREEBSD-11-STABLE  -  FREEBSD-11-0  -  FREEBSD-10-STABLE  -  FREEBSD-10-0  -  FREEBSD-9-STABLE  -  FREEBSD-9-0  -  FREEBSD-8-STABLE  -  FREEBSD-8-0  -  FREEBSD-7-STABLE  -  FREEBSD-7-0  -  FREEBSD-6-STABLE  -  FREEBSD-6-0  -  FREEBSD-5-STABLE  -  FREEBSD-5-0  -  FREEBSD-4-STABLE  -  FREEBSD-3-STABLE  -  FREEBSD22  -  l41  -  OPENBSD  -  linux-2.6  -  MK84  -  PLAN9  -  xnu-8792 
SearchContext: -  none  -  3  -  10 

    1 /*-
    2  * Copyright (c) 1993 The Regents of the University of California.
    3  * All rights reserved.
    4  *
    5  * Redistribution and use in source and binary forms, with or without
    6  * modification, are permitted provided that the following conditions
    7  * are met:
    8  * 1. Redistributions of source code must retain the above copyright
    9  *    notice, this list of conditions and the following disclaimer.
   10  * 2. Redistributions in binary form must reproduce the above copyright
   11  *    notice, this list of conditions and the following disclaimer in the
   12  *    documentation and/or other materials provided with the distribution.
   13  * 4. Neither the name of the University nor the names of its contributors
   14  *    may be used to endorse or promote products derived from this software
   15  *    without specific prior written permission.
   16  *
   17  * THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND
   18  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   19  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   20  * ARE DISCLAIMED.  IN NO EVENT SHALL THE REGENTS OR CONTRIBUTORS BE LIABLE
   21  * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
   22  * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
   23  * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
   24  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
   25  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
   26  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
   27  * SUCH DAMAGE.
   28  *
   29  * $FreeBSD$
   30  */
   31 
   32 #include "opt_npx.h"
   33 
   34 #include <machine/asmacros.h>
   35 #include <machine/cputypes.h>
   36 #include <machine/intr_machdep.h>
   37 #include <machine/pmap.h>
   38 #include <machine/specialreg.h>
   39 
   40 #include "assym.s"
   41 
   42 #define IDXSHIFT        10
   43 
    44         .data
    45         ALIGN_DATA
/* Interrupt counters: INTRCNT_COUNT 32-bit slots; eintrcnt marks the end. */
    46         .globl  intrcnt, eintrcnt
    47 intrcnt:
    48         .space  INTRCNT_COUNT * 4
    49 eintrcnt:
    50 
/* Interrupt names: one (MAXCOMLEN + 1)-byte slot per counter; eintrnames ends it. */
    51         .globl  intrnames, eintrnames
    52 intrnames:
    53         .space  INTRCNT_COUNT * (MAXCOMLEN + 1)
    54 eintrnames:
   56         .text
   57 
    58 /*
    59  * bcopy family
    60  * void bzero(void *buf, u_int len)
    61  *
    62  * Zero len bytes at buf: len/4 longword stores, then the <= 3 leftover
    63  * bytes.  Clobbers %eax, %ecx; preserves callee-saved %edi via push/pop.
    61  */
    62 ENTRY(bzero)
    63         pushl   %edi
    64         movl    8(%esp),%edi            /* %edi = buf */
    65         movl    12(%esp),%ecx           /* %ecx = len */
    66         xorl    %eax,%eax               /* fill pattern: zero */
    67         shrl    $2,%ecx                 /* store len / 4 longwords */
    68         cld
    69         rep
    70         stosl
    71         movl    12(%esp),%ecx
    72         andl    $3,%ecx                 /* then the remaining len & 3 bytes */
    73         rep
    74         stosb
    75         popl    %edi
    76         ret
    77 END(bzero)      
   78         
/*
 * void sse2_pagezero(void *page)
 *
 * Zero a 4096-byte page with MOVNTI non-temporal stores so the zeroed
 * page does not displace useful data from the caches.  The SFENCE makes
 * the weakly-ordered MOVNTI stores globally visible before return.
 */
    79 ENTRY(sse2_pagezero)
    80         pushl   %ebx
    81         movl    8(%esp),%ecx            /* %ecx = page start */
    82         movl    %ecx,%eax
    83         addl    $4096,%eax              /* %eax = end address */
    84         xor     %ebx,%ebx               /* zero source for movnti */
    85 1:
    86         movnti  %ebx,(%ecx)
    87         addl    $4,%ecx
    88         cmpl    %ecx,%eax
    89         jne     1b
    90         sfence
    91         popl    %ebx
    92         ret
    93 END(sse2_pagezero)
   94 
/*
 * void i686_pagezero(void *page)
 *
 * Zero a page (1024 longwords) while avoiding writes to longwords that
 * are already zero: REPE SCASL scans for the first nonzero longword and
 * only dirty spans are rewritten, reducing write traffic on mostly-zero
 * pages.
 */
    95 ENTRY(i686_pagezero)
    96         pushl   %edi
    97         pushl   %ebx
    98 
    99         movl    12(%esp),%edi           /* %edi = page */
   100         movl    $1024,%ecx              /* %ecx = longwords remaining */
   101         cld
   102 
   103         ALIGN_TEXT
   104 1:
   105         xorl    %eax,%eax
   106         repe                            /* scan for a nonzero longword */
   107         scasl
   108         jnz     2f
   109 
   110         popl    %ebx                    /* whole page already zero */
   111         popl    %edi
   112         ret
   113 
   114         ALIGN_TEXT
   115 
   116 2:
   117         incl    %ecx                    /* back up to the nonzero longword */
   118         subl    $4,%edi
   119 
   120         movl    %ecx,%edx               /* %edx = longwords remaining */
   121         cmpl    $16,%ecx
   122 
   123         jge     3f                      /* >= 16 left: zero them all below */
   124 
   125         movl    %edi,%ebx
   126         andl    $0x3f,%ebx              /* byte offset within 64-byte line */
   127         shrl    %ebx
   128         shrl    %ebx                    /* -> longword offset within line */
   129         movl    $16,%ecx
   130         subl    %ebx,%ecx               /* zero only to end of this line */
   131 
   132 3:
   133         subl    %ecx,%edx               /* %edx = remaining after this store */
   134         rep
   135         stosl
   136 
   137         movl    %edx,%ecx
   138         testl   %edx,%edx
   139         jnz     1b                      /* rescan the rest of the page */
   140 
   141         popl    %ebx
   142         popl    %edi
   143         ret
   144 END(i686_pagezero)
  145 
   146 /* fillw(pat, base, cnt) -- store cnt copies of the 16-bit pattern pat at base */
   147 ENTRY(fillw)
   148         pushl   %edi
   149         movl    8(%esp),%eax            /* %ax = pat */
   150         movl    12(%esp),%edi           /* %edi = base */
   151         movl    16(%esp),%ecx           /* %ecx = cnt (words) */
   152         cld
   153         rep
   154         stosw
   155         popl    %edi
   156         ret
   157 END(fillw)
  158 
/*
 * bcopyb(src, dst, cnt) -- byte-at-a-time bcopy that handles overlap.
 * Copies forward unless the regions overlap with src < dst, in which
 * case it copies backward (STD) and restores DF with CLD before return.
 */
   159 ENTRY(bcopyb)
   160         pushl   %esi
   161         pushl   %edi
   162         movl    12(%esp),%esi           /* %esi = src */
   163         movl    16(%esp),%edi           /* %edi = dst */
   164         movl    20(%esp),%ecx           /* %ecx = cnt */
   165         movl    %edi,%eax
   166         subl    %esi,%eax
   167         cmpl    %ecx,%eax                       /* overlapping && src < dst? */
   168         jb      1f
   169         cld                                     /* nope, copy forwards */
   170         rep
   171         movsb
   172         popl    %edi
   173         popl    %esi
   174         ret
   175 
   176         ALIGN_TEXT
   177 1:
   178         addl    %ecx,%edi                       /* copy backwards. */
   179         addl    %ecx,%esi
   180         decl    %edi                    /* point at last byte of each region */
   181         decl    %esi
   182         std
   183         rep
   184         movsb
   185         popl    %edi
   186         popl    %esi
   187         cld                             /* restore direction flag for callers */
   188         ret
   189 END(bcopyb)
  190 
   191 /*
   192  * bcopy(src, dst, cnt)
   193  *  ws@tools.de     (Wolfgang Solfrank, TooLs GmbH) +49-228-985800
   194  *
   195  * Overlap-safe copy.  Forward copies go longword-wise then the <= 3
   196  * leftover bytes; the backward (overlapping, src < dst) path copies the
   197  * fractional bytes first, then the longwords, and restores DF with CLD.
   194  */
   195 ENTRY(bcopy)
   196         pushl   %ebp
   197         movl    %esp,%ebp
   198         pushl   %esi
   199         pushl   %edi
   200         movl    8(%ebp),%esi            /* %esi = src */
   201         movl    12(%ebp),%edi           /* %edi = dst */
   202         movl    16(%ebp),%ecx           /* %ecx = cnt */
   203 
   204         movl    %edi,%eax
   205         subl    %esi,%eax
   206         cmpl    %ecx,%eax                       /* overlapping && src < dst? */
   207         jb      1f
   208 
   209         shrl    $2,%ecx                         /* copy by 32-bit words */
   210         cld                                     /* nope, copy forwards */
   211         rep
   212         movsl
   213         movl    16(%ebp),%ecx
   214         andl    $3,%ecx                         /* any bytes left? */
   215         rep
   216         movsb
   217         popl    %edi
   218         popl    %esi
   219         popl    %ebp
   220         ret
   221 
   222         ALIGN_TEXT
   223 1:
   224         addl    %ecx,%edi                       /* copy backwards */
   225         addl    %ecx,%esi
   226         decl    %edi
   227         decl    %esi
   228         andl    $3,%ecx                         /* any fractional bytes? */
   229         std
   230         rep
   231         movsb
   232         movl    16(%ebp),%ecx                   /* copy remainder by 32-bit words */
   233         shrl    $2,%ecx
   234         subl    $3,%esi                 /* step back to last full longword */
   235         subl    $3,%edi
   236         rep
   237         movsl
   238         popl    %edi
   239         popl    %esi
   240         cld                             /* restore direction flag for callers */
   241         popl    %ebp
   242         ret
   243 END(bcopy)
  244 
   245 /*
   246  * Note: memcpy does not support overlapping copies
   247  *
   248  * memcpy(dst, src, cnt) -- forward-only copy; returns dst in %eax
   249  * per the C memcpy() contract (%eax is set before the copy and is not
   250  * clobbered by the string instructions).
   247  */
   248 ENTRY(memcpy)
   249         pushl   %edi
   250         pushl   %esi
   251         movl    12(%esp),%edi           /* %edi = dst */
   252         movl    16(%esp),%esi           /* %esi = src */
   253         movl    20(%esp),%ecx           /* %ecx = cnt */
   254         movl    %edi,%eax               /* return value: dst */
   255         shrl    $2,%ecx                         /* copy by 32-bit words */
   256         cld                                     /* nope, copy forwards */
   257         rep
   258         movsl
   259         movl    20(%esp),%ecx
   260         andl    $3,%ecx                         /* any bytes left? */
   261         rep
   262         movsb
   263         popl    %esi
   264         popl    %edi
   265         ret
   266 END(memcpy)
  267 
  268 /*****************************************************************************/
  269 /* copyout and fubyte family                                                 */
  270 /*****************************************************************************/
  271 /*
  272  * Access user memory from inside the kernel. These routines and possibly
  273  * the math- and DOS emulators should be the only places that do this.
  274  *
  275  * We have to access the memory with user's permissions, so use a segment
  276  * selector with RPL 3. For writes to user space we have to additionally
  277  * check the PTE for write permission, because the 386 does not check
  278  * write permissions when we are executing with EPL 0. The 486 does check
  279  * this if the WP bit is set in CR0, so we can use a simpler version here.
  280  *
  281  * These routines set curpcb->pcb_onfault for the time they execute. When a
  282  * protection violation occurs inside the functions, the trap handler
  283  * returns to *curpcb->pcb_onfault instead of the function.
  284  */
  285 
   286 /*
   287  * copyout(from_kernel, to_user, len)  - MP SAFE
   288  *
   289  * Copy len bytes from kernel space to user space.  Returns 0 on
   290  * success or EFAULT if the destination range is not a valid user
   291  * address or a fault occurs; pcb_onfault routes faults to
   292  * copyout_fault below and is cleared on every exit path.
   288  */
   289 ENTRY(copyout)
   290         movl    PCPU(CURPCB),%eax
   291         movl    $copyout_fault,PCB_ONFAULT(%eax)
   292         pushl   %esi
   293         pushl   %edi
   294         pushl   %ebx
   295         movl    16(%esp),%esi           /* %esi = from_kernel */
   296         movl    20(%esp),%edi           /* %edi = to_user */
   297         movl    24(%esp),%ebx           /* %ebx = len */
   298         testl   %ebx,%ebx                       /* anything to do? */
   299         jz      done_copyout
   300 
   301         /*
   302          * Check explicitly for non-user addresses.  If 486 write protection
   303          * is being used, this check is essential because we are in kernel
   304          * mode so the h/w does not provide any protection against writing
   305          * kernel addresses.
   306          */
   307 
   308         /*
   309          * First, prevent address wrapping.
   310          */
   311         movl    %edi,%eax
   312         addl    %ebx,%eax
   313         jc      copyout_fault
   314 /*
   315  * XXX STOP USING VM_MAXUSER_ADDRESS.
   316  * It is an end address, not a max, so every time it is used correctly it
   317  * looks like there is an off by one error, and of course it caused an off
   318  * by one error in several places.
   319  */
   320         cmpl    $VM_MAXUSER_ADDRESS,%eax
   321         ja      copyout_fault
   322 
   323         /* bcopy(%esi, %edi, %ebx) */
   324         movl    %ebx,%ecx
   325 
   326         shrl    $2,%ecx                 /* longwords first */
   327         cld
   328         rep
   329         movsl
   330         movb    %bl,%cl
   331         andb    $3,%cl                  /* then the leftover bytes */
   332         rep
   333         movsb
   334 
   335 done_copyout:
   336         popl    %ebx
   337         popl    %edi
   338         popl    %esi
   339         xorl    %eax,%eax               /* success: return 0 */
   340         movl    PCPU(CURPCB),%edx
   341         movl    %eax,PCB_ONFAULT(%edx)  /* disarm fault handler */
   342         ret
   343 END(copyout)
   344 
/* Fault target for copyout: undo the pushes, clear pcb_onfault, return EFAULT. */
   345         ALIGN_TEXT
   346 copyout_fault:
   347         popl    %ebx
   348         popl    %edi
   349         popl    %esi
   350         movl    PCPU(CURPCB),%edx
   351         movl    $0,PCB_ONFAULT(%edx)
   352         movl    $EFAULT,%eax
   353         ret
  354 
   355 /*
   356  * copyin(from_user, to_kernel, len) - MP SAFE
   357  *
   358  * Copy len bytes from user space to kernel space.  Returns 0 on
   359  * success or EFAULT on an invalid user source range or fault;
   360  * faults vector through pcb_onfault to copyin_fault below.
   357  */
   358 ENTRY(copyin)
   359         movl    PCPU(CURPCB),%eax
   360         movl    $copyin_fault,PCB_ONFAULT(%eax)
   361         pushl   %esi
   362         pushl   %edi
   363         movl    12(%esp),%esi                   /* caddr_t from */
   364         movl    16(%esp),%edi                   /* caddr_t to */
   365         movl    20(%esp),%ecx                   /* size_t  len */
   366 
   367         /*
   368          * make sure address is valid (reject wraparound and
   369          * end addresses above VM_MAXUSER_ADDRESS)
   369          */
   370         movl    %esi,%edx
   371         addl    %ecx,%edx
   372         jc      copyin_fault
   373         cmpl    $VM_MAXUSER_ADDRESS,%edx
   374         ja      copyin_fault
   375 
   376         movb    %cl,%al                 /* save len & 3 in %al */
   377         shrl    $2,%ecx                         /* copy longword-wise */
   378         cld
   379         rep
   380         movsl
   381         movb    %al,%cl
   382         andb    $3,%cl                          /* copy remaining bytes */
   383         rep
   384         movsb
   385 
   386         popl    %edi
   387         popl    %esi
   388         xorl    %eax,%eax               /* success: return 0 */
   389         movl    PCPU(CURPCB),%edx
   390         movl    %eax,PCB_ONFAULT(%edx)  /* disarm fault handler */
   391         ret
   392 END(copyin)
   393 
/* Fault target for copyin: undo the pushes, clear pcb_onfault, return EFAULT. */
   394         ALIGN_TEXT
   395 copyin_fault:
   396         popl    %edi
   397         popl    %esi
   398         movl    PCPU(CURPCB),%edx
   399         movl    $0,PCB_ONFAULT(%edx)
   400         movl    $EFAULT,%eax
   401         ret
  402 
  403 /*
  404  * casuword.  Compare and set user word.  Returns -1 or the current value.
  405  */
  406 
  407 ALTENTRY(casuword32)
  408 ENTRY(casuword)
  409         movl    PCPU(CURPCB),%ecx
  410         movl    $fusufault,PCB_ONFAULT(%ecx)
  411         movl    4(%esp),%edx                    /* dst */
  412         movl    8(%esp),%eax                    /* old */
  413         movl    12(%esp),%ecx                   /* new */
  414 
  415         cmpl    $VM_MAXUSER_ADDRESS-4,%edx      /* verify address is valid */
  416         ja      fusufault
  417 
  418 #ifdef SMP
  419         lock
  420 #endif
  421         cmpxchgl %ecx,(%edx)                    /* Compare and set. */
  422 
  423         /*
  424          * The old value is in %eax.  If the store succeeded it will be the
  425          * value we expected (old) from before the store, otherwise it will
  426          * be the current value.
  427          */
  428 
  429         movl    PCPU(CURPCB),%ecx
  430         movl    $fusufault,PCB_ONFAULT(%ecx)
  431         movl    $0,PCB_ONFAULT(%ecx)
  432         ret
  433 END(casuword32)
  434 END(casuword)
  435 
   436 /*
   437  * Fetch (load) a 32-bit word, a 16-bit word, or an 8-bit byte from user
   438  * memory.  All these functions are MPSAFE.
   439  *
   440  * fuword(addr) / fuword32(addr): return the 32-bit word at the user
   441  * address, or -1 (via fusufault) on a bad address or fault.
   439  */
   440 
   441 ALTENTRY(fuword32)
   442 ENTRY(fuword)
   443         movl    PCPU(CURPCB),%ecx
   444         movl    $fusufault,PCB_ONFAULT(%ecx)    /* arm fault handler */
   445         movl    4(%esp),%edx                    /* from */
   446 
   447         cmpl    $VM_MAXUSER_ADDRESS-4,%edx      /* verify address is valid */
   448         ja      fusufault
   449 
   450         movl    (%edx),%eax             /* fetch; may fault to fusufault */
   451         movl    $0,PCB_ONFAULT(%ecx)    /* disarm fault handler */
   452         ret
   453 END(fuword32)
   454 END(fuword)
  455 
   456 /*
   457  * fuswintr() and suswintr() are specialized variants of fuword16() and
   458  * suword16(), respectively.  They are called from the profiling code,
   459  * potentially at interrupt time.  If they fail, that's okay; good things
   460  * will happen later.  They always fail for now, until the trap code is
   461  * able to deal with this.
   462  */
   463 ALTENTRY(suswintr)
   464 ENTRY(fuswintr)
   465         movl    $-1,%eax        /* unconditional failure (see above) */
   466         ret
   467 END(suswintr)
   468 END(fuswintr)
  469 
/* fuword16(addr): fetch a 16-bit word from user space, zero-extended;
 * returns -1 via fusufault on a bad address or fault. */
   470 ENTRY(fuword16)
   471         movl    PCPU(CURPCB),%ecx
   472         movl    $fusufault,PCB_ONFAULT(%ecx)    /* arm fault handler */
   473         movl    4(%esp),%edx
   474 
   475         cmpl    $VM_MAXUSER_ADDRESS-2,%edx      /* word must fit below limit */
   476         ja      fusufault
   477 
   478         movzwl  (%edx),%eax             /* zero-extend the 16-bit load */
   479         movl    $0,PCB_ONFAULT(%ecx)    /* disarm fault handler */
   480         ret
   481 END(fuword16)
  482 
/* fubyte(addr): fetch a byte from user space, zero-extended;
 * returns -1 via fusufault on a bad address or fault. */
   483 ENTRY(fubyte)
   484         movl    PCPU(CURPCB),%ecx
   485         movl    $fusufault,PCB_ONFAULT(%ecx)    /* arm fault handler */
   486         movl    4(%esp),%edx
   487 
   488         cmpl    $VM_MAXUSER_ADDRESS-1,%edx      /* byte must lie below limit */
   489         ja      fusufault
   490 
   491         movzbl  (%edx),%eax             /* zero-extend the byte load */
   492         movl    $0,PCB_ONFAULT(%ecx)    /* disarm fault handler */
   493         ret
   494 END(fubyte)
  495 
/*
 * Common fault target for the fu*/su* family: clear pcb_onfault and
 * return -1 (xor to 0, then decrement).
 */
   496         ALIGN_TEXT
   497 fusufault:
   498         movl    PCPU(CURPCB),%ecx
   499         xorl    %eax,%eax
   500         movl    %eax,PCB_ONFAULT(%ecx)  /* disarm fault handler */
   501         decl    %eax                    /* return -1 */
   502         ret
  503 
   504 /*
   505  * Store a 32-bit word, a 16-bit word, or an 8-bit byte to user memory.
   506  * All these functions are MPSAFE.
   507  *
   508  * suword(addr, v) / suword32(addr, v): store v at the user address;
   509  * return 0 on success, -1 (via fusufault) on a bad address or fault.
   507  */
   508 
   509 ALTENTRY(suword32)
   510 ENTRY(suword)
   511         movl    PCPU(CURPCB),%ecx
   512         movl    $fusufault,PCB_ONFAULT(%ecx)    /* arm fault handler */
   513         movl    4(%esp),%edx
   514 
   515         cmpl    $VM_MAXUSER_ADDRESS-4,%edx      /* verify address validity */
   516         ja      fusufault
   517 
   518         movl    8(%esp),%eax
   519         movl    %eax,(%edx)             /* store; may fault to fusufault */
   520         xorl    %eax,%eax               /* success: return 0 */
   521         movl    PCPU(CURPCB),%ecx
   522         movl    %eax,PCB_ONFAULT(%ecx)  /* disarm fault handler */
   523         ret
   524 END(suword32)
   525 END(suword)
  526 
/* suword16(addr, v): store a 16-bit word to user space;
 * returns 0 on success, -1 via fusufault otherwise. */
   527 ENTRY(suword16)
   528         movl    PCPU(CURPCB),%ecx
   529         movl    $fusufault,PCB_ONFAULT(%ecx)    /* arm fault handler */
   530         movl    4(%esp),%edx
   531 
   532         cmpl    $VM_MAXUSER_ADDRESS-2,%edx      /* verify address validity */
   533         ja      fusufault
   534 
   535         movw    8(%esp),%ax
   536         movw    %ax,(%edx)              /* store; may fault to fusufault */
   537         xorl    %eax,%eax               /* success: return 0 */
   538         movl    PCPU(CURPCB),%ecx               /* restore trashed register */
   539         movl    %eax,PCB_ONFAULT(%ecx)  /* disarm fault handler */
   540         ret
   541 END(suword16)
  542 
/* subyte(addr, v): store a byte to user space;
 * returns 0 on success, -1 via fusufault otherwise. */
   543 ENTRY(subyte)
   544         movl    PCPU(CURPCB),%ecx
   545         movl    $fusufault,PCB_ONFAULT(%ecx)    /* arm fault handler */
   546         movl    4(%esp),%edx
   547 
   548         cmpl    $VM_MAXUSER_ADDRESS-1,%edx      /* verify address validity */
   549         ja      fusufault
   550 
   551         movb    8(%esp),%al
   552         movb    %al,(%edx)              /* store; may fault to fusufault */
   553         xorl    %eax,%eax               /* success: return 0 */
   554         movl    PCPU(CURPCB),%ecx               /* restore trashed register */
   555         movl    %eax,PCB_ONFAULT(%ecx)  /* disarm fault handler */
   556         ret
   557 END(subyte)
  558 
   559 /*
   560  * copyinstr(from, to, maxlen, int *lencopied) - MP SAFE
   561  *
   562  *      copy a string from 'from' (user space) to 'to' (kernel space);
   563  *      stop when a 0 character is reached.
   564  *      return ENAMETOOLONG if string is longer than maxlen, and
   565  *      EFAULT on protection violations. If lencopied is non-zero,
   566  *      return the actual length in *lencopied.
   567  *
   568  *      %edx counts down from maxlen+1; the copied length is recovered
   569  *      at cpystrflt_x as maxlen - %edx.
   566  */
   567 ENTRY(copyinstr)
   568         pushl   %esi
   569         pushl   %edi
   570         movl    PCPU(CURPCB),%ecx
   571         movl    $cpystrflt,PCB_ONFAULT(%ecx)    /* arm fault handler */
   572 
   573         movl    12(%esp),%esi                   /* %esi = from */
   574         movl    16(%esp),%edi                   /* %edi = to */
   575         movl    20(%esp),%edx                   /* %edx = maxlen */
   576 
   577         movl    $VM_MAXUSER_ADDRESS,%eax
   578 
   579         /* make sure 'from' is within bounds */
   580         subl    %esi,%eax
   581         jbe     cpystrflt
   582 
   583         /* restrict maxlen to <= VM_MAXUSER_ADDRESS-from */
   584         cmpl    %edx,%eax
   585         jae     1f
   586         movl    %eax,%edx
   587         movl    %eax,20(%esp)           /* clamped maxlen, used below */
   588 1:
   589         incl    %edx
   590         cld
   591 
   592 2:
   593         decl    %edx
   594         jz      3f                      /* ran out of room */
   595 
   596         lodsb                           /* copy one byte, test for NUL */
   597         stosb
   598         orb     %al,%al
   599         jnz     2b
   600 
   601         /* Success -- 0 byte reached */
   602         decl    %edx                    /* don't count the NUL in the length */
   603         xorl    %eax,%eax
   604         jmp     cpystrflt_x
   605 3:
   606         /* edx is zero - return ENAMETOOLONG or EFAULT */
   607         cmpl    $VM_MAXUSER_ADDRESS,%esi
   608         jae     cpystrflt               /* stopped at the user-space limit */
   609 4:
   610         movl    $ENAMETOOLONG,%eax
   611         jmp     cpystrflt_x
   612 
   613 cpystrflt:
   614         movl    $EFAULT,%eax
   615 
   616 cpystrflt_x:
   617         /* set *lencopied and return %eax */
   618         movl    PCPU(CURPCB),%ecx
   619         movl    $0,PCB_ONFAULT(%ecx)    /* disarm fault handler */
   620         movl    20(%esp),%ecx
   621         subl    %edx,%ecx               /* %ecx = bytes copied */
   622         movl    24(%esp),%edx
   623         testl   %edx,%edx               /* lencopied pointer optional */
   624         jz      1f
   625         movl    %ecx,(%edx)
   626 1:
   627         popl    %edi
   628         popl    %esi
   629         ret
   630 END(copyinstr)
  631 
   632 /*
   633  * copystr(from, to, maxlen, int *lencopied) - MP SAFE
   634  *
   635  * Kernel-to-kernel variant of copyinstr: same contract (0 or
   636  * ENAMETOOLONG; optional *lencopied) but no user-address checks and
   637  * no fault handler, since both buffers are kernel addresses.
   634  */
   635 ENTRY(copystr)
   636         pushl   %esi
   637         pushl   %edi
   638 
   639         movl    12(%esp),%esi                   /* %esi = from */
   640         movl    16(%esp),%edi                   /* %edi = to */
   641         movl    20(%esp),%edx                   /* %edx = maxlen */
   642         incl    %edx
   643         cld
   644 1:
   645         decl    %edx
   646         jz      4f                      /* ran out of room */
   647         lodsb                           /* copy one byte, test for NUL */
   648         stosb
   649         orb     %al,%al
   650         jnz     1b
   651 
   652         /* Success -- 0 byte reached */
   653         decl    %edx                    /* don't count the NUL in the length */
   654         xorl    %eax,%eax
   655         jmp     6f
   656 4:
   657         /* edx is zero -- return ENAMETOOLONG */
   658         movl    $ENAMETOOLONG,%eax
   659 
   660 6:
   661         /* set *lencopied and return %eax */
   662         movl    20(%esp),%ecx
   663         subl    %edx,%ecx               /* %ecx = bytes copied */
   664         movl    24(%esp),%edx
   665         testl   %edx,%edx               /* lencopied pointer optional */
   666         jz      7f
   667         movl    %ecx,(%edx)
   668 7:
   669         popl    %edi
   670         popl    %esi
   671         ret
   672 END(copystr)
  673 
/*
 * bcmp(b1, b2, len): compare two byte strings; returns 0 if identical,
 * 1 otherwise (SETNE of the final comparison, sign-extended).  Compares
 * longword-wise first, then the <= 3 leftover bytes.
 */
   674 ENTRY(bcmp)
   675         pushl   %edi
   676         pushl   %esi
   677         movl    12(%esp),%edi           /* %edi = b1 */
   678         movl    16(%esp),%esi           /* %esi = b2 */
   679         movl    20(%esp),%edx           /* %edx = len */
   680 
   681         movl    %edx,%ecx
   682         shrl    $2,%ecx                 /* compare len / 4 longwords */
   683         cld                                     /* compare forwards */
   684         repe
   685         cmpsl
   686         jne     1f
   687 
   688         movl    %edx,%ecx
   689         andl    $3,%ecx                 /* then the remaining bytes */
   690         repe
   691         cmpsb
   692 1:
   693         setne   %al                     /* 1 if any mismatch, else 0 */
   694         movsbl  %al,%eax
   695         popl    %esi
   696         popl    %edi
   697         ret
   698 END(bcmp)
  699 
   700 /*
   701  * Handling of special 386 registers and descriptor tables etc
   702  */
   703 /* void lgdt(struct region_descriptor *rdp);
   704  * Load a new GDT, then force all segment registers (and %cs via an
   705  * intersegment return) to be reloaded from it.  Under XEN the hypervisor
   706  * owns the GDT, so the LGDT itself is compiled out. */
   704 ENTRY(lgdt)
   705 #ifndef XEN
   706         /* reload the descriptor table */
   707         movl    4(%esp),%eax
   708         lgdt    (%eax)
   709 #endif
   710 
   711         /* flush the prefetch q */
   712         jmp     1f
   713         nop
   714 1:
   715         /* reload "stale" selectors */
   716         movl    $KDSEL,%eax
   717         movl    %eax,%ds
   718         movl    %eax,%es
   719         movl    %eax,%gs
   720         movl    %eax,%ss
   721         movl    $KPSEL,%eax             /* %fs uses the per-CPU selector */
   722         movl    %eax,%fs
   723 
   724         /* reload code selector by turning return into intersegmental return */
   725         movl    (%esp),%eax
   726         pushl   %eax
   727         movl    $KCSEL,4(%esp)
   728         MEXITCOUNT
   729         lret                            /* far return reloads %cs */
   730 END(lgdt)
  731 
   732 /* ssdtosd(*ssdp,*sdp)
   733  * Convert a machine-independent soft segment descriptor (ssdp) into the
   734  * hardware segment-descriptor layout, writing the two 32-bit halves to
   735  * sdp.  The shifts/rotates shuffle base, limit and attribute fields into
   736  * the interleaved positions the CPU expects. */
   733 ENTRY(ssdtosd)
   734         pushl   %ebx
   735         movl    8(%esp),%ecx            /* %ecx = ssdp */
   736         movl    8(%ecx),%ebx
   737         shll    $16,%ebx
   738         movl    (%ecx),%edx
   739         roll    $16,%edx
   740         movb    %dh,%bl
   741         movb    %dl,%bh
   742         rorl    $8,%ebx
   743         movl    4(%ecx),%eax
   744         movw    %ax,%dx
   745         andl    $0xf0000,%eax           /* keep limit bits 16-19 */
   746         orl     %eax,%ebx
   747         movl    12(%esp),%ecx           /* %ecx = sdp */
   748         movl    %edx,(%ecx)             /* low half of descriptor */
   749         movl    %ebx,4(%ecx)            /* high half of descriptor */
   750         popl    %ebx
   751         ret
   752 END(ssdtosd)
  753 
   754 /* void reset_dbregs()
   755  * Clear all hardware debug registers: disable via %dr7 first so no
   756  * breakpoint can fire while the address registers are being cleared,
   757  * then zero %dr0-%dr3 and the status register %dr6. */
   755 ENTRY(reset_dbregs)
   756         movl    $0,%eax
   757         movl    %eax,%dr7     /* disable all breakpoints first */
   758         movl    %eax,%dr0
   759         movl    %eax,%dr1
   760         movl    %eax,%dr2
   761         movl    %eax,%dr3
   762         movl    %eax,%dr6
   763         ret
   764 END(reset_dbregs)
  765 
  766 /*****************************************************************************/
  767 /* setjump, longjump                                                         */
  768 /*****************************************************************************/
  769 
/*
 * setjmp(jmp_buf): save the callee-saved registers, stack pointer and
 * return address into the jmp_buf; returns 0 on the direct call
 * (longjmp later resumes here returning 1).
 */
   770 ENTRY(setjmp)
   771         movl    4(%esp),%eax            /* %eax = jmp_buf */
   772         movl    %ebx,(%eax)                     /* save ebx */
   773         movl    %esp,4(%eax)                    /* save esp */
   774         movl    %ebp,8(%eax)                    /* save ebp */
   775         movl    %esi,12(%eax)                   /* save esi */
   776         movl    %edi,16(%eax)                   /* save edi */
   777         movl    (%esp),%edx                     /* get rta */
   778         movl    %edx,20(%eax)                   /* save eip */
   779         xorl    %eax,%eax                       /* return(0); */
   780         ret
   781 END(setjmp)
  782 
/*
 * longjmp(jmp_buf): restore the context saved by setjmp and resume at
 * the saved return address, making the original setjmp call return 1.
 */
   783 ENTRY(longjmp)
   784         movl    4(%esp),%eax            /* %eax = jmp_buf */
   785         movl    (%eax),%ebx                     /* restore ebx */
   786         movl    4(%eax),%esp                    /* restore esp */
   787         movl    8(%eax),%ebp                    /* restore ebp */
   788         movl    12(%eax),%esi                   /* restore esi */
   789         movl    16(%eax),%edi                   /* restore edi */
   790         movl    20(%eax),%edx                   /* get rta */
   791         movl    %edx,(%esp)                     /* put in return frame */
   792         xorl    %eax,%eax                       /* return(1); */
   793         incl    %eax
   794         ret
   795 END(longjmp)
  796 
   797 /*
   798  * Support for BB-profiling (gcc -a).  The kernbb program will extract
   799  * the data from the kernel.
   800  */
   801 
   802         .data
   803         ALIGN_DATA
   804         .globl bbhead
/* Head of the singly linked list of per-object bb-profiling blocks. */
   805 bbhead:
   806         .long 0
   807 
   808         .text
/* Mark the block initialized and prepend it to the bbhead list.
 * NOTE(review): offset 16 appears to be the block's next-pointer field
 * per the gcc -a block layout -- confirm against the toolchain headers. */
   809 NON_GPROF_ENTRY(__bb_init_func)
   810         movl    4(%esp),%eax
   811         movl    $1,(%eax)               /* mark block as initialized */
   812         movl    bbhead,%edx
   813         movl    %edx,16(%eax)           /* link previous head after us */
   814         movl    %eax,bbhead
   815         NON_GPROF_RET
  816 
   817 /*
   818  * Support for reading MSRs in the safe manner.
   819  *
   820  * Returns 0 with *data filled in, or EFAULT (via msr_onfault) if the
   821  * RDMSR raises #GP, e.g. for a non-existent MSR.
   819  */
   820 ENTRY(rdmsr_safe)
   821 /* int rdmsr_safe(u_int msr, uint64_t *data) */
   822         movl    PCPU(CURPCB),%ecx
   823         movl    $msr_onfault,PCB_ONFAULT(%ecx)   /* arm fault handler */
   824 
   825         movl    4(%esp),%ecx            /* %ecx = msr number */
   826         rdmsr                           /* may fault to msr_onfault */
   827         movl    8(%esp),%ecx            /* %ecx = data pointer */
   828         movl    %eax,(%ecx)             /* low 32 bits */
   829         movl    %edx,4(%ecx)            /* high 32 bits */
   830         xorl    %eax,%eax               /* success: return 0 */
   831 
   832         movl    PCPU(CURPCB),%ecx
   833         movl    %eax,PCB_ONFAULT(%ecx)  /* disarm fault handler */
   834 
   835         ret
  836 
   837 /*
   838  * Support for writing MSRs in the safe manner.
   839  *
   840  * Returns 0 on success, or EFAULT (via msr_onfault) if the WRMSR
   841  * raises #GP, e.g. for a non-existent or read-only MSR.
   839  */
   840 ENTRY(wrmsr_safe)
   841 /* int wrmsr_safe(u_int msr, uint64_t data) */
   842         movl    PCPU(CURPCB),%ecx
   843         movl    $msr_onfault,PCB_ONFAULT(%ecx)   /* arm fault handler */
   844 
   845         movl    4(%esp),%ecx            /* %ecx = msr number */
   846         movl    8(%esp),%eax            /* low 32 bits of data */
   847         movl    12(%esp),%edx           /* high 32 bits of data */
   848         wrmsr                           /* may fault to msr_onfault */
   849         xorl    %eax,%eax               /* success: return 0 */
   850 
   851         movl    PCPU(CURPCB),%ecx
   852         movl    %eax,PCB_ONFAULT(%ecx)  /* disarm fault handler */
   853 
   854         ret
  855 
   856 /*
   857  * MSR operations fault handler: clear pcb_onfault and report EFAULT
   858  * to the rdmsr_safe/wrmsr_safe caller.
   859  */
   859         ALIGN_TEXT
   860 msr_onfault:
   861         movl    PCPU(CURPCB),%ecx
   862         movl    $0,PCB_ONFAULT(%ecx)    /* disarm fault handler */
   863         movl    $EFAULT,%eax
   864         ret

Cache object: 1b4267f116de5f518998836c2d98c568


[ source navigation ] [ diff markup ] [ identifier search ] [ freetext search ] [ file search ] [ list types ] [ track identifier ]


This page is part of the FreeBSD/Linux Kernel Cross-Reference, and was automatically generated using a modified version of the LXR engine.