The Design and Implementation of the FreeBSD Operating System, Second Edition
Now available: The Design and Implementation of the FreeBSD Operating System (Second Edition)


[ source navigation ] [ diff markup ] [ identifier search ] [ freetext search ] [ file search ] [ list types ] [ track identifier ]

FreeBSD/Linux Kernel Cross Reference
sys/bitsy/l.s

Version: -  FREEBSD  -  FREEBSD-13-STABLE  -  FREEBSD-13-0  -  FREEBSD-12-STABLE  -  FREEBSD-12-0  -  FREEBSD-11-STABLE  -  FREEBSD-11-0  -  FREEBSD-10-STABLE  -  FREEBSD-10-0  -  FREEBSD-9-STABLE  -  FREEBSD-9-0  -  FREEBSD-8-STABLE  -  FREEBSD-8-0  -  FREEBSD-7-STABLE  -  FREEBSD-7-0  -  FREEBSD-6-STABLE  -  FREEBSD-6-0  -  FREEBSD-5-STABLE  -  FREEBSD-5-0  -  FREEBSD-4-STABLE  -  FREEBSD-3-STABLE  -  FREEBSD22  -  l41  -  OPENBSD  -  linux-2.6  -  MK84  -  PLAN9  -  xnu-8792 
SearchContext: -  none  -  3  -  10 

    1 #include "mem.h"
    2 
    3 /*
    4  * Entered here from Compaq's bootldr with MMU disabled.
    5  */
    6 TEXT _start(SB), $-4                  /* kernel entry point; does not return */
    7         MOVW    $setR12(SB), R12                /* load the SB */
    8 _main:
    9         /* SVC mode, interrupts disabled */
   10         MOVW    $(PsrDirq|PsrDfiq|PsrMsvc), R1
   11         MOVW    R1, CPSR
   12 
   13         /* disable the MMU */
   14         MOVW    $0x130, R1              /* control reg value with the MMU-enable bit clear — TODO confirm remaining bits against SA-1100 manual */
   15         MCR     CpMMU, 0, R1, C(CpControl), C(0x0)
   16 
   17         /* flush caches */
   18         MCR     CpMMU, 0, R0, C(CpCacheFlush), C(0x7), 0
   19         /* drain prefetch */
           /* four no-ops let the flush take effect before further fetches — presumably an SA-1100 pipeline requirement */
   20         MOVW    R0,R0
   21         MOVW    R0,R0
   22         MOVW    R0,R0
   23         MOVW    R0,R0
   24 
   25         /* drain write buffer */
   26         MCR     CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
   27 
   28         MOVW    $(MACHADDR+4*BY2PG), R13                /* stack */
   29         SUB     $4, R13                         /* link */
   30         BL      main(SB)
   31         BL      exit(SB)
   32         /* we shouldn't get here */
   33 _mainloop:
   34         B       _mainloop               /* spin forever if main/exit return */
   35         BL      _div(SB)                        /* hack to get _div etc loaded */
   37 /* flush all tlb entries */
   38 TEXT mmuinvalidate(SB), $-4
   39         MCR     CpMMU, 0, R0, C(CpTLBFlush), C(0x7)
   40         RET
   41 
   42 /* flush the single tlb entry for the virtual address in R0 */
   43 TEXT mmuinvalidateaddr(SB), $-4
   44         MCR     CpMMU, 0, R0, C(CpTLBFlush), C(0x6), 1
   45         RET
   46 
   47 /* write back and invalidate i and d caches */
   48 TEXT cacheflush(SB), $-4
   49         /* splhi: mask IRQ so the flush loop is not interrupted; R3 holds the old CPSR */
   50         MOVW    CPSR, R3
   51         ORR             $(PsrDirq), R3, R1
   52         MOVW    R1, CPSR
   53 
   54         /* write back any dirty data by reading 8K of lines through the flush region
           * at 0xe0000000 — TODO confirm this is the SA-1100 cache-flush address range */
   55         MOVW    $0xe0000000,R0
   56         ADD             $(8*1024),R0,R1
   57 _cfloop:
   58         MOVW.P  32(R0),R2               /* load, post-increment R0 by 32 (one cache line) */
   59         CMP.S   R0,R1
   60         BGE     _cfloop
   61         
   62         /* drain write buffer and invalidate i cache contents */
   63         MCR             CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
   64         MCR             CpMMU, 0, R0, C(CpCacheFlush), C(0x5), 0
   65 
   66         /* drain prefetch */
   67         MOVW    R0,R0
   68         MOVW    R0,R0
   69         MOVW    R0,R0
   70         MOVW    R0,R0
   71 
   72         /* splx: restore the caller's interrupt state */
   73         MOVW    R3, CPSR
   74         RET
   76 /* write back d cache */
   77 TEXT cachewb(SB), $-4
   78         /* write back any dirty data: read 8K through the flush region so dirty
           * lines are evicted; _cachewb is also a branch target of cachewbregion */
   79 _cachewb:
   80         MOVW    $0xe0000000,R0
   81         ADD             $(8*1024),R0,R1
   82 _cwbloop:
   83         MOVW.P  32(R0),R2               /* load, post-increment R0 by one 32-byte line */
   84         CMP.S   R0,R1
   85         BGE     _cwbloop
   86 
   87         /* drain write buffer */
   88         MCR     CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
   89         RET
   90 
   91 /* write back a single cache line; R0 = address */
   92 TEXT cachewbaddr(SB), $-4
   93         BIC     $31,R0                  /* align down to the 32-byte line */
   94         MCR     CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 1
   95         B       _wbflush                /* tail into wbflush to drain the write buffer */
   96 
   97 /* write back a region of cache lines; R0 = address, arg1 = length */
   98 TEXT cachewbregion(SB), $-4
   99         MOVW    4(FP),R1
  100         CMP.S   $(4*1024),R1
  101         BGT     _cachewb                /* region > 4K: cheaper to write back the whole cache */
  102         ADD     R0,R1                   /* R1 = end address */
  103         BIC     $31,R0                  /* align start down to a line */
  104 _cwbrloop:
  105         MCR     CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 1
  106         ADD     $32,R0
  107         CMP.S   R0,R1
  108         BGT     _cwbrloop
  109         B       _wbflush
  110 
  111 /* invalidate the dcache (no write back) */
  112 TEXT dcacheinvalidate(SB), $-4
  113         MCR     CpMMU, 0, R0, C(CpCacheFlush), C(0x6)
  114         RET
  115 
  116 /* invalidate the icache */
  117 TEXT icacheinvalidate(SB), $-4
  118         MCR     CpMMU, 0, R0, C(CpCacheFlush), C(0x9)
  119         RET
  120 
  121 /* drain write buffer */
  122 TEXT wbflush(SB), $-4
  123 _wbflush:                               /* also a branch target of cachewbaddr/cachewbregion */
  124         MCR     CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
  125         RET
  126 
  127 /* return cpu id */
  128 TEXT getcpuid(SB), $-4
  129         MRC     CpMMU, 0, R0, C(CpCPUID), C(0x0)
  130         RET
  131 
  132 /* return fault status */
  133 TEXT getfsr(SB), $-4
  134         MRC     CpMMU, 0, R0, C(CpFSR), C(0x0)
  135         RET
  136 
  137 /* return mmu control register */
  138 TEXT getcontrol(SB), $-4
  139         SUB R0, R0                      /* zero R0 before the coprocessor read — presumably an SA-1100 workaround; TODO confirm */
  140         MRC     CpMMU, 0, R0, C(CpControl), C(0x0)
  141         RET
  142 
  143 /* return mmu dac register */
  144 TEXT getdac(SB), $-4
  145         SUB R0, R0                      /* zero R0 first, as in getcontrol */
  146         MRC     CpMMU, 0, R0, C(CpDAC), C(0x0)
  147         RET
  148 
  149 /* return mmu ttb register */
  150 TEXT getttb(SB), $-4
  151         SUB R0, R0                      /* zero R0 first, as in getcontrol */
  152         MRC     CpMMU, 0, R0, C(CpTTB), C(0x0)
  153         RET
  154 
  155 /* return fault address */
  156 TEXT getfar(SB), $-4
  157         MRC     CpMMU, 0, R0, C(CpFAR), C(0x0)
  158         RET
  159 
  160 /* set the translation table base; R0 = physical address of the page table */
  161 TEXT putttb(SB), $-4
  162         MCR     CpMMU, 0, R0, C(CpTTB), C(0x0)
  163         RET
  164 
  165 /*
  166  *  enable mmu, i and d caches
  167  */
  168 TEXT mmuenable(SB), $-4
  169         MRC     CpMMU, 0, R0, C(CpControl), C(0x0)
  170         ORR     $(CpCmmuena|CpCdcache|CpCicache|CpCwb|CpCsystem), R0
  171         BIC     $(CpCrom), R0
  172         MCR     CpMMU, 0, R0, C(CpControl), C(0x0)
           /* four no-ops while the control-register write takes effect */
  173         MOVW R0, R0
  174         MOVW R0, R0
  175         MOVW R0, R0
  176         MOVW R0, R0
  177         RET
  178 
  179 /* disable mmu, caches, write buffer and mapped vectors */
  180 TEXT mmudisable(SB), $-4
  181         MRC     CpMMU, 0, R0, C(CpControl), C(0x0)
  182         BIC     $(CpCmmuena|CpCdcache|CpCicache|CpCwb|CpCvivec), R0
  183         MCR     CpMMU, 0, R0, C(CpControl), C(0x0)
  184         RET
  184 
  185 /*
  186  *  use exception vectors at 0xffff0000
  187  */
  188 TEXT mappedIvecEnable(SB), $-4
  189         MRC     CpMMU, 0, R0, C(CpControl), C(0x0)
  190         ORR     $(CpCvivec), R0
  191         MCR     CpMMU, 0, R0, C(CpControl), C(0x0)
  192         RET
  193 /* revert to exception vectors at 0 */
  193 TEXT mappedIvecDisable(SB), $-4
  194         MRC     CpMMU, 0, R0, C(CpControl), C(0x0)
  195         BIC     $(CpCvivec), R0
  196         MCR     CpMMU, 0, R0, C(CpControl), C(0x0)
  197         RET
  198 
  199 /* set the domain access control register (comment previously said "translation table base") */
  200 TEXT putdac(SB), $-4
  201         MCR     CpMMU, 0, R0, C(CpDAC), C(0x0)
  202         RET
  203 
  204 /* set address translation pid */
  205 TEXT putpid(SB), $-4
  206         MCR     CpMMU, 0, R0, C(CpPID), C(0x0)
  207         RET
  208 
  209 /*
  210  *  set the stack value for the mode passed in R0
  211  *  R0 = mode bits, arg1 = new stack pointer; returns that mode's old R13
  212  */
  212 TEXT setr13(SB), $-4
  213         MOVW    4(FP), R1               /* R1 = new stack pointer */
  214 
  215         MOVW    CPSR, R2                /* R2 = current PSR, restored below */
  216         BIC     $PsrMask, R2, R3
  217         ORR     R0, R3                  /* R3 = current PSR with mode replaced by R0 */
  218         MOVW    R3, CPSR                /* switch into the target mode to reach its banked R13 */
  219 
  220         MOVW    R13, R0                 /* return the mode's old stack pointer */
  221         MOVW    R1, R13
  222 
  223         MOVW    R2, CPSR                /* back to the original mode */
  224         RET
  225 
  226 /*
  227  *  exception vectors, copied by trapinit() to somewhere useful
  228  */
  229 
  230 TEXT vectors(SB), $-4
           /* each vector loads PC from its vtable slot: R15 reads as '.'+8, so
            * 0x18(R15) addresses the word 0x20 bytes past the vector — the
            * matching entry of the 8-word vtable copied immediately after */
  231         MOVW    0x18(R15), R15                  /* reset */
  232         MOVW    0x18(R15), R15                  /* undefined */
  233         MOVW    0x18(R15), R15                  /* SWI */
  234         MOVW    0x18(R15), R15                  /* prefetch abort */
  235         MOVW    0x18(R15), R15                  /* data abort */
  236         MOVW    0x18(R15), R15                  /* reserved */
  237         MOVW    0x18(R15), R15                  /* IRQ */
  238         MOVW    0x18(R15), R15                  /* FIQ */
  239 
  240 TEXT vtable(SB), $-4
  241         WORD    $_vsvc(SB)                      /* reset, in svc mode already; NOTE(review): _vrst exists below but is not used here — confirm intent */
  242         WORD    $_vund(SB)                      /* undefined, switch to svc mode */
  243         WORD    $_vsvc(SB)                      /* swi, in svc mode already */
  244         WORD    $_vpabt(SB)                     /* prefetch abort, switch to svc mode */
  245         WORD    $_vdabt(SB)                     /* data abort, switch to svc mode */
  246         WORD    $_vsvc(SB)                      /* reserved */
  247         WORD    $_virq(SB)                      /* IRQ, switch to svc mode */
  248         WORD    $_vfiq(SB)                      /* FIQ, switch to svc mode */
  249 
  250 TEXT _vrst(SB), $-4                     /* reset handler; falls through into _vsvc if resettrap returns */
  251         BL      resettrap(SB)
  252 
  253 TEXT _vsvc(SB), $-4                     /* SWI: build a Ureg on the svc stack, call syscall(), return to user */
  254         MOVW.W  R14, -4(R13)            /* ureg->pc = interrupted PC */
  255         MOVW    SPSR, R14               /* ureg->psr = SPSR */
  256         MOVW.W  R14, -4(R13)            /* ... */
  257         MOVW    $PsrMsvc, R14           /* ureg->type = PsrMsvc */
  258         MOVW.W  R14, -4(R13)            /* ... */
  259         MOVM.DB.W.S [R0-R14], (R13)     /* save user level registers, at end r13 points to ureg */
  260         MOVW    $setR12(SB), R12        /* Make sure we've got the kernel's SB loaded */
  261         MOVW    R13, R0                 /* first arg is pointer to ureg */
  262         SUB     $8, R13                 /* space for argument+link */
  263 
  264         BL      syscall(SB)
  265 
           /* unwind: 8 bytes arg+link plus 15 saved registers back to ureg->type */
  266         ADD     $(8+4*15), R13          /* make r13 point to ureg->type */
  267         MOVW    8(R13), R14             /* restore link */
  268         MOVW    4(R13), R0              /* restore SPSR */
  269         MOVW    R0, SPSR                /* ... */
  270         MOVM.DB.S (R13), [R0-R14]       /* restore registers (.S = user bank) */
  271         ADD     $8, R13                 /* pop past ureg->{type+psr} */
  272         RFE                             /* MOVM.IA.S.W (R13), [R15] */
  273 
  274 TEXT _vund(SB), $-4                     /* undefined */
  275         MOVM.IA [R0-R4], (R13)          /* free some working space */
  276         MOVW    $PsrMund, R0            /* r0 = type */
  277         B       _vswitch
  278 
  279 TEXT _vpabt(SB), $-4                    /* prefetch abort */
  280         MOVM.IA [R0-R4], (R13)          /* free some working space */
  281         MOVW    $PsrMabt, R0            /* r0 = type */
  282         B       _vswitch
  283 
  284 TEXT _vdabt(SB), $-4                    /* data abort (comment previously said "prefetch abort") */
  285         MOVM.IA [R0-R4], (R13)          /* free some working space */
  286         MOVW    $(PsrMabt+1), R0                /* r0 = type: data abort is PsrMabt+1 */
  287         B       _vswitch
  288 
  289 TEXT _virq(SB), $-4                     /* IRQ */
  290         MOVM.IA [R0-R4], (R13)          /* free some working space */
  291         MOVW    $PsrMirq, R0            /* r0 = type */
  292         B       _vswitch
  293 
  294         /*
  295          *  come here with type in R0 and R13 pointing above the saved [r0-r4]
  296          *  of the exception mode.  we'll switch to SVC mode and then call trap.
  297          */
  298 _vswitch:
  299         MOVW    SPSR, R1                /* save SPSR for ureg */
  300         MOVW    R14, R2                 /* save interrupted pc for ureg */
  301         MOVW    R13, R3                 /* save pointer to where the original [R0-R3] are */
  302 
  303         /* switch to svc mode */
  304         MOVW    CPSR, R14
  305         BIC     $PsrMask, R14
  306         ORR     $(PsrDirq|PsrDfiq|PsrMsvc), R14
  307         MOVW    R14, CPSR
  308 
  309         /* interrupted code kernel or user? (user mode bits are 0) */
  310         AND.S   $0xf, R1, R4
  311         BEQ     _userexcep
  312 
  313         /* here for trap from SVC mode */
  314         MOVM.DB.W [R0-R2], (R13)        /* set ureg->{type, psr, pc}; r13 points to ureg->type  */
  315         MOVM.IA   (R3), [R0-R4]         /* restore [R0-R4] from previous mode's stack */
  316         MOVM.DB.W [R0-R14], (R13)       /* save kernel level registers, at end r13 points to ureg */
  317         MOVW    $setR12(SB), R12        /* Make sure we've got the kernel's SB loaded */
  318         MOVW    R13, R0                 /* first arg is pointer to ureg */
  319         SUB     $8, R13                 /* space for argument+link (for debugger) */
  320         MOVW    $0xdeaddead,R11         /* marker */
  321 
  322         BL      trap(SB)
  323 
  324         ADD     $(8+4*15), R13          /* make r13 point to ureg->type */
  325         MOVW    8(R13), R14             /* restore link */
  326         MOVW    4(R13), R0              /* restore SPSR */
  327         MOVW    R0, SPSR                /* ... */
  328         MOVM.DB (R13), [R0-R14] /* restore registers (no .S: same-mode return) */
  329         ADD     $8, R13                 /* pop past ureg->{type+psr} */
  330         RFE                             /* MOVM.IA.S.W (R13), [R15] */
  331 
  332         /* here for trap from USER mode */
  333 _userexcep:
  334         MOVM.DB.W [R0-R2], (R13)        /* set ureg->{type, psr, pc}; r13 points to ureg->type  */
  335         MOVM.IA   (R3), [R0-R4]         /* restore [R0-R4] from previous mode's stack */
  336         MOVM.DB.W.S [R0-R14], (R13)     /* save user level registers (.S = user bank; comment previously said "kernel"), at end r13 points to ureg */
  337         MOVW    $setR12(SB), R12        /* Make sure we've got the kernel's SB loaded */
  338         MOVW    R13, R0                 /* first arg is pointer to ureg */
  339         SUB     $8, R13                 /* space for argument+link (for debugger) */
  340 
  341         BL      trap(SB)
  342 
  343         ADD     $(8+4*15), R13          /* make r13 point to ureg->type */
  344         MOVW    8(R13), R14             /* restore link */
  345         MOVW    4(R13), R0              /* restore SPSR */
  346         MOVW    R0, SPSR                /* ... */
  347         MOVM.DB.S (R13), [R0-R14]       /* restore registers (.S = user bank) */
  348         ADD     $8, R13                 /* pop past ureg->{type+psr} */
  349         RFE                             /* MOVM.IA.S.W (R13), [R15] */
  350 
  351 TEXT _vfiq(SB), $-4                     /* FIQ */
  352         RFE                             /* FIQ is special, ignore it for now */
  353 
  354 /*
  355  *  This is the first jump from kernel to user mode.
  356  *  Fake a return from interrupt.
  357  *
  358  *  Enter with R0 containing the user stack pointer.
  359  *  UTZERO + 0x20 is always the entry point.
  360  *  
  361  */
  362 TEXT touser(SB),$-4
  363         /* store the user stack pointer into the USR_r13 */
  364         MOVM.DB.W [R0], (R13)           /* push R0 onto the svc stack */
  365         MOVM.S.IA.W (R13),[R13]         /* pop it into the user-bank R13 (.S) */
  366 
  367         /* set up a PSR for user level */
  368         MOVW    $(PsrMusr), R0
  369         MOVW    R0,SPSR                 /* RFE below copies SPSR into CPSR */
  370 
  371         /* save the PC on the stack */
  372         MOVW    $(UTZERO+0x20), R0
  373         MOVM.DB.W [R0],(R13)
  374 
  375         /* return from interrupt */
  376         RFE                             /* MOVM.IA.S.W (R13), [R15] */
  377         
  378 /*
  379  *  here to jump to a newly forked process
  380  *  entered with r13 pointing at the saved registers of the child's Ureg
  381  *  (no argument+link slot, hence 4*15 rather than 8+4*15 below)
  382  */
  381 TEXT forkret(SB),$-4
  382         ADD     $(4*15), R13            /* make r13 point to ureg->type */
  383         MOVW    8(R13), R14             /* restore link */
  384         MOVW    4(R13), R0              /* restore SPSR */
  385         MOVW    R0, SPSR                /* ... */
  386         MOVM.DB.S (R13), [R0-R14]       /* restore registers (.S = user bank) */
  387         ADD     $8, R13                 /* pop past ureg->{type+psr} */
  388         RFE                             /* MOVM.IA.S.W (R13), [R15] */
  389 
  390 TEXT splhi(SB), $-4                     /* disable IRQ; returns the previous PSR for splx */
  391         /* save caller pc in Mach (word at MACHADDR+4 — presumably m->splpc; TODO confirm against Mach layout) */
  392         MOVW    $(MACHADDR+0x04),R2
  393         MOVW    R14,0(R2)
  394         /* turn off interrupts */
  395         MOVW    CPSR, R0                /* return value: old PSR */
  396         ORR     $(PsrDirq), R0, R1
  397         MOVW    R1, CPSR
  398         RET
  399 
  400 TEXT spllo(SB), $-4                     /* enable IRQ; returns the previous PSR */
  401         MOVW    CPSR, R0
  402         BIC     $(PsrDirq), R0, R1
  403         MOVW    R1, CPSR
  404         RET
  405 
  406 TEXT splx(SB), $-4                      /* restore a PSR previously returned by splhi/spllo */
  407         /* save caller pc in Mach (same word splhi uses) */
  408         MOVW    $(MACHADDR+0x04),R2
  409         MOVW    R14,0(R2)
  410         /* reset interrupt level */
  411         MOVW    R0, R1
  412         MOVW    CPSR, R0                /* return the PSR being replaced */
  413         MOVW    R1, CPSR
  414         RET
  415 
  416 TEXT splxpc(SB), $-4                            /* for iunlock: splx without recording caller pc */
  417         MOVW    R0, R1
  418         MOVW    CPSR, R0
  419         MOVW    R1, CPSR
  420         RET
  421 
  422 TEXT spldone(SB), $0                    /* marker for profiling/debugging; no-op */
  423         RET
  424 
  425 TEXT islo(SB), $-4                      /* nonzero iff IRQ is enabled */
  426         MOVW    CPSR, R0
  427         AND     $(PsrDirq), R0          /* isolate the IRQ-disable bit */
  428         EOR     $(PsrDirq), R0          /* invert: 0 when disabled, nonzero when enabled */
  429         RET
  430 
  431 /* return the current program status register */
  431 TEXT cpsrr(SB), $-4
  432         MOVW    CPSR, R0
  433         RET
  434 
  435 /* return the saved program status register */
  435 TEXT spsrr(SB), $-4
  436         MOVW    SPSR, R0
  437         RET
  438 
  439 /* return the current stack pointer */
  439 TEXT getsp(SB), $-4
  440         MOVW    R13, R0
  441         RET
  442 
  443 /* return the link register */
  443 TEXT getlink(SB), $-4
  444         MOVW    R14, R0
  445         RET
  446 
  447 /* return the caller's saved pc (word at the top of the caller's frame) */
  447 TEXT getcallerpc(SB), $-4
  448         MOVW    0(R13), R0
  449         RET
  450 
  451 /* test-and-set: atomically swap a marker into the lock word at R0;
   * returns 0 if the lock was free, 0xDEADDEAD if it was already held */
  451 TEXT tas(SB), $-4
  452         MOVW    R0, R1                  /* R1 = lock address */
  453         MOVW    $0xDEADDEAD, R0         /* marker value stored into the lock */
  454         MOVW    R0, R3
  455         SWPW    R0, (R1)                /* atomic swap; R0 = old lock value */
  456         CMP.S   R0, R3
  457         BEQ     _tasout                 /* old value == marker: already held */
  458         EOR     R3, R3
  459         CMP.S   R0, R3
  460         BEQ     _tasout                 /* old value == 0: we took the lock */
  461         MOVW    $1,R15                  /* NOTE(review): any other value means a corrupt lock; this jumps to address 1, apparently to fault deliberately — confirm intent */
  462 _tasout:
  463         RET
  464 
  465 /* save sp/pc into the Label at R0; returns 0 (gotolabel returns here with 1) */
  465 TEXT setlabel(SB), $-4
  466         MOVW    R13, 0(R0)                      /* sp */
  467         MOVW    R14, 4(R0)                      /* pc */
  468         MOVW    $0, R0
  469         RET
  470 
  471 /* resume at the Label in R0; appears to return 1 from the matching setlabel */
  471 TEXT gotolabel(SB), $-4
  472         MOVW    0(R0), R13                      /* sp */
  473         MOVW    4(R0), R14                      /* pc */
  474         MOVW    $1, R0
  475         RET
  476 
  477 /* save the state machine in power_state[] for an upcoming suspend
  478  *
  478  * power_state[] layout (byte offsets, as written below):
  478  *   0-55    svc R1-R14
  478  *   56,60   SPSR, CPSR
  478  *   64-76   irq SPSR,R12-R14      80-92   und SPSR,R12-R14
  478  *   96-108  abt SPSR,R12-R14      112-140 fiq SPSR,R8-R14
  478  *   144-164 cp15: DAC,TTB,control,FSR,FAR,PID
  478  *   168-180 usr/sys SPSR,R12-R14
  478  */
  479 TEXT setpowerlabel(SB), $-4
  480         MOVW    $power_state+0(SB), R0
  481         /* svc */                               /* power_state[]: what */
  482         MOVW    R1, 0(R0)
  483         MOVW    R2, 4(R0)
  484         MOVW    R3, 8(R0)
  485         MOVW    R4, 12(R0)
  486         MOVW    R5, 16(R0)
  487         MOVW    R6, 20(R0)
  488         MOVW    R7, 24(R0)
  489         MOVW    R8, 28(R0)
  490         MOVW    R9, 32(R0)
  491         MOVW    R10,36(R0)
  492         MOVW    R11,40(R0)
  493         MOVW    R12,44(R0)
  494         MOVW    R13,48(R0)
  495         MOVW    R14,52(R0)
  496         MOVW    SPSR, R1
  497         MOVW    R1, 56(R0)
  498         MOVW    CPSR, R2                /* R2 kept live: base PSR for the mode switches below */
  499         MOVW    R2, 60(R0)
  500         /* copro */
  501         MRC             CpMMU, 0, R3, C(CpDAC), C(0x0)
  502         MOVW    R3, 144(R0)
  503         MRC             CpMMU, 0, R3, C(CpTTB), C(0x0)
  504         MOVW    R3, 148(R0)
  505         MRC             CpMMU, 0, R3, C(CpControl), C(0x0)
  506         MOVW    R3, 152(R0)
  507         MRC             CpMMU, 0, R3, C(CpFSR), C(0x0)
  508         MOVW    R3, 156(R0)
  509         MRC             CpMMU, 0, R3, C(CpFAR), C(0x0)
  510         MOVW    R3, 160(R0)
  511         MRC             CpMMU, 0, R3, C(CpPID), C(0x0)
  512         MOVW    R3, 164(R0)
  513         /* usr: enter mode 0xdf (system, shares the usr bank, IRQ/FIQ off) to reach usr R13/R14 */
  514         BIC             $(PsrMask), R2, R3
  515         ORR             $(0xdf), R3
  516         MOVW            R3, CPSR
  517         MOVW            SPSR, R11
  518         MOVW            R11, 168(R0)
  519         MOVW            R12, 172(R0)
  520         MOVW            R13, 176(R0)
  521         MOVW            R14, 180(R0)
  522         /* irq (mode 0xd2) */
  523         BIC             $(PsrMask), R2, R3
  524         ORR             $(0xd2), R3
  525         MOVW    R3, CPSR
  526         MOVW    SPSR, R11
  527         MOVW    R11, 64(R0)
  528         MOVW    R12, 68(R0)
  529         MOVW    R13, 72(R0)
  530         MOVW    R14, 76(R0)
  531         /* und (mode 0xdb) */
  532         BIC             $(PsrMask), R2, R3
  533         ORR             $(0xdb), R3
  534         MOVW    R3, CPSR
  535         MOVW    SPSR, R11
  536         MOVW    R11, 80(R0)
  537         MOVW    R12, 84(R0)
  538         MOVW    R13, 88(R0)
  539         MOVW    R14, 92(R0)
  540         /* abt (mode 0xd7) */
  541         BIC             $(PsrMask), R2, R3
  542         ORR             $(0xd7), R3
  543         MOVW    R3, CPSR
  544         MOVW    SPSR, R11
  545         MOVW    R11, 96(R0)
  546         MOVW    R12, 100(R0)
  547         MOVW    R13, 104(R0)
  548         MOVW    R14, 108(R0)
  549         /* fiq (mode 0xd1; R8-R14 are banked here) */
  550         BIC             $(PsrMask), R2, R3
  551         ORR             $(0xd1), R3
  552         MOVW    R3, CPSR
  553         MOVW    SPSR, R7
  554         MOVW    R7, 112(R0)
  555         MOVW    R8, 116(R0)
  556         MOVW    R9, 120(R0)
  557         MOVW    R10,124(R0)
  558         MOVW    R11,128(R0)
  559         MOVW    R12,132(R0)
  560         MOVW    R13,136(R0)
  561         MOVW    R14,140(R0)
  562         /* done: back to the original mode and SPSR */
  563         MOVW    R2, CPSR
  564         MOVW    R1, SPSR
  565         MOVW    $0, R0
  566         RET
  567 
  568 /* Entered after a resume from suspend state.
  569  * The bootldr jumps here after a processor reset.
  570  * Restores the cp15 state and every mode's banked registers saved by
  571  * setpowerlabel, then returns on the restored svc R14.
  570  */
  571 TEXT power_resume(SB), $-4
  572         MOVW    $setR12(SB), R12                /* load the SB */
  573         /* SVC mode, interrupts disabled */
  574         MOVW    $(PsrDirq|PsrDfiq|PsrMsvc), R1
  575         MOVW    R1, CPSR
  576         /* gotopowerlabel() */
  577         /* svc */
  578 
  579         MOVW    $power_state+0(SB), R0
  580         MOVW    56(R0), R1              /* R1: SPSR, R2: CPSR */
  581         MOVW    60(R0), R2
  582         MOVW    R1, SPSR
  583         MOVW    R2, CPSR
  584         /* copro */
  585         /* flush caches */
  586         MCR             CpMMU, 0, R0, C(CpCacheFlush), C(0x7), 0
  587         /* drain prefetch */
  588         MOVW    R0,R0
  589         MOVW    R0,R0
  590         MOVW    R0,R0
  591         MOVW    R0,R0
  592         /* drain write buffer */
  593         MCR             CpMMU, 0, R0, C(CpCacheFlush), C(0xa), 4
  594         MCR             CpMMU, 0, R0, C(CpTLBFlush), C(0x7)
  595         MOVW    144(R0), R3
  596         MCR             CpMMU, 0, R3, C(CpDAC), C(0x0)
  597         MOVW    148(R0), R3
  598         MCR             CpMMU, 0, R3, C(CpTTB), C(0x0)
  599         MOVW    156(R0), R3
  600         MCR             CpMMU, 0, R3, C(CpFSR), C(0x0)
  601         MOVW    160(R0), R3
  602         MCR             CpMMU, 0, R3, C(CpFAR), C(0x0)
  603         MOVW    164(R0), R3
  604         MCR             CpMMU, 0, R3, C(CpPID), C(0x0)
  605         MOVW    152(R0), R3
  606         MCR             CpMMU, 0, R3, C(CpControl), C(0x0)      /* Enable cache */
  607         MOVW    R0,R0
  608         MOVW    R0,R0
  609         MOVW    R0,R0
  610         MOVW    R0,R0
  611         /* flush i&d caches */
  612         MCR             CpMMU, 0, R0, C(CpCacheFlush), C(0x7), 0
  613         /* flush tlb */
  614         MCR             CpMMU, 0, R0, C(CpTLBFlush), C(0x7), 0
  615         /* drain prefetch */
  616         MOVW    R0,R0
  617         MOVW    R0,R0
  618         MOVW    R0,R0
  619         MOVW    R0,R0
  620         /* usr */
           /* NOTE(review): R3 is computed here but never written to CPSR, unlike
            * every other mode section (and unlike setpowerlabel's usr section) —
            * so these loads land in the current svc bank, not the usr bank.
            * Looks like a bug; confirm before changing. */
  621         BIC             $(PsrMask), R2, R3
  622         ORR             $(0xdf), R3
  623         MOVW            168(R0), R11
  624         MOVW            172(R0), R12
  625         MOVW            176(R0), R13
  626         MOVW            180(R0), R14
  627         MOVW            R11, SPSR
  628         /* irq */
  629         BIC             $(PsrMask), R2, R3
  630         ORR             $(0xd2), R3
  631         MOVW    R3, CPSR
  632         MOVW    64(R0), R11
  633         MOVW    68(R0), R12
  634         MOVW    72(R0), R13
  635         MOVW    76(R0), R14
  636         MOVW    R11, SPSR
  637         /* und */
  638         BIC             $(PsrMask), R2, R3
  639         ORR             $(0xdb), R3
  640         MOVW    R3, CPSR
  641         MOVW    80(R0), R11
  642         MOVW    84(R0), R12
  643         MOVW    88(R0), R13
  644         MOVW    92(R0), R14
  645         MOVW    R11, SPSR
  646         /* abt */
  647         BIC             $(PsrMask), R2, R3
  648         ORR             $(0xd7), R3
  649         MOVW    R3, CPSR
  650         MOVW    96(R0), R11
  651         MOVW    100(R0), R12
  652         MOVW    104(R0), R13
  653         MOVW    108(R0), R14
  654         MOVW    R11, SPSR
  655         /* fiq (R8-R14 banked) */
  656         BIC             $(PsrMask), R2, R3
  657         ORR             $(0xd1), R3
  658         MOVW    R3, CPSR
  659         MOVW    112(R0), R7
  660         MOVW    116(R0), R8
  661         MOVW    120(R0), R9
  662         MOVW    124(R0), R10
  663         MOVW    128(R0), R11
  664         MOVW    132(R0), R12
  665         MOVW    136(R0), R13
  666         MOVW    140(R0), R14
  667         MOVW    R7, SPSR
  668         /* svc: restore the saved mode and general registers last */
  669         MOVW    56(R0), R1
  670         MOVW    60(R0), R2
  671         MOVW    R1, SPSR
  672         MOVW    R2, CPSR
  673         MOVW    0(R0), R1
  674         MOVW    4(R0), R2
  675         MOVW    8(R0), R3
  676         MOVW    12(R0),R4
  677         MOVW    16(R0),R5
  678         MOVW    20(R0),R6
  679         MOVW    24(R0),R7
  680         MOVW    28(R0),R8
  681         MOVW    32(R0),R9
  682         MOVW    36(R0),R10
  683         MOVW    40(R0),R11
  684         MOVW    44(R0),R12
  685         MOVW    48(R0),R13
  686         MOVW    52(R0),R14
  687         RET                             /* returns via the restored svc R14 */
  688 loop:                                   /* unreachable safety loop */
  689         B               loop
  690 
  691 /* prepare the SA-1100 for sleep mode; falls through into power_magic/power_code */
  691 TEXT power_down(SB), $-4
  692 
  693         TEXT    sa1100_power_off<>+0(SB),$8
  694         MOVW    resetregs+0(SB),R7
  695         MOVW    gpioregs+0(SB),R6
  696         MOVW    memconfregs+0(SB),R5
  697         MOVW    powerregs+0(SB),R3
  698 
  699 
  700         /* wakeup on power | rtc */
  701         MOVW    $(PWR_rtc|PWR_gpio0),R2
  702         MOVW    R2,0xc(R3)
  703 
  704         /* clear reset status */
  705         MOVW    $(RCSR_all), R2
  706         MOVW    R2, 0x4(R7)
  707         /* float */
  708         MOVW    $(PCFR_opde|PCFR_fp|PCFR_fs), R2
  709         MOVW    R2,0x10(R3)
  710         /* sleep state */
  711         MOVW    $0,R2
  712         MOVW    R2,0x18(R3)
  713         /* set resume address (pspr) — where the bootldr jumps after wakeup */
  714         MOVW    $resumeaddr+0(SB),R1
  715         MOVW    0x0(R1), R2
  716         MOVW    R2,0x8(R3)
  717 
  718         BL      cacheflush(SB)
  719 
  720         /* disable clock switching */
  721         MCR     CpPWR, 0, R1, C(CpTest), C(0x2), 2
  722 
  723         /* adjust mem timing */
  724         MOVW    memconfregs+0(SB),R5
  725         MOVW    0x1c(R5), R2
  726         ORR     $(MDREFR_k1db2), R2
  727         MOVW    R2, 0x1c(R5)
  728 
  729         /* set PLL to lower speed w/ delay (ppcr = 0)*/
  730         MOVW    powerregs+0(SB),R3
           /* NOTE(review): the SUB in these delay loops has no .S suffix, so it may
            * not set the flags BGT tests — confirm 5a's semantics for SUB/BGT */
  731         MOVW    $(120*206),R0
  732 l11:    SUB     $1,R0
  733         BGT     l11
  734         MOVW    $0, R2
  735         MOVW    R2, 0x14(R3)
  736         MOVW    $(120*206),R0
  737 l12:    SUB     $1,R0
  738         BGT     l12
  739 
  740         /* setup registers for suspend procedure:
  741          * 1. clear RT in mscx (R1, R7, R8)
  742          * 2. clear DRI in mdrefr (R4)
  743          * 3. set slfrsh in mdrefr (R6)
  744          * 4. clear DE in mdcnfg (R9)
  745          * 5. clear dram refresh (R10)
  746          * 6. force sleep (R2)
  747          */
  748         /* 1 */
  749         MOVW    0x10(R5), R2
  750         BIC     $(MSC_rt), R2
  751         MOVW    R2, R1
  752         MOVW    0x14(R5), R2
  753         BIC     $(MSC_rt), R2
  754         MOVW    R2, R7
  755         MOVW    0x2c(R5), R2
  756         BIC     $(MSC_rt), R2
  757         MOVW    R2, R8
  758         /* 2 */
  759         MOVW    0x1c(R5), R2
  760         BIC     $(0xff00), R2
  761         BIC     $(0x00f0), R2
  762         MOVW    R2, R4
  763         /* 3 */
  764         ORR     $(MDREFR_slfrsh), R2, R6
  765         /* 4 */
  766         MOVW    0x0(R5), R9
  767         BIC     $(MDCFNG_de), R9, R9
  768         /* 5 */
  769         MOVW    R4, R2
  770         BIC     $(MDREFR_slfrsh), R2, R2
  771         BIC     $(MDREFR_e1pin), R2, R2
  772         MOVW    R2, R10
  773         /* 6 */
  774         MOVW    $1,R2
  776 TEXT power_magic(SB), $-4
  777         /* power_code gets copied into the area of no-ops below,
  778          * at a cache-line boundary (8 instructions), so the final
  779          * suspend sequence runs entirely from one cached line
  779          */
  780         MOVW    R0, R0
  781         MOVW    R0, R0
  782         MOVW    R0, R0
  783         MOVW    R0, R0
  784         MOVW    R0, R0
  785         MOVW    R0, R0
  786         MOVW    R0, R0
  787         MOVW    R0, R0
  788         MOVW    R0, R0
  789         MOVW    R0, R0
  790         MOVW    R0, R0
  791         MOVW    R0, R0
  792         MOVW    R0, R0
  793         MOVW    R0, R0
  794         MOVW    R0, R0
  795         MOVW    R0, R0
  796 
  797 TEXT power_code(SB), $-4
  798         /* Follow the procedure; this code gets copied to the no-op
  799          * area preceding this code.  Registers were staged by power_down:
  799          * R1/R7/R8 = mscx, R4/R6/R10 = mdrefr variants, R9 = mdcnfg,
  799          * R2 = 1 (force sleep), R5 = memconfregs, R3 = powerregs.
  800          */
  801         /* 1 */
  802         MOVW    R1, 0x10(R5)
  803         MOVW    R7, 0x14(R5)
  804         MOVW    R8, 0x2c(R5)
  805         /* 2 */
  806         MOVW    R4, 0x1c(R5)
  807         /* 3 */
  808         MOVW    R6, 0x1c(R5)
  809         /* 4 */
  810         MOVW    R9, 0x0(R5)
  811         /* 5 */
  812         MOVW    R10, 0x1c(R5)
  813         /* 6 */
  814         MOVW    R2, 0x0(R3)
  815 slloop:
  816         B               slloop                  /* loop waiting for sleep */
  817 
  818 /* The first MCR instruction of this function needs to be on a cache-line
  819  * boundary; to make this happen, it will be copied to the first cache-line
  820  * boundary 8 words from the start of doze.
  821  *
  822  * Doze puts the machine into idle mode.  Any interrupt will get it out
  823  * at the next instruction (the RET, to be precise).
  824  */
  825 TEXT doze(SB), $-4
  826         MOVW    $UCDRAMZERO, R1         /* uncached address read by the copied doze_code */
           /* no-op area: doze_code is copied over part of this (see comment above) */
  827         MOVW    R0,R0
  828         MOVW    R0,R0
  829         MOVW    R0,R0
  830         MOVW    R0,R0
  831         MOVW    R0,R0
  832         MOVW    R0,R0
  833         MOVW    R0,R0
  834         MOVW    R0,R0
  835         MOVW    R0,R0
  836         MOVW    R0,R0
  837         MOVW    R0,R0
  838         MOVW    R0,R0
  839         MOVW    R0,R0
  840         MOVW    R0,R0
  841         MOVW    R0,R0
  842         MOVW    R0,R0
  843         MOVW    R0,R0
  844         MOVW    R0,R0
  845         MOVW    R0,R0
  846         MOVW    R0,R0
  847         MOVW    R0,R0
  848         MOVW    R0,R0
  849         MOVW    R0,R0
  850         MOVW    R0,R0
  851         RET
  852         
  853 TEXT doze_code(SB), $-4
  854         MCR     CpPWR, 0, R0, C(CpTest), C(0x2), 2
  855         MOVW    (R1), R0
  856         MCR     CpPWR, 0, R0, C(CpTest), C(0x8), 2

Cache object: 8a5ed695d4218057165df59a0bb09e90


[ source navigation ] [ diff markup ] [ identifier search ] [ freetext search ] [ file search ] [ list types ] [ track identifier ]


This page is part of the FreeBSD/Linux Kernel Cross-Reference, and was automatically generated using a modified version of the LXR engine.