FreeBSD/Linux Kernel Cross Reference
sys/arm/arm/cpufunc_asm_xscale_c3.S

/*      $NetBSD: cpufunc_asm_xscale.S,v 1.16 2002/08/17 16:36:32 thorpej Exp $  */

/*-
 * Copyright (c) 2007 Olivier Houchard
 * Copyright (c) 2001, 2002 Wasabi Systems, Inc.
 * All rights reserved.
 *
 * Written by Allen Briggs and Jason R. Thorpe for Wasabi Systems, Inc.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *      This product includes software developed for the NetBSD Project by
 *      Wasabi Systems, Inc.
 * 4. The name of Wasabi Systems, Inc. may not be used to endorse
 *    or promote products derived from this software without specific prior
 *    written permission.
 *
 * THIS SOFTWARE IS PROVIDED BY WASABI SYSTEMS, INC. ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
 * TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
 * PURPOSE ARE DISCLAIMED.  IN NO EVENT SHALL WASABI SYSTEMS, INC
 * BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
 * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
 * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
 * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
 * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
 * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
 * POSSIBILITY OF SUCH DAMAGE.
 *
 */

/*-
 * Copyright (c) 2001 Matt Thomas.
 * Copyright (c) 1997,1998 Mark Brinicombe.
 * Copyright (c) 1997 Causality Limited
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 * 3. All advertising materials mentioning features or use of this software
 *    must display the following acknowledgement:
 *      This product includes software developed by Causality Limited.
 * 4. The name of Causality Limited may not be used to endorse or promote
 *    products derived from this software without specific prior written
 *    permission.
 *
 * THIS SOFTWARE IS PROVIDED BY CAUSALITY LIMITED ``AS IS'' AND ANY EXPRESS
 * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
 * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
 * DISCLAIMED. IN NO EVENT SHALL CAUSALITY LIMITED BE LIABLE FOR ANY DIRECT,
 * INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
 * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
 * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * XScale core 3 assembly functions for CPU / MMU / TLB specific operations
 */

#include <machine/armreg.h>
#include <machine/asm.h>
__FBSDID("$FreeBSD: releng/10.2/sys/arm/arm/cpufunc_asm_xscale_c3.S 278613 2015-02-12 03:50:33Z ian $");

/*
 * Size of the XScale core D-cache.
 */
#define DCACHE_SIZE             0x00008000

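/*
 * Literal pool entry holding the address of the block_userspace_access
 * flag; the cache-clean block/unblock macros below set and restore it
 * when CACHE_CLEAN_BLOCK_INTR is not defined.
 */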
.Lblock_userspace_access:
        .word   _C_LABEL(block_userspace_access)

/*
 * CPWAIT -- Canonical method to wait for CP15 update.
 * From: Intel 80200 manual, section 2.3.3.
 *
 * NOTE: Clobbers the specified temp reg.
 */
#define CPWAIT_BRANCH                                                    \
        sub     pc, pc, #4

#define CPWAIT(tmp)                                                      \
        mrc     p15, 0, tmp, c2, c0, 0  /* arbitrary read of CP15 */    ;\
        mov     tmp, tmp                /* wait for it to complete */   ;\
        CPWAIT_BRANCH                   /* branch to next insn */

#define CPWAIT_AND_RETURN_SHIFTER       lsr #32

#define CPWAIT_AND_RETURN(tmp)                                           \
        mrc     p15, 0, tmp, c2, c0, 0  /* arbitrary read of CP15 */    ;\
        /* Wait for it to complete and branch to the return address */   \
        sub     pc, lr, tmp, CPWAIT_AND_RETURN_SHIFTER
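
/*
 * Notes on the macros above: CPWAIT_BRANCH ("sub pc, pc, #4") branches to
 * the very next instruction (pc reads as the current instruction + 8),
 * which flushes the pipeline after the CP15 read.  CPWAIT_AND_RETURN_SHIFTER
 * is "lsr #32", which always produces zero, so the final subtract is
 * effectively "mov pc, lr" with a data dependency on the CP15 read,
 * forcing that read to complete before we return.
 */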

#define ARM_USE_L2_CACHE

#define L2_CACHE_SIZE           0x80000
#define L2_CACHE_WAYS           8
#define L2_CACHE_LINE_SIZE      32
#define L2_CACHE_SETS           (L2_CACHE_SIZE / \
    (L2_CACHE_WAYS * L2_CACHE_LINE_SIZE))

#define L1_DCACHE_SIZE          32 * 1024
#define L1_DCACHE_WAYS          4
#define L1_DCACHE_LINE_SIZE     32
#define L1_DCACHE_SETS          (L1_DCACHE_SIZE / \
    (L1_DCACHE_WAYS * L1_DCACHE_LINE_SIZE))
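
/*
 * With the sizes above: L2_CACHE_SETS = 0x80000 / (8 * 32) = 1024 and
 * L1_DCACHE_SETS = 32 * 1024 / (4 * 32) = 256, matching the set/way
 * loop bounds used below.
 */
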
#ifdef CACHE_CLEAN_BLOCK_INTR
#define XSCALE_CACHE_CLEAN_BLOCK                                        \
        stmfd   sp!, {r4}                                       ;       \
        mrs     r4, cpsr                                        ;       \
        orr     r0, r4, #(PSR_I | PSR_F)                        ;       \
        msr     cpsr_fsxc, r0

#define XSCALE_CACHE_CLEAN_UNBLOCK                                      \
        msr     cpsr_fsxc, r4                                   ;       \
        ldmfd   sp!, {r4}
#else
#define XSCALE_CACHE_CLEAN_BLOCK                                        \
        stmfd   sp!, {r4}                                       ;       \
        ldr     r4, .Lblock_userspace_access                    ;       \
        ldr     ip, [r4]                                        ;       \
        orr     r0, ip, #1                                      ;       \
        str     r0, [r4]

#define XSCALE_CACHE_CLEAN_UNBLOCK                                      \
        str     ip, [r4]        /* restore block_userspace_access */ ;  \
        ldmfd   sp!, {r4}
#endif /* CACHE_CLEAN_BLOCK_INTR */
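
/*
 * The set/way clean loop below must not be interrupted by anything that
 * could dirty the D-cache.  With CACHE_CLEAN_BLOCK_INTR the block/unblock
 * macros disable IRQ and FIQ around the loop; otherwise they set the
 * block_userspace_access flag for the duration of the clean and restore
 * its previous value afterwards.
 */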
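/*
 * xscalec3_cache_syncI, _cache_purgeID, _cache_cleanID, _cache_purgeD and
 * _cache_cleanD all share the set/way clean-and-invalidate loop below;
 * the syncI/purgeID entry points additionally flush the whole I-cache
 * first.
 */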
ENTRY_NP(xscalec3_cache_syncI)
EENTRY_NP(xscalec3_cache_purgeID)
        mcr     p15, 0, r0, c7, c5, 0   /* flush I cache (D cleaned below) */
EENTRY_NP(xscalec3_cache_cleanID)
EENTRY_NP(xscalec3_cache_purgeD)
EENTRY(xscalec3_cache_cleanD)

        XSCALE_CACHE_CLEAN_BLOCK
        mov     r0, #0                  /* way counter */
1:
        mov     r1, r0, asl #30         /* way index in bits [31:30] */
        mov     r2, #0                  /* set counter */
2:
        orr     r3, r1, r2, asl #5      /* merge set index (bits [12:5]) */
        mcr     p15, 0, r3, c7, c14, 2  /* clean and invalidate by set/way */
        add     r2, r2, #1
        cmp     r2, #L1_DCACHE_SETS     /* all sets in this way done? */
        bne     2b
        add     r0, r0, #1
        cmp     r0, #4                  /* all 4 ways (L1_DCACHE_WAYS) done? */
        bne     1b
        CPWAIT(r0)
        XSCALE_CACHE_CLEAN_UNBLOCK
        mcr     p15, 0, r0, c7, c10, 4  /* drain write buffer */

        RET
EEND(xscalec3_cache_purgeID)
EEND(xscalec3_cache_cleanID)
EEND(xscalec3_cache_purgeD)
EEND(xscalec3_cache_cleanD)
END(xscalec3_cache_syncI)

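
/*
 * The _rng variants operate on a virtual address range: start address in
 * r0, length in r1, rounded out to whole 32-byte cache lines.  For the L1
 * routines below, ranges of 16KB (0x4000) or more fall back to cleaning
 * the whole cache instead of walking the range line by line.
 */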
ENTRY(xscalec3_cache_purgeID_rng)

        cmp     r1, #0x4000
        bcs     _C_LABEL(xscalec3_cache_cleanID)
        and     r2, r0, #0x1f
        add     r1, r1, r2
        bic     r0, r0, #0x1f

1:      mcr     p15, 0, r0, c7, c14, 1  /* clean/invalidate L1 D cache entry */
        nop
        mcr     p15, 0, r0, c7, c5, 1   /* flush I cache single entry */
        add     r0, r0, #32
        subs    r1, r1, #32
        bhi     1b

        CPWAIT(r0)

        mcr     p15, 0, r0, c7, c10, 4  /* drain write buffer */

        CPWAIT_AND_RETURN(r0)
END(xscalec3_cache_purgeID_rng)

ENTRY(xscalec3_cache_syncI_rng)
        cmp     r1, #0x4000
        bcs     _C_LABEL(xscalec3_cache_syncI)

        and     r2, r0, #0x1f
        add     r1, r1, r2
        bic     r0, r0, #0x1f

1:      mcr     p15, 0, r0, c7, c10, 1  /* clean D cache entry */
        mcr     p15, 0, r0, c7, c5, 1   /* flush I cache single entry */
        add     r0, r0, #32
        subs    r1, r1, #32
        bhi     1b

        CPWAIT(r0)

        mcr     p15, 0, r0, c7, c10, 4  /* drain write buffer */

        CPWAIT_AND_RETURN(r0)
END(xscalec3_cache_syncI_rng)

ENTRY(xscalec3_cache_purgeD_rng)

        cmp     r1, #0x4000
        bcs     _C_LABEL(xscalec3_cache_cleanID)
        and     r2, r0, #0x1f
        add     r1, r1, r2
        bic     r0, r0, #0x1f

1:      mcr     p15, 0, r0, c7, c14, 1  /* Clean and invalidate D cache entry */
        add     r0, r0, #32
        subs    r1, r1, #32
        bhi     1b

        CPWAIT(r0)

        mcr     p15, 0, r0, c7, c10, 4  /* drain write buffer */

        CPWAIT_AND_RETURN(r0)
END(xscalec3_cache_purgeD_rng)

ENTRY(xscalec3_cache_cleanID_rng)
EENTRY(xscalec3_cache_cleanD_rng)

        cmp     r1, #0x4000
        bcs     _C_LABEL(xscalec3_cache_cleanID)
        and     r2, r0, #0x1f
        add     r1, r1, r2
        bic     r0, r0, #0x1f

1:      mcr     p15, 0, r0, c7, c10, 1  /* clean L1 D cache entry */
        nop
        add     r0, r0, #32
        subs    r1, r1, #32
        bhi     1b

        CPWAIT(r0)

        mcr     p15, 0, r0, c7, c10, 4  /* drain write buffer */

        CPWAIT_AND_RETURN(r0)
EEND(xscalec3_cache_cleanD_rng)
END(xscalec3_cache_cleanID_rng)

ENTRY(xscalec3_l2cache_purge)
        /* Clean up the whole L2 cache */
        mcr     p15, 0, r0, c7, c10, 5  /* Data memory barrier */
        mov     r0, #0                  /* way counter */
1:
        mov     r1, r0, asl #29         /* way index in bits [31:29] */
        mov     r2, #0                  /* set counter */
2:
        orr     r3, r1, r2, asl #5      /* merge set index */
        mcr     p15, 1, r3, c7, c15, 2  /* clean and invalidate L2 line by set/way */
        add     r2, r2, #1
        cmp     r2, #L2_CACHE_SETS      /* all sets in this way done? */
        bne     2b
        add     r0, r0, #1
        cmp     r0, #8                  /* all 8 ways (L2_CACHE_WAYS) done? */
        bne     1b
        mcr     p15, 0, r0, c7, c10, 4          @ data write barrier

        CPWAIT(r0)
        mcr     p15, 0, r0, c7, c10, 5  /* Data memory barrier */
        RET
END(xscalec3_l2cache_purge)

ENTRY(xscalec3_l2cache_clean_rng)
        mcr     p15, 0, r0, c7, c10, 5  /* Data memory barrier */

        and     r2, r0, #0x1f
        add     r1, r1, r2
        bic     r0, r0, #0x1f

1:      mcr     p15, 1, r0, c7, c11, 1  /* Clean L2 D cache entry */
        add     r0, r0, #32
        subs    r1, r1, #32
        bhi     1b

        CPWAIT(r0)

        mcr     p15, 0, r0, c7, c10, 4          @ data write barrier
        mcr     p15, 0, r0, c7, c10, 5          /* Data memory barrier */

        CPWAIT_AND_RETURN(r0)
END(xscalec3_l2cache_clean_rng)

ENTRY(xscalec3_l2cache_purge_rng)

        mcr     p15, 0, r0, c7, c10, 5  /* Data memory barrier */

        and     r2, r0, #0x1f
        add     r1, r1, r2
        bic     r0, r0, #0x1f

1:      mcr     p15, 1, r0, c7, c11, 1  /* Clean L2 D cache entry */
        mcr     p15, 1, r0, c7, c7, 1   /* Invalidate L2 D cache entry */
        add     r0, r0, #32
        subs    r1, r1, #32
        bhi     1b

        mcr     p15, 0, r0, c7, c10, 4          @ data write barrier
        mcr     p15, 0, r0, c7, c10, 5          /* Data memory barrier */

        CPWAIT_AND_RETURN(r0)
END(xscalec3_l2cache_purge_rng)

ENTRY(xscalec3_l2cache_flush_rng)
        mcr     p15, 0, r0, c7, c10, 5  /* Data memory barrier */

        and     r2, r0, #0x1f
        add     r1, r1, r2
        bic     r0, r0, #0x1f

1:      mcr     p15, 1, r0, c7, c7, 1   /* Invalidate L2 cache line */
        add     r0, r0, #32
        subs    r1, r1, #32
        bhi     1b
        mcr     p15, 0, r0, c7, c10, 4          @ data write barrier
        mcr     p15, 0, r0, c7, c10, 5          /* Data memory barrier */
        CPWAIT_AND_RETURN(r0)
END(xscalec3_l2cache_flush_rng)

/*
 * Functions to set the MMU Translation Table Base register
 *
 * We need to clean and flush the cache as it uses virtual
 * addresses that are about to change.
 */
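/*
 * Sequence: block interrupts (or set block_userspace_access), clean the
 * D-cache, invalidate the I-cache and BTB, drain the write buffer, write
 * the new TTB (optionally marked cacheable in L2), then invalidate the
 * I+D TLBs.
 */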
ENTRY(xscalec3_setttb)
#ifdef CACHE_CLEAN_BLOCK_INTR
        mrs     r3, cpsr                /* save interrupt state */
        orr     r1, r3, #(PSR_I | PSR_F)
        msr     cpsr_fsxc, r1           /* disable IRQ and FIQ */
#else
        ldr     r3, .Lblock_userspace_access
        ldr     r2, [r3]                /* save old value */
        orr     r1, r2, #1
        str     r1, [r3]                /* block user-space access */
#endif
        stmfd   sp!, {r0-r3, lr}
        bl      _C_LABEL(xscalec3_cache_cleanID)
        mcr     p15, 0, r0, c7, c5, 0   /* invalidate I$ and BTB */
        mcr     p15, 0, r0, c7, c10, 4  /* drain write and fill buffer */

        CPWAIT(r0)

        ldmfd   sp!, {r0-r3, lr}

#ifdef ARM_USE_L2_CACHE
        orr     r0, r0, #0x18   /* cache the page table in L2 */
#endif
        /* Write the TTB */
        mcr     p15, 0, r0, c2, c0, 0

        /* If we have updated the TTB we must flush the TLB */
        mcr     p15, 0, r0, c8, c7, 0   /* invalidate I+D TLB */

        CPWAIT(r0)

#ifdef CACHE_CLEAN_BLOCK_INTR
        msr     cpsr_fsxc, r3           /* restore interrupt state */
#else
        str     r2, [r3]                /* restore block_userspace_access */
#endif
        RET
END(xscalec3_setttb)

/*
 * Context switch.
 *
 * This is the CPU-specific part of the context switcher, cpu_switch();
 * it actually performs the TTB reload.
 *
 * NOTE: Special calling convention
 *      r1, r4-r13 must be preserved
 */
ENTRY(xscalec3_context_switch)
        /*
         * CF_CACHE_PURGE_ID will *ALWAYS* be called prior to this.
         * Thus the data cache will contain only kernel data and the
         * instruction cache will contain only kernel code, and all
         * kernel mappings are shared by all processes.
         */
#ifdef ARM_USE_L2_CACHE
        orr     r0, r0, #0x18   /* Cache the page table in L2 */
#endif
        /* Write the TTB */
        mcr     p15, 0, r0, c2, c0, 0

        /* If we have updated the TTB we must flush the TLB */
        mcr     p15, 0, r0, c8, c7, 0   /* flush the I+D tlb */

        CPWAIT_AND_RETURN(r0)
END(xscalec3_context_switch)

