The Design and Implementation of the FreeBSD Operating System, Second Edition
Now available: The Design and Implementation of the FreeBSD Operating System (Second Edition)


[ source navigation ] [ diff markup ] [ identifier search ] [ freetext search ] [ file search ] [ list types ] [ track identifier ]

FreeBSD/Linux Kernel Cross Reference
sys/mips/include/atomic.h

Version: -  FREEBSD  -  FREEBSD-13-STABLE  -  FREEBSD-13-0  -  FREEBSD-12-STABLE  -  FREEBSD-12-0  -  FREEBSD-11-STABLE  -  FREEBSD-11-0  -  FREEBSD-10-STABLE  -  FREEBSD-10-0  -  FREEBSD-9-STABLE  -  FREEBSD-9-0  -  FREEBSD-8-STABLE  -  FREEBSD-8-0  -  FREEBSD-7-STABLE  -  FREEBSD-7-0  -  FREEBSD-6-STABLE  -  FREEBSD-6-0  -  FREEBSD-5-STABLE  -  FREEBSD-5-0  -  FREEBSD-4-STABLE  -  FREEBSD-3-STABLE  -  FREEBSD22  -  l41  -  OPENBSD  -  linux-2.6  -  MK84  -  PLAN9  -  xnu-8792 
SearchContext: -  none  -  3  -  10 

    1 /*-
    2  * Copyright (c) 1998 Doug Rabson
    3  * All rights reserved.
    4  *
    5  * Redistribution and use in source and binary forms, with or without
    6  * modification, are permitted provided that the following conditions
    7  * are met:
    8  * 1. Redistributions of source code must retain the above copyright
    9  *    notice, this list of conditions and the following disclaimer.
   10  * 2. Redistributions in binary form must reproduce the above copyright
   11  *    notice, this list of conditions and the following disclaimer in the
   12  *    documentation and/or other materials provided with the distribution.
   13  *
   14  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
   15  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   16  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   17  * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
   18  * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
   19  * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
   20  * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
   21  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
   22  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
   23  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
   24  * SUCH DAMAGE.
   25  *
   26  *      from: src/sys/alpha/include/atomic.h,v 1.21.2.3 2005/10/06 18:12:05 jhb
   27  * $FreeBSD: releng/8.1/sys/mips/include/atomic.h 185427 2008-11-29 05:16:14Z imp $
   28  */
   29 
#ifndef _MACHINE_ATOMIC_H_
#define _MACHINE_ATOMIC_H_

/* This header depends on macros from <sys/cdefs.h> (__inline, __volatile). */
#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif
   36 
/*
 * Full memory barrier: issue a MIPS "sync" instruction so all earlier
 * loads/stores complete before any later ones are performed.  The
 * "memory" clobber additionally stops the compiler from reordering
 * memory accesses across this point.
 */
static __inline  void
mips_sync(void)
{
        __asm __volatile (".set noreorder\n\t"  /* keep assembler from rescheduling */
                        "sync\n\t"
                        "nop\n\t"       /* trailing nops pad the sync --      */
                        "nop\n\t"       /* presumably a pipeline-hazard       */
                        "nop\n\t"       /* workaround for certain cores;      */
                        "nop\n\t"       /* TODO(review): confirm which CPUs   */
                        "nop\n\t"       /* actually require this padding.     */
                        "nop\n\t"
                        "nop\n\t"
                        "nop\n\t"
                        ".set reorder\n"
                        : : : "memory");
}
   53 
/*
 * Generic barrier macros.  All three map onto the full mips_sync()
 * barrier; no weaker read-only or write-only barrier is provided.
 */
#define mb()    mips_sync()
#define wmb()   mips_sync()
#define rmb()   mips_sync()
   57 
/*
 * Various simple arithmetic on memory which is atomic in the presence
 * of interrupts and SMP safe.
 */

/*
 * The 8- and 16-bit variants are not inlined; only prototypes appear
 * here and the definitions live elsewhere (ll/sc operates on 32-bit
 * words, so sub-word atomics need a read-modify-write of the
 * containing word).
 */
void atomic_set_8(__volatile uint8_t *, uint8_t);
void atomic_clear_8(__volatile uint8_t *, uint8_t);
void atomic_add_8(__volatile uint8_t *, uint8_t);
void atomic_subtract_8(__volatile uint8_t *, uint8_t);

void atomic_set_16(__volatile uint16_t *, uint16_t);
void atomic_clear_16(__volatile uint16_t *, uint16_t);
void atomic_add_16(__volatile uint16_t *, uint16_t);
void atomic_subtract_16(__volatile uint16_t *, uint16_t);
   72 
/*
 * Atomically OR the bits of v into *p using an ll/sc retry loop:
 * load-linked the old value, OR in v, and store-conditional; if
 * another CPU wrote *p in between, sc fails (writes 0 to temp) and
 * the loop retries.
 */
static __inline void
atomic_set_32(__volatile uint32_t *p, uint32_t v)
{
        uint32_t temp;

        __asm __volatile (
                "1:\tll %0, %3\n\t"             /* load old value */
                "or     %0, %2, %0\n\t"         /* calculate new value */
                "sc     %0, %1\n\t"             /* attempt to store */
                "beqz   %0, 1b\n\t"             /* spin if failed */
                : "=&r" (temp), "=m" (*p)
                : "r" (v), "m" (*p)
                : "memory");

}
   88 
/*
 * Atomically clear the bits of v in *p: v is complemented up front so
 * the ll/sc loop only needs a single AND with the loaded value.
 */
static __inline void
atomic_clear_32(__volatile uint32_t *p, uint32_t v)
{
        uint32_t temp;
        v = ~v;                                 /* *p &= ~v via AND with mask */

        __asm __volatile (
                "1:\tll %0, %3\n\t"             /* load old value */
                "and    %0, %2, %0\n\t"         /* calculate new value */
                "sc     %0, %1\n\t"             /* attempt to store */
                "beqz   %0, 1b\n\t"             /* spin if failed */
                : "=&r" (temp), "=m" (*p)
                : "r" (v), "m" (*p)
                : "memory");
}
  104 
/*
 * Atomically add v to *p (ll/sc retry loop; addu is used, so the
 * addition wraps modulo 2^32 with no overflow trap).
 */
static __inline void
atomic_add_32(__volatile uint32_t *p, uint32_t v)
{
        uint32_t temp;

        __asm __volatile (
                "1:\tll %0, %3\n\t"             /* load old value */
                "addu   %0, %2, %0\n\t"         /* calculate new value */
                "sc     %0, %1\n\t"             /* attempt to store */
                "beqz   %0, 1b\n\t"             /* spin if failed */
                : "=&r" (temp), "=m" (*p)
                : "r" (v), "m" (*p)
                : "memory");
}
  119 
/*
 * Atomically subtract v from *p (ll/sc retry loop; two-operand
 * "subu %0, %2" computes temp = temp - v, wrapping, no trap).
 */
static __inline void
atomic_subtract_32(__volatile uint32_t *p, uint32_t v)
{
        uint32_t temp;

        __asm __volatile (
                "1:\tll %0, %3\n\t"             /* load old value */
                "subu   %0, %2\n\t"             /* calculate new value */
                "sc     %0, %1\n\t"             /* attempt to store */
                "beqz   %0, 1b\n\t"                     /* spin if failed */
                : "=&r" (temp), "=m" (*p)
                : "r" (v), "m" (*p)
                : "memory");
}
  134 
/*
 * Atomically replace *addr with zero and return its previous value.
 * Both result and temp are early-clobber ("=&r") since they are
 * written before the "m" input is last used.
 */
static __inline uint32_t
atomic_readandclear_32(__volatile uint32_t *addr)
{
        uint32_t result,temp;

        __asm __volatile (
                "1:\tll  %0,%3\n\t"     /* load current value, asserting lock */
                "li      %1,0\n\t"              /* value to store */
                "sc      %1,%2\n\t"     /* attempt to store */
                "beqz    %1, 1b\n\t"            /* if the store failed, spin */
                : "=&r"(result), "=&r"(temp), "=m" (*addr)
                : "m" (*addr)
                : "memory");

        return result;
}
  151 
/*
 * Atomically replace *addr with value and return the previous
 * contents.  "or %1,$0,%4" is just a register move of value into
 * temp (OR with the zero register) so sc can consume it.
 */
static __inline uint32_t
atomic_readandset_32(__volatile uint32_t *addr, uint32_t value)
{
        uint32_t result,temp;

        __asm __volatile (
                "1:\tll  %0,%3\n\t"     /* load current value, asserting lock */
                "or      %1,$0,%4\n\t"          /* temp = value */
                "sc      %1,%2\n\t"     /* attempt to store */
                "beqz    %1, 1b\n\t"            /* if the store failed, spin */
                : "=&r"(result), "=&r"(temp), "=m" (*addr)
                : "m" (*addr), "r" (value)
                : "memory");

        return result;
}
  168 
/*
 * Generate the _acq and _rel flavors of an atomic op from the plain
 * op plus the full mips_sync() barrier: acquire puts the barrier
 * after the operation (later accesses cannot move before it),
 * release puts the barrier before it (earlier accesses complete
 * first).
 */
#define ATOMIC_ACQ_REL(NAME, WIDTH)                                     \
static __inline  void                                                   \
atomic_##NAME##_acq_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{                                                                       \
        atomic_##NAME##_##WIDTH(p, v);                                  \
        mips_sync();                                                    \
}                                                                       \
                                                                        \
static __inline  void                                                   \
atomic_##NAME##_rel_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{                                                                       \
        mips_sync();                                                    \
        atomic_##NAME##_##WIDTH(p, v);                                  \
}
  183 
/* Variants of simple arithmetic with memory barriers. */
ATOMIC_ACQ_REL(set, 8)
ATOMIC_ACQ_REL(clear, 8)
ATOMIC_ACQ_REL(add, 8)
ATOMIC_ACQ_REL(subtract, 8)
ATOMIC_ACQ_REL(set, 16)
ATOMIC_ACQ_REL(clear, 16)
ATOMIC_ACQ_REL(add, 16)
ATOMIC_ACQ_REL(subtract, 16)
ATOMIC_ACQ_REL(set, 32)
ATOMIC_ACQ_REL(clear, 32)
ATOMIC_ACQ_REL(add, 32)
ATOMIC_ACQ_REL(subtract, 32)
#if 0
/* Disabled: the underlying atomic_*_64 primitives are not defined here. */
ATOMIC_ACQ_REL(set, 64)
ATOMIC_ACQ_REL(clear, 64)
ATOMIC_ACQ_REL(add, 64)
ATOMIC_ACQ_REL(subtract, 64)
#endif

#undef ATOMIC_ACQ_REL
  205 
/*
 * We assume that a = b will do atomic loads and stores.
 * Acquire loads read the value then issue a full barrier; release
 * stores issue the barrier first, then write.
 */
#define ATOMIC_STORE_LOAD(WIDTH)                        \
static __inline  uint##WIDTH##_t                        \
atomic_load_acq_##WIDTH(__volatile uint##WIDTH##_t *p)  \
{                                                       \
        uint##WIDTH##_t v;                              \
                                                        \
        v = *p;                                         \
        mips_sync();                                    \
        return (v);                                     \
}                                                       \
                                                        \
static __inline  void                                   \
atomic_store_rel_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{                                                       \
        mips_sync();                                    \
        *p = v;                                         \
}

ATOMIC_STORE_LOAD(32)
/*
 * NOTE(review): a plain 64-bit load/store is presumably only a single
 * instruction under the 64-bit ABI; confirm the _64 forms are not
 * used on 32-bit MIPS, where the assumption above would not hold.
 */
ATOMIC_STORE_LOAD(64)
/* Out-of-line 64-bit helpers; note the value is passed by pointer. */
void atomic_store_64 (__volatile uint64_t *, uint64_t *);
void atomic_load_64 (__volatile uint64_t *, uint64_t *);

#undef ATOMIC_STORE_LOAD
  233 
/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 * (On success the return value is sc's result, 1; on a compare
 * mismatch the code branches to label 2 and returns 0.)
 */
static __inline uint32_t
atomic_cmpset_32(__volatile uint32_t* p, uint32_t cmpval, uint32_t newval)
{
        uint32_t ret;

        __asm __volatile (
                "1:\tll %0, %4\n\t"             /* load old value */
                "bne %0, %2, 2f\n\t"            /* compare */
                "move %0, %3\n\t"               /* value to store */
                "sc %0, %1\n\t"                 /* attempt to store */
                "beqz %0, 1b\n\t"               /* if it failed, spin */
                "j 3f\n\t"
                "2:\n\t"
                "li     %0, 0\n\t"              /* compare failed: return 0 */
                "3:\n"
                : "=&r" (ret), "=m" (*p)
                : "r" (cmpval), "r" (newval), "m" (*p)
                : "memory");

        return ret;
}
  260 
  261 /*
  262  * Atomically compare the value stored at *p with cmpval and if the
  263  * two values are equal, update the value of *p with newval. Returns
  264  * zero if the compare failed, nonzero otherwise.
  265  */
  266 static __inline uint32_t
  267 atomic_cmpset_acq_32(__volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
  268 {
  269         int retval;
  270 
  271         retval = atomic_cmpset_32(p, cmpval, newval);
  272         mips_sync();
  273         return (retval);
  274 }
  275 
  276 static __inline uint32_t
  277 atomic_cmpset_rel_32(__volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
  278 {
  279         mips_sync();
  280         return (atomic_cmpset_32(p, cmpval, newval));
  281 }
  282 
  283 /*
  284  * Atomically add the value of v to the integer pointed to by p and return
  285  * the previous value of *p.
  286  */
  287 static __inline uint32_t
  288 atomic_fetchadd_32(__volatile uint32_t *p, uint32_t v)
  289 {
  290         uint32_t value, temp;
  291 
  292         __asm __volatile (
  293                 "1:\tll %0, %1\n\t"             /* load old value */
  294                 "addu %2, %3, %0\n\t"           /* calculate new value */
  295                 "sc %2, %1\n\t"                 /* attempt to store */
  296                 "beqz %2, 1b\n\t"               /* spin if failed */
  297                 : "=&r" (value), "=m" (*p), "=r" (temp)
  298                 : "r" (v), "m" (*p));
  299         return (value);
  300 }
  301 
/* Operations on chars.  Thin aliases onto the 8-bit primitives. */
#define atomic_set_char         atomic_set_8
#define atomic_set_acq_char     atomic_set_acq_8
#define atomic_set_rel_char     atomic_set_rel_8
#define atomic_clear_char       atomic_clear_8
#define atomic_clear_acq_char   atomic_clear_acq_8
#define atomic_clear_rel_char   atomic_clear_rel_8
#define atomic_add_char         atomic_add_8
#define atomic_add_acq_char     atomic_add_acq_8
#define atomic_add_rel_char     atomic_add_rel_8
#define atomic_subtract_char    atomic_subtract_8
#define atomic_subtract_acq_char        atomic_subtract_acq_8
#define atomic_subtract_rel_char        atomic_subtract_rel_8
  315 
/* Operations on shorts.  Thin aliases onto the 16-bit primitives. */
#define atomic_set_short        atomic_set_16
#define atomic_set_acq_short    atomic_set_acq_16
#define atomic_set_rel_short    atomic_set_rel_16
#define atomic_clear_short      atomic_clear_16
#define atomic_clear_acq_short  atomic_clear_acq_16
#define atomic_clear_rel_short  atomic_clear_rel_16
#define atomic_add_short        atomic_add_16
#define atomic_add_acq_short    atomic_add_acq_16
#define atomic_add_rel_short    atomic_add_rel_16
#define atomic_subtract_short   atomic_subtract_16
#define atomic_subtract_acq_short       atomic_subtract_acq_16
#define atomic_subtract_rel_short       atomic_subtract_rel_16
  329 
/* Operations on ints.  Thin aliases onto the 32-bit primitives. */
#define atomic_set_int          atomic_set_32
#define atomic_set_acq_int      atomic_set_acq_32
#define atomic_set_rel_int      atomic_set_rel_32
#define atomic_clear_int        atomic_clear_32
#define atomic_clear_acq_int    atomic_clear_acq_32
#define atomic_clear_rel_int    atomic_clear_rel_32
#define atomic_add_int          atomic_add_32
#define atomic_add_acq_int      atomic_add_acq_32
#define atomic_add_rel_int      atomic_add_rel_32
#define atomic_subtract_int     atomic_subtract_32
#define atomic_subtract_acq_int atomic_subtract_acq_32
#define atomic_subtract_rel_int atomic_subtract_rel_32
#define atomic_cmpset_int       atomic_cmpset_32
#define atomic_cmpset_acq_int   atomic_cmpset_acq_32
#define atomic_cmpset_rel_int   atomic_cmpset_rel_32
#define atomic_load_acq_int     atomic_load_acq_32
#define atomic_store_rel_int    atomic_store_rel_32
#define atomic_readandclear_int atomic_readandclear_32
#define atomic_readandset_int   atomic_readandset_32
#define atomic_fetchadd_int     atomic_fetchadd_32
  351 
#ifdef __mips64
/*
 * 64-bit ABI: long and pointers are 64 bits wide, so the long/ptr
 * aliases map onto the _64 operations.
 * NOTE(review): several _64 primitives referenced here (set/clear/
 * add/subtract/cmpset/fetchadd/readandclear) are not defined in this
 * file -- presumably provided elsewhere; the 64-bit acq/rel
 * generators above remain under "#if 0".  Verify before building
 * with __mips64.
 */
/* Operations on longs. */
#define atomic_set_long         atomic_set_64
#define atomic_set_acq_long     atomic_set_acq_64
#define atomic_set_rel_long     atomic_set_rel_64
#define atomic_clear_long       atomic_clear_64
#define atomic_clear_acq_long   atomic_clear_acq_64
#define atomic_clear_rel_long   atomic_clear_rel_64
#define atomic_add_long         atomic_add_64
#define atomic_add_acq_long     atomic_add_acq_64
#define atomic_add_rel_long     atomic_add_rel_64
#define atomic_subtract_long    atomic_subtract_64
#define atomic_subtract_acq_long        atomic_subtract_acq_64
#define atomic_subtract_rel_long        atomic_subtract_rel_64
#define atomic_cmpset_long      atomic_cmpset_64
#define atomic_cmpset_acq_long  atomic_cmpset_acq_64
#define atomic_cmpset_rel_long  atomic_cmpset_rel_64
#define atomic_load_acq_long    atomic_load_acq_64
#define atomic_store_rel_long   atomic_store_rel_64
#define atomic_fetchadd_long    atomic_fetchadd_64
#define atomic_readandclear_long        atomic_readandclear_64

/* Operations on pointers. */
#define atomic_set_ptr          atomic_set_64
#define atomic_set_acq_ptr      atomic_set_acq_64
#define atomic_set_rel_ptr      atomic_set_rel_64
#define atomic_clear_ptr        atomic_clear_64
#define atomic_clear_acq_ptr    atomic_clear_acq_64
#define atomic_clear_rel_ptr    atomic_clear_rel_64
#define atomic_add_ptr          atomic_add_64
#define atomic_add_acq_ptr      atomic_add_acq_64
#define atomic_add_rel_ptr      atomic_add_rel_64
#define atomic_subtract_ptr     atomic_subtract_64
#define atomic_subtract_acq_ptr atomic_subtract_acq_64
#define atomic_subtract_rel_ptr atomic_subtract_rel_64
#define atomic_cmpset_ptr       atomic_cmpset_64
#define atomic_cmpset_acq_ptr   atomic_cmpset_acq_64
#define atomic_cmpset_rel_ptr   atomic_cmpset_rel_64
#define atomic_load_acq_ptr     atomic_load_acq_64
#define atomic_store_rel_ptr    atomic_store_rel_64
#define atomic_readandclear_ptr atomic_readandclear_64
#else /* __mips64 */

/*
 * 32-bit ABI: long and pointers are 32 bits wide, so alias onto the
 * _32 operations.
 * NOTE(review): some long aliases cast their arguments through
 * (volatile u_int *)/u_int while others alias the _32 name directly
 * -- inconsistent, though harmless when long and u_int have the same
 * width.
 */
/* Operations on longs. */
#define atomic_set_long         atomic_set_32
#define atomic_set_acq_long     atomic_set_acq_32
#define atomic_set_rel_long     atomic_set_rel_32
#define atomic_clear_long       atomic_clear_32
#define atomic_clear_acq_long   atomic_clear_acq_32
#define atomic_clear_rel_long   atomic_clear_rel_32
#define atomic_add_long(p, v) \
        atomic_add_32((volatile u_int *)(p), (u_int)(v))
#define atomic_add_acq_long     atomic_add_acq_32
#define atomic_add_rel_long     atomic_add_rel_32
#define atomic_subtract_long(p, v) \
        atomic_subtract_32((volatile u_int *)(p), (u_int)(v))
#define atomic_subtract_acq_long        atomic_subtract_acq_32
#define atomic_subtract_rel_long        atomic_subtract_rel_32
#define atomic_cmpset_long      atomic_cmpset_32
#define atomic_cmpset_acq_long(p, cmpval, newval) \
        atomic_cmpset_acq_32((volatile u_int *)(p), \
            (u_int)(cmpval), (u_int)(newval))
#define atomic_cmpset_rel_long(p, cmpval, newval) \
        atomic_cmpset_rel_32((volatile u_int *)(p), \
            (u_int)(cmpval), (u_int)(newval))
#define atomic_load_acq_long    atomic_load_acq_32
#define atomic_store_rel_long   atomic_store_rel_32
#define atomic_fetchadd_long(p, v) \
        atomic_fetchadd_32((volatile u_int *)(p), (u_int)(v))
#define atomic_readandclear_long        atomic_readandclear_32

/* Operations on pointers. */
#define atomic_set_ptr          atomic_set_32
#define atomic_set_acq_ptr      atomic_set_acq_32
#define atomic_set_rel_ptr      atomic_set_rel_32
#define atomic_clear_ptr        atomic_clear_32
#define atomic_clear_acq_ptr    atomic_clear_acq_32
#define atomic_clear_rel_ptr    atomic_clear_rel_32
#define atomic_add_ptr          atomic_add_32
#define atomic_add_acq_ptr      atomic_add_acq_32
#define atomic_add_rel_ptr      atomic_add_rel_32
#define atomic_subtract_ptr     atomic_subtract_32
#define atomic_subtract_acq_ptr atomic_subtract_acq_32
#define atomic_subtract_rel_ptr atomic_subtract_rel_32
#define atomic_cmpset_ptr       atomic_cmpset_32
#define atomic_cmpset_acq_ptr   atomic_cmpset_acq_32
#define atomic_cmpset_rel_ptr   atomic_cmpset_rel_32
#define atomic_load_acq_ptr     atomic_load_acq_32
#define atomic_store_rel_ptr    atomic_store_rel_32
#define atomic_readandclear_ptr atomic_readandclear_32
#endif /* __mips64 */

#endif /* ! _MACHINE_ATOMIC_H_ */

Cache object: 4c7a5d8f807da52121d7640f5a118490


[ source navigation ] [ diff markup ] [ identifier search ] [ freetext search ] [ file search ] [ list types ] [ track identifier ]


This page is part of the FreeBSD/Linux Kernel Cross-Reference, and was automatically generated using a modified version of the LXR engine.