FreeBSD/Linux Kernel Cross Reference
sys/amd64/include/atomic.h


    1 /*-
    2  * Copyright (c) 1998 Doug Rabson
    3  * All rights reserved.
    4  *
    5  * Redistribution and use in source and binary forms, with or without
    6  * modification, are permitted provided that the following conditions
    7  * are met:
    8  * 1. Redistributions of source code must retain the above copyright
    9  *    notice, this list of conditions and the following disclaimer.
   10  * 2. Redistributions in binary form must reproduce the above copyright
   11  *    notice, this list of conditions and the following disclaimer in the
   12  *    documentation and/or other materials provided with the distribution.
   13  *
   14  * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
   15  * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
   16  * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
   17  * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
   18  * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
   19  * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
   20  * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
   21  * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
   22  * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
   23  * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
   24  * SUCH DAMAGE.
   25  *
   26  * $FreeBSD$
   27  */
   28 #ifndef _MACHINE_ATOMIC_H_
   29 #define _MACHINE_ATOMIC_H_
   30 
   31 #ifndef _SYS_CDEFS_H_
   32 #error this file needs sys/cdefs.h as a prerequisite
   33 #endif
   34 
   35 #define mb()    __asm __volatile("mfence;" : : : "memory")
   36 #define wmb()   __asm __volatile("sfence;" : : : "memory")
   37 #define rmb()   __asm __volatile("lfence;" : : : "memory")
   38 
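As a rough sketch of how these fences express ordering intent (illustrative only: the example_ variables and functions below are invented, and on amd64 ordinary loads and stores are already strongly ordered, so the explicit fences matter chiefly for non-temporal stores, device memory, and as heavyweight compiler barriers):

static volatile int example_data;
static volatile int example_ready;

static __inline void
example_produce(void)
{

        example_data = 42;
        wmb();                  /* order the data store before the flag store */
        example_ready = 1;
}

static __inline int
example_consume(void)
{

        while (example_ready == 0)
                ;               /* spin until the producer publishes */
        rmb();                  /* order the flag load before the data load */
        return (example_data);
}
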
   39 /*
   40  * Various simple operations on memory, each of which is atomic in the
   41  * presence of interrupts and multiple processors.
   42  *
   43  * atomic_set_char(P, V)        (*(u_char *)(P) |= (V))
   44  * atomic_clear_char(P, V)      (*(u_char *)(P) &= ~(V))
   45  * atomic_add_char(P, V)        (*(u_char *)(P) += (V))
   46  * atomic_subtract_char(P, V)   (*(u_char *)(P) -= (V))
   47  *
   48  * atomic_set_short(P, V)       (*(u_short *)(P) |= (V))
   49  * atomic_clear_short(P, V)     (*(u_short *)(P) &= ~(V))
   50  * atomic_add_short(P, V)       (*(u_short *)(P) += (V))
   51  * atomic_subtract_short(P, V)  (*(u_short *)(P) -= (V))
   52  *
   53  * atomic_set_int(P, V)         (*(u_int *)(P) |= (V))
   54  * atomic_clear_int(P, V)       (*(u_int *)(P) &= ~(V))
   55  * atomic_add_int(P, V)         (*(u_int *)(P) += (V))
   56  * atomic_subtract_int(P, V)    (*(u_int *)(P) -= (V))
   57  * atomic_swap_int(P, V)        (return (*(u_int *)(P)); *(u_int *)(P) = (V);)
   58  * atomic_readandclear_int(P)   (return (*(u_int *)(P)); *(u_int *)(P) = 0;)
   59  *
   60  * atomic_set_long(P, V)        (*(u_long *)(P) |= (V))
   61  * atomic_clear_long(P, V)      (*(u_long *)(P) &= ~(V))
   62  * atomic_add_long(P, V)        (*(u_long *)(P) += (V))
   63  * atomic_subtract_long(P, V)   (*(u_long *)(P) -= (V))
   64  * atomic_swap_long(P, V)       (return (*(u_long *)(P)); *(u_long *)(P) = (V);)
   65  * atomic_readandclear_long(P)  (return (*(u_long *)(P)); *(u_long *)(P) = 0;)
   66  */
   67 
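A sketch of how the plain (unordered) forms are typically used; the flag word, flag bit and counter below are invented for illustration:

static volatile u_int example_flags;
static volatile u_int example_count;

#define EXF_RUNNING     0x0001

static __inline void
example_start(void)
{

        atomic_set_int(&example_flags, EXF_RUNNING);    /* *p |= V atomically */
        atomic_add_int(&example_count, 1);              /* *p += V atomically */
}

static __inline void
example_stop(void)
{

        atomic_clear_int(&example_flags, EXF_RUNNING);  /* *p &= ~V atomically */
        atomic_subtract_int(&example_count, 1);         /* *p -= V atomically */
}
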
   68 /*
   69  * The above functions are expanded inline in the statically-linked
   70  * kernel.  Lock prefixes are generated if an SMP kernel is being
   71  * built.
   72  *
   73  * Kernel modules call real functions which are built into the kernel.
   74  * This allows kernel modules to be portable between UP and SMP systems.
   75  */
   76 #if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM)
   77 #define ATOMIC_ASM(NAME, TYPE, OP, CONS, V)                     \
   78 void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v);  \
   79 void atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
   80 
   81 int     atomic_cmpset_char(volatile u_char *dst, u_char expect, u_char src);
   82 int     atomic_cmpset_short(volatile u_short *dst, u_short expect, u_short src);
   83 int     atomic_cmpset_int(volatile u_int *dst, u_int expect, u_int src);
   84 int     atomic_cmpset_long(volatile u_long *dst, u_long expect, u_long src);
   85 int     atomic_fcmpset_char(volatile u_char *dst, u_char *expect, u_char src);
   86 int     atomic_fcmpset_short(volatile u_short *dst, u_short *expect,
   87             u_short src);
   88 int     atomic_fcmpset_int(volatile u_int *dst, u_int *expect, u_int src);
   89 int     atomic_fcmpset_long(volatile u_long *dst, u_long *expect, u_long src);
   90 u_int   atomic_fetchadd_int(volatile u_int *p, u_int v);
   91 u_long  atomic_fetchadd_long(volatile u_long *p, u_long v);
   92 int     atomic_testandset_int(volatile u_int *p, u_int v);
   93 int     atomic_testandset_long(volatile u_long *p, u_int v);
   94 int     atomic_testandclear_int(volatile u_int *p, u_int v);
   95 int     atomic_testandclear_long(volatile u_long *p, u_int v);
   96 
   97 #define ATOMIC_LOAD(TYPE, LOP)                                  \
   98 u_##TYPE        atomic_load_acq_##TYPE(volatile u_##TYPE *p)
   99 #define ATOMIC_STORE(TYPE)                                      \
  100 void            atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)
  101 
  102 #else /* !KLD_MODULE && __GNUCLIKE_ASM */
  103 
  104 /*
  105  * For userland, always use lock prefixes so that the binaries will run
  106  * on both SMP and !SMP systems.
  107  */
  108 #if defined(SMP) || !defined(_KERNEL)
  109 #define MPLOCKED        "lock ; "
  110 #else
  111 #define MPLOCKED
  112 #endif
  113 
   114 /*
   115  * The assembly is volatilized so that the compiler cannot discard it.
   116  * GCC also aggressively reorders operations, so the barrier variants need
   117  * a "memory" clobber to keep memory accesses from moving across them.
   118  */
  119 #define ATOMIC_ASM(NAME, TYPE, OP, CONS, V)             \
  120 static __inline void                                    \
  121 atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
  122 {                                                       \
  123         __asm __volatile(MPLOCKED OP                    \
  124         : "+m" (*p)                                     \
  125         : CONS (V)                                      \
  126         : "cc");                                        \
  127 }                                                       \
  128                                                         \
  129 static __inline void                                    \
  130 atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
  131 {                                                       \
  132         __asm __volatile(MPLOCKED OP                    \
  133         : "+m" (*p)                                     \
  134         : CONS (V)                                      \
  135         : "memory", "cc");                              \
  136 }                                                       \
  137 struct __hack
  138 
  139 /*
  140  * Atomic compare and set, used by the mutex functions.
  141  *
  142  * cmpset:
  143  *      if (*dst == expect)
  144  *              *dst = src
  145  *
  146  * fcmpset:
  147  *      if (*dst == *expect)
  148  *              *dst = src
  149  *      else
  150  *              *expect = *dst
  151  *
  152  * Returns 0 on failure, non-zero on success.
  153  */
  154 #define ATOMIC_CMPSET(TYPE)                             \
  155 static __inline int                                     \
  156 atomic_cmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE expect, u_##TYPE src) \
  157 {                                                       \
  158         u_char res;                                     \
  159                                                         \
  160         __asm __volatile(                               \
  161         "       " MPLOCKED "            "               \
  162         "       cmpxchg %3,%1 ; "                       \
  163         "       sete    %0 ;            "               \
  164         "# atomic_cmpset_" #TYPE "      "               \
  165         : "=q" (res),                   /* 0 */         \
  166           "+m" (*dst),                  /* 1 */         \
  167           "+a" (expect)                 /* 2 */         \
  168         : "r" (src)                     /* 3 */         \
  169         : "memory", "cc");                              \
  170         return (res);                                   \
  171 }                                                       \
  172                                                         \
  173 static __inline int                                     \
  174 atomic_fcmpset_##TYPE(volatile u_##TYPE *dst, u_##TYPE *expect, u_##TYPE src) \
  175 {                                                       \
  176         u_char res;                                     \
  177                                                         \
  178         __asm __volatile(                               \
  179         "       " MPLOCKED "            "               \
  180         "       cmpxchg %3,%1 ;         "               \
  181         "       sete    %0 ;            "               \
  182         "# atomic_fcmpset_" #TYPE "     "               \
  183         : "=q" (res),                   /* 0 */         \
  184           "+m" (*dst),                  /* 1 */         \
  185           "+a" (*expect)                /* 2 */         \
  186         : "r" (src)                     /* 3 */         \
  187         : "memory", "cc");                              \
  188         return (res);                                   \
  189 }
  190 
  191 ATOMIC_CMPSET(char);
  192 ATOMIC_CMPSET(short);
  193 ATOMIC_CMPSET(int);
  194 ATOMIC_CMPSET(long);
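A sketch of the usual retry loop built on these primitives (example_set_bits is invented). fcmpset is the more convenient form for loops because a failed attempt leaves the freshly observed value in *expect, so the target does not have to be re-read explicitly:

static __inline void
example_set_bits(volatile u_int *p, u_int bits)
{
        u_int old;

        old = *p;
        while (atomic_fcmpset_int(p, &old, old | bits) == 0)
                ;       /* 'old' now holds the current *p; retry with it */
}
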
  195 
  196 /*
  197  * Atomically add the value of v to the integer pointed to by p and return
  198  * the previous value of *p.
  199  */
  200 static __inline u_int
  201 atomic_fetchadd_int(volatile u_int *p, u_int v)
  202 {
  203 
  204         __asm __volatile(
  205         "       " MPLOCKED "            "
  206         "       xaddl   %0,%1 ;         "
  207         "# atomic_fetchadd_int"
  208         : "+r" (v),                     /* 0 */
  209           "+m" (*p)                     /* 1 */
  210         : : "cc");
  211         return (v);
  212 }
  213 
  214 /*
  215  * Atomically add the value of v to the long integer pointed to by p and return
  216  * the previous value of *p.
  217  */
  218 static __inline u_long
  219 atomic_fetchadd_long(volatile u_long *p, u_long v)
  220 {
  221 
  222         __asm __volatile(
  223         "       " MPLOCKED "            "
  224         "       xaddq   %0,%1 ;         "
  225         "# atomic_fetchadd_long"
  226         : "+r" (v),                     /* 0 */
  227           "+m" (*p)                     /* 1 */
  228         : : "cc");
  229         return (v);
  230 }
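A sketch of a typical use, handing out unique indices from a shared counter (the names are invented):

static volatile u_int example_next_slot;

static __inline u_int
example_alloc_slot(void)
{

        /* Each caller receives the pre-increment value, so slots never repeat. */
        return (atomic_fetchadd_int(&example_next_slot, 1));
}
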
  231 
  232 static __inline int
  233 atomic_testandset_int(volatile u_int *p, u_int v)
  234 {
  235         u_char res;
  236 
  237         __asm __volatile(
  238         "       " MPLOCKED "            "
  239         "       btsl    %2,%1 ;         "
  240         "       setc    %0 ;            "
  241         "# atomic_testandset_int"
  242         : "=q" (res),                   /* 0 */
  243           "+m" (*p)                     /* 1 */
  244         : "Ir" (v & 0x1f)               /* 2 */
  245         : "cc");
  246         return (res);
  247 }
  248 
  249 static __inline int
  250 atomic_testandset_long(volatile u_long *p, u_int v)
  251 {
  252         u_char res;
  253 
  254         __asm __volatile(
  255         "       " MPLOCKED "            "
  256         "       btsq    %2,%1 ;         "
  257         "       setc    %0 ;            "
  258         "# atomic_testandset_long"
  259         : "=q" (res),                   /* 0 */
  260           "+m" (*p)                     /* 1 */
  261         : "Jr" ((u_long)(v & 0x3f))     /* 2 */
  262         : "cc");
  263         return (res);
  264 }
  265 
  266 static __inline int
  267 atomic_testandclear_int(volatile u_int *p, u_int v)
  268 {
  269         u_char res;
  270 
  271         __asm __volatile(
  272         "       " MPLOCKED "            "
  273         "       btrl    %2,%1 ;         "
  274         "       setc    %0 ;            "
  275         "# atomic_testandclear_int"
  276         : "=q" (res),                   /* 0 */
  277           "+m" (*p)                     /* 1 */
  278         : "Ir" (v & 0x1f)               /* 2 */
  279         : "cc");
  280         return (res);
  281 }
  282 
  283 static __inline int
  284 atomic_testandclear_long(volatile u_long *p, u_int v)
  285 {
  286         u_char res;
  287 
  288         __asm __volatile(
  289         "       " MPLOCKED "            "
  290         "       btrq    %2,%1 ;         "
  291         "       setc    %0 ;            "
  292         "# atomic_testandclear_long"
  293         : "=q" (res),                   /* 0 */
  294           "+m" (*p)                     /* 1 */
  295         : "Jr" ((u_long)(v & 0x3f))     /* 2 */
  296         : "cc");
  297         return (res);
  298 }
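Two sketches of how test-and-set/test-and-clear are typically used, a one-shot latch and a small bit-ID allocator (the names are invented; note that the bit index is masked to the word width, 0x1f or 0x3f, by the routines above):

static volatile u_long example_once;
static volatile u_long example_ids;

static __inline int
example_first_caller(void)
{

        /* Only the caller that flips bit 0 from 0 to 1 sees the old value 0. */
        return (atomic_testandset_long(&example_once, 0) == 0);
}

static __inline void
example_release_id(u_int id)
{

        (void)atomic_testandclear_long(&example_ids, id);
}
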
  299 
  300 /*
  301  * We assume that a = b will do atomic loads and stores.  Due to the
  302  * IA32 memory model, a simple store guarantees release semantics.
  303  *
  304  * However, loads may pass stores, so for atomic_load_acq we have to
  305  * ensure a Store/Load barrier to do the load in SMP kernels.  We use
  306  * "lock cmpxchg" as recommended by the AMD Software Optimization
  307  * Guide, and not mfence.  For UP kernels, however, the cache of the
  308  * single processor is always consistent, so we only need to take care
  309  * of the compiler.
  310  */
  311 #define ATOMIC_STORE(TYPE)                              \
  312 static __inline void                                    \
  313 atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
  314 {                                                       \
  315         __compiler_membar();                            \
  316         *p = v;                                         \
  317 }                                                       \
  318 struct __hack
  319 
  320 #if defined(_KERNEL) && !defined(SMP)
  321 
  322 #define ATOMIC_LOAD(TYPE, LOP)                          \
  323 static __inline u_##TYPE                                \
  324 atomic_load_acq_##TYPE(volatile u_##TYPE *p)            \
  325 {                                                       \
  326         u_##TYPE tmp;                                   \
  327                                                         \
  328         tmp = *p;                                       \
  329         __compiler_membar();                            \
  330         return (tmp);                                   \
  331 }                                                       \
  332 struct __hack
  333 
  334 #else /* !(_KERNEL && !SMP) */
  335 
  336 #define ATOMIC_LOAD(TYPE, LOP)                          \
  337 static __inline u_##TYPE                                \
  338 atomic_load_acq_##TYPE(volatile u_##TYPE *p)            \
  339 {                                                       \
  340         u_##TYPE res;                                   \
  341                                                         \
  342         __asm __volatile(MPLOCKED LOP                   \
  343         : "=a" (res),                   /* 0 */         \
  344           "+m" (*p)                     /* 1 */         \
  345         : : "memory", "cc");                            \
  346         return (res);                                   \
  347 }                                                       \
  348 struct __hack
  349 
  350 #endif /* _KERNEL && !SMP */
  351 
  352 #endif /* KLD_MODULE || !__GNUCLIKE_ASM */
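A sketch of the release/acquire pairing these provide (the message structure and flag are invented): the release store publishes everything written before it, and a matching acquire load guarantees the consumer sees that data once it observes the flag.

struct example_msg {
        int     payload;
};

static struct example_msg example_buf;
static volatile u_long example_avail;

static __inline void
example_publish(int value)
{

        example_buf.payload = value;
        /* Release: the payload store is visible before example_avail reads 1. */
        atomic_store_rel_long(&example_avail, 1);
}

static __inline int
example_try_consume(int *out)
{

        /* Acquire: if we observe 1, the payload store is also visible. */
        if (atomic_load_acq_long(&example_avail) == 0)
                return (0);
        *out = example_buf.payload;
        return (1);
}
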
  353 
  354 ATOMIC_ASM(set,      char,  "orb %b1,%0",  "iq",  v);
  355 ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
  356 ATOMIC_ASM(add,      char,  "addb %b1,%0", "iq",  v);
  357 ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);
  358 
  359 ATOMIC_ASM(set,      short, "orw %w1,%0",  "ir",  v);
  360 ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
  361 ATOMIC_ASM(add,      short, "addw %w1,%0", "ir",  v);
  362 ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);
  363 
  364 ATOMIC_ASM(set,      int,   "orl %1,%0",   "ir",  v);
  365 ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
  366 ATOMIC_ASM(add,      int,   "addl %1,%0",  "ir",  v);
  367 ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);
  368 
  369 ATOMIC_ASM(set,      long,  "orq %1,%0",   "ir",  v);
  370 ATOMIC_ASM(clear,    long,  "andq %1,%0",  "ir", ~v);
  371 ATOMIC_ASM(add,      long,  "addq %1,%0",  "ir",  v);
  372 ATOMIC_ASM(subtract, long,  "subq %1,%0",  "ir",  v);
  373 
  374 ATOMIC_LOAD(char,  "cmpxchgb %b0,%1");
  375 ATOMIC_LOAD(short, "cmpxchgw %w0,%1");
  376 ATOMIC_LOAD(int,   "cmpxchgl %0,%1");
  377 ATOMIC_LOAD(long,  "cmpxchgq %0,%1");
  378 
  379 ATOMIC_STORE(char);
  380 ATOMIC_STORE(short);
  381 ATOMIC_STORE(int);
  382 ATOMIC_STORE(long);
  383 
  384 #undef ATOMIC_ASM
  385 #undef ATOMIC_LOAD
  386 #undef ATOMIC_STORE
  387 
  388 #ifndef WANT_FUNCTIONS
  389 
  390 /* Read the current value and store a new value in the destination. */
  391 #ifdef __GNUCLIKE_ASM
  392 
  393 static __inline u_int
  394 atomic_swap_int(volatile u_int *p, u_int v)
  395 {
  396 
  397         __asm __volatile(
  398         "       xchgl   %1,%0 ;         "
  399         "# atomic_swap_int"
  400         : "+r" (v),                     /* 0 */
  401           "+m" (*p));                   /* 1 */
  402         return (v);
  403 }
  404 
  405 static __inline u_long
  406 atomic_swap_long(volatile u_long *p, u_long v)
  407 {
  408 
  409         __asm __volatile(
  410         "       xchgq   %1,%0 ;         "
  411         "# atomic_swap_long"
  412         : "+r" (v),                     /* 0 */
  413           "+m" (*p));                   /* 1 */
  414         return (v);
  415 }
  416 
  417 #else /* !__GNUCLIKE_ASM */
  418 
  419 u_int   atomic_swap_int(volatile u_int *p, u_int v);
  420 u_long  atomic_swap_long(volatile u_long *p, u_long v);
  421 
  422 #endif /* __GNUCLIKE_ASM */
  423 
  424 #define atomic_set_acq_char             atomic_set_barr_char
  425 #define atomic_set_rel_char             atomic_set_barr_char
  426 #define atomic_clear_acq_char           atomic_clear_barr_char
  427 #define atomic_clear_rel_char           atomic_clear_barr_char
  428 #define atomic_add_acq_char             atomic_add_barr_char
  429 #define atomic_add_rel_char             atomic_add_barr_char
  430 #define atomic_subtract_acq_char        atomic_subtract_barr_char
  431 #define atomic_subtract_rel_char        atomic_subtract_barr_char
  432 #define atomic_cmpset_acq_char          atomic_cmpset_char
  433 #define atomic_cmpset_rel_char          atomic_cmpset_char
  434 #define atomic_fcmpset_acq_char         atomic_fcmpset_char
  435 #define atomic_fcmpset_rel_char         atomic_fcmpset_char
  436 
  437 #define atomic_set_acq_short            atomic_set_barr_short
  438 #define atomic_set_rel_short            atomic_set_barr_short
  439 #define atomic_clear_acq_short          atomic_clear_barr_short
  440 #define atomic_clear_rel_short          atomic_clear_barr_short
  441 #define atomic_add_acq_short            atomic_add_barr_short
  442 #define atomic_add_rel_short            atomic_add_barr_short
  443 #define atomic_subtract_acq_short       atomic_subtract_barr_short
  444 #define atomic_subtract_rel_short       atomic_subtract_barr_short
  445 #define atomic_cmpset_acq_short         atomic_cmpset_short
  446 #define atomic_cmpset_rel_short         atomic_cmpset_short
  447 #define atomic_fcmpset_acq_short        atomic_fcmpset_short
  448 #define atomic_fcmpset_rel_short        atomic_fcmpset_short
  449 
  450 #define atomic_set_acq_int              atomic_set_barr_int
  451 #define atomic_set_rel_int              atomic_set_barr_int
  452 #define atomic_clear_acq_int            atomic_clear_barr_int
  453 #define atomic_clear_rel_int            atomic_clear_barr_int
  454 #define atomic_add_acq_int              atomic_add_barr_int
  455 #define atomic_add_rel_int              atomic_add_barr_int
  456 #define atomic_subtract_acq_int         atomic_subtract_barr_int
  457 #define atomic_subtract_rel_int         atomic_subtract_barr_int
  458 #define atomic_cmpset_acq_int           atomic_cmpset_int
  459 #define atomic_cmpset_rel_int           atomic_cmpset_int
  460 #define atomic_fcmpset_acq_int          atomic_fcmpset_int
  461 #define atomic_fcmpset_rel_int          atomic_fcmpset_int
  462 
  463 #define atomic_set_acq_long             atomic_set_barr_long
  464 #define atomic_set_rel_long             atomic_set_barr_long
  465 #define atomic_clear_acq_long           atomic_clear_barr_long
  466 #define atomic_clear_rel_long           atomic_clear_barr_long
  467 #define atomic_add_acq_long             atomic_add_barr_long
  468 #define atomic_add_rel_long             atomic_add_barr_long
  469 #define atomic_subtract_acq_long        atomic_subtract_barr_long
  470 #define atomic_subtract_rel_long        atomic_subtract_barr_long
  471 #define atomic_cmpset_acq_long          atomic_cmpset_long
  472 #define atomic_cmpset_rel_long          atomic_cmpset_long
  473 #define atomic_fcmpset_acq_long         atomic_fcmpset_long
  474 #define atomic_fcmpset_rel_long         atomic_fcmpset_long
  475 
  476 #define atomic_readandclear_int(p)      atomic_swap_int(p, 0)
  477 #define atomic_readandclear_long(p)     atomic_swap_long(p, 0)
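A sketch of handing a value between threads in a single step with swap and read-and-clear (the names are invented):

static volatile u_long example_pending;

static __inline u_long
example_post(u_long work)
{

        /* Install the new value and return whatever was there before. */
        return (atomic_swap_long(&example_pending, work));
}

static __inline u_long
example_take(void)
{

        /* Fetch the posted value and leave zero behind, atomically. */
        return (atomic_readandclear_long(&example_pending));
}
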
  478 
  479 /* Operations on 8-bit bytes. */
  480 #define atomic_set_8            atomic_set_char
  481 #define atomic_set_acq_8        atomic_set_acq_char
  482 #define atomic_set_rel_8        atomic_set_rel_char
  483 #define atomic_clear_8          atomic_clear_char
  484 #define atomic_clear_acq_8      atomic_clear_acq_char
  485 #define atomic_clear_rel_8      atomic_clear_rel_char
  486 #define atomic_add_8            atomic_add_char
  487 #define atomic_add_acq_8        atomic_add_acq_char
  488 #define atomic_add_rel_8        atomic_add_rel_char
  489 #define atomic_subtract_8       atomic_subtract_char
  490 #define atomic_subtract_acq_8   atomic_subtract_acq_char
  491 #define atomic_subtract_rel_8   atomic_subtract_rel_char
  492 #define atomic_load_acq_8       atomic_load_acq_char
  493 #define atomic_store_rel_8      atomic_store_rel_char
  494 #define atomic_cmpset_8         atomic_cmpset_char
  495 #define atomic_cmpset_acq_8     atomic_cmpset_acq_char
  496 #define atomic_cmpset_rel_8     atomic_cmpset_rel_char
  497 #define atomic_fcmpset_8        atomic_fcmpset_char
  498 #define atomic_fcmpset_acq_8    atomic_fcmpset_acq_char
  499 #define atomic_fcmpset_rel_8    atomic_fcmpset_rel_char
  500 
  501 /* Operations on 16-bit words. */
  502 #define atomic_set_16           atomic_set_short
  503 #define atomic_set_acq_16       atomic_set_acq_short
  504 #define atomic_set_rel_16       atomic_set_rel_short
  505 #define atomic_clear_16         atomic_clear_short
  506 #define atomic_clear_acq_16     atomic_clear_acq_short
  507 #define atomic_clear_rel_16     atomic_clear_rel_short
  508 #define atomic_add_16           atomic_add_short
  509 #define atomic_add_acq_16       atomic_add_acq_short
  510 #define atomic_add_rel_16       atomic_add_rel_short
  511 #define atomic_subtract_16      atomic_subtract_short
  512 #define atomic_subtract_acq_16  atomic_subtract_acq_short
  513 #define atomic_subtract_rel_16  atomic_subtract_rel_short
  514 #define atomic_load_acq_16      atomic_load_acq_short
  515 #define atomic_store_rel_16     atomic_store_rel_short
  516 #define atomic_cmpset_16        atomic_cmpset_short
  517 #define atomic_cmpset_acq_16    atomic_cmpset_acq_short
  518 #define atomic_cmpset_rel_16    atomic_cmpset_rel_short
  519 #define atomic_fcmpset_16       atomic_fcmpset_short
  520 #define atomic_fcmpset_acq_16   atomic_fcmpset_acq_short
  521 #define atomic_fcmpset_rel_16   atomic_fcmpset_rel_short
  522 
  523 /* Operations on 32-bit double words. */
  524 #define atomic_set_32           atomic_set_int
  525 #define atomic_set_acq_32       atomic_set_acq_int
  526 #define atomic_set_rel_32       atomic_set_rel_int
  527 #define atomic_clear_32         atomic_clear_int
  528 #define atomic_clear_acq_32     atomic_clear_acq_int
  529 #define atomic_clear_rel_32     atomic_clear_rel_int
  530 #define atomic_add_32           atomic_add_int
  531 #define atomic_add_acq_32       atomic_add_acq_int
  532 #define atomic_add_rel_32       atomic_add_rel_int
  533 #define atomic_subtract_32      atomic_subtract_int
  534 #define atomic_subtract_acq_32  atomic_subtract_acq_int
  535 #define atomic_subtract_rel_32  atomic_subtract_rel_int
  536 #define atomic_load_acq_32      atomic_load_acq_int
  537 #define atomic_store_rel_32     atomic_store_rel_int
  538 #define atomic_cmpset_32        atomic_cmpset_int
  539 #define atomic_cmpset_acq_32    atomic_cmpset_acq_int
  540 #define atomic_cmpset_rel_32    atomic_cmpset_rel_int
  541 #define atomic_fcmpset_32       atomic_fcmpset_int
  542 #define atomic_fcmpset_acq_32   atomic_fcmpset_acq_int
  543 #define atomic_fcmpset_rel_32   atomic_fcmpset_rel_int
  544 #define atomic_swap_32          atomic_swap_int
  545 #define atomic_readandclear_32  atomic_readandclear_int
  546 #define atomic_fetchadd_32      atomic_fetchadd_int
  547 #define atomic_testandset_32    atomic_testandset_int
  548 #define atomic_testandclear_32  atomic_testandclear_int
  549 
  550 /* Operations on 64-bit quad words. */
  551 #define atomic_set_64           atomic_set_long
  552 #define atomic_set_acq_64       atomic_set_acq_long
  553 #define atomic_set_rel_64       atomic_set_rel_long
  554 #define atomic_clear_64         atomic_clear_long
  555 #define atomic_clear_acq_64     atomic_clear_acq_long
  556 #define atomic_clear_rel_64     atomic_clear_rel_long
  557 #define atomic_add_64           atomic_add_long
  558 #define atomic_add_acq_64       atomic_add_acq_long
  559 #define atomic_add_rel_64       atomic_add_rel_long
  560 #define atomic_subtract_64      atomic_subtract_long
  561 #define atomic_subtract_acq_64  atomic_subtract_acq_long
  562 #define atomic_subtract_rel_64  atomic_subtract_rel_long
  563 #define atomic_load_acq_64      atomic_load_acq_long
  564 #define atomic_store_rel_64     atomic_store_rel_long
  565 #define atomic_cmpset_64        atomic_cmpset_long
  566 #define atomic_cmpset_acq_64    atomic_cmpset_acq_long
  567 #define atomic_cmpset_rel_64    atomic_cmpset_rel_long
  568 #define atomic_fcmpset_64       atomic_fcmpset_long
  569 #define atomic_fcmpset_acq_64   atomic_fcmpset_acq_long
  570 #define atomic_fcmpset_rel_64   atomic_fcmpset_rel_long
  571 #define atomic_swap_64          atomic_swap_long
  572 #define atomic_readandclear_64  atomic_readandclear_long
  573 #define atomic_fetchadd_64      atomic_fetchadd_long
  574 #define atomic_testandset_64    atomic_testandset_long
  575 #define atomic_testandclear_64  atomic_testandclear_long
  576 
  577 /* Operations on pointers. */
  578 #define atomic_set_ptr          atomic_set_long
  579 #define atomic_set_acq_ptr      atomic_set_acq_long
  580 #define atomic_set_rel_ptr      atomic_set_rel_long
  581 #define atomic_clear_ptr        atomic_clear_long
  582 #define atomic_clear_acq_ptr    atomic_clear_acq_long
  583 #define atomic_clear_rel_ptr    atomic_clear_rel_long
  584 #define atomic_add_ptr          atomic_add_long
  585 #define atomic_add_acq_ptr      atomic_add_acq_long
  586 #define atomic_add_rel_ptr      atomic_add_rel_long
  587 #define atomic_subtract_ptr     atomic_subtract_long
  588 #define atomic_subtract_acq_ptr atomic_subtract_acq_long
  589 #define atomic_subtract_rel_ptr atomic_subtract_rel_long
  590 #define atomic_load_acq_ptr     atomic_load_acq_long
  591 #define atomic_store_rel_ptr    atomic_store_rel_long
  592 #define atomic_cmpset_ptr       atomic_cmpset_long
  593 #define atomic_cmpset_acq_ptr   atomic_cmpset_acq_long
  594 #define atomic_cmpset_rel_ptr   atomic_cmpset_rel_long
  595 #define atomic_fcmpset_ptr      atomic_fcmpset_long
  596 #define atomic_fcmpset_acq_ptr  atomic_fcmpset_acq_long
  597 #define atomic_fcmpset_rel_ptr  atomic_fcmpset_rel_long
  598 #define atomic_swap_ptr         atomic_swap_long
  599 #define atomic_readandclear_ptr atomic_readandclear_long
  600 
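The pointer aliases make the usual lock-free list idioms direct. A sketch of pushing onto a singly-linked list head with fcmpset (the node type and list head are invented; a matching pop would additionally need to guard against ABA):

struct example_node {
        struct example_node *next;
};

static struct example_node * volatile example_head;

static __inline void
example_push(struct example_node *n)
{
        u_long old;

        old = (u_long)example_head;
        do {
                n->next = (struct example_node *)old;
                /* on failure, fcmpset reloads 'old' with the current head */
        } while (atomic_fcmpset_ptr((volatile u_long *)&example_head,
            &old, (u_long)n) == 0);
}
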
  601 #endif /* !WANT_FUNCTIONS */
  602 
  603 #endif /* !_MACHINE_ATOMIC_H_ */
