/*-
 * Copyright (c) 2010 Isilon Systems, Inc.
 * Copyright (c) 2010 iX Systems, Inc.
 * Copyright (c) 2010 Panasas, Inc.
 * Copyright (c) 2013-2017 Mellanox Technologies, Ltd.
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice unmodified, this list of conditions, and the following
 *    disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
 * IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
 * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
 * IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
 * INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
 * NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
 * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
 * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
 * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#ifndef	_LINUXKPI_ASM_ATOMIC_LONG_H_
#define	_LINUXKPI_ASM_ATOMIC_LONG_H_

#include <linux/compiler.h>
#include <sys/types.h>
#include <machine/atomic.h>

#define	ATOMIC_LONG_INIT(x)	{ .counter = (x) }

typedef struct {
	volatile long counter;
} atomic_long_t;

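/*
 * Linux-compatible atomic_long_t operations implemented on top of the
 * native machine/atomic primitives.  A minimal usage sketch (the "refs"
 * counter below is only illustrative):
 *
 *	atomic_long_t refs = ATOMIC_LONG_INIT(1);
 *
 *	atomic_long_inc(&refs);
 *	if (atomic_long_dec_and_test(&refs))
 *		... the last reference was dropped ...
 *
 * The macros below are thin aliases for the *_return() and *_unless()
 * helpers defined further down.
 */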
#define	atomic_long_add(i, v)		atomic_long_add_return((i), (v))
#define	atomic_long_sub(i, v)		atomic_long_sub_return((i), (v))
#define	atomic_long_inc_return(v)	atomic_long_add_return(1, (v))
#define	atomic_long_inc_not_zero(v)	atomic_long_add_unless((v), 1, 0)

static inline long
atomic_long_add_return(long i, atomic_long_t *v)
{
	return i + atomic_fetchadd_long(&v->counter, i);
}

static inline long
atomic_long_sub_return(long i, atomic_long_t *v)
{
	return atomic_fetchadd_long(&v->counter, -i) - i;
}

static inline void
atomic_long_set(atomic_long_t *v, long i)
{
	WRITE_ONCE(v->counter, i);
}

static inline long
atomic_long_read(atomic_long_t *v)
{
	return READ_ONCE(v->counter);
}

static inline long
atomic_long_inc(atomic_long_t *v)
{
	return atomic_fetchadd_long(&v->counter, 1) + 1;
}

static inline long
atomic_long_dec(atomic_long_t *v)
{
	return atomic_fetchadd_long(&v->counter, -1) - 1;
}

static inline long
atomic_long_xchg(atomic_long_t *v, long val)
{
	return atomic_swap_long(&v->counter, val);
}

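/*
 * Emulate Linux cmpxchg() semantics: return the value observed in the
 * counter, which equals "old" only when the swap to "new" succeeded.
 * The loop retries when atomic_fcmpset_long() fails spuriously while
 * still reporting the expected value.
 */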
static inline long
atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
	long ret = old;

	for (;;) {
		if (atomic_fcmpset_long(&v->counter, &ret, new))
			break;
		if (ret != old)
			break;
	}
	return (ret);
}

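/*
 * Add "a" to the counter unless it currently holds "u".
 * atomic_long_add_unless() returns non-zero if the addition was performed;
 * atomic_long_fetch_add_unless() returns the value observed before the
 * (possible) addition.
 */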
static inline int
atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
	long c = atomic_long_read(v);

	for (;;) {
		if (unlikely(c == u))
			break;
		if (likely(atomic_fcmpset_long(&v->counter, &c, c + a)))
			break;
	}
	return (c != u);
}

static inline long
atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
	long c = atomic_long_read(v);

	for (;;) {
		if (unlikely(c == u))
			break;
		if (likely(atomic_fcmpset_long(&v->counter, &c, c + a)))
			break;
	}
	return (c);
}

static inline long
atomic_long_dec_and_test(atomic_long_t *v)
{
	long i = atomic_long_add(-1, v);

	return i == 0;
}

#endif	/* _LINUXKPI_ASM_ATOMIC_LONG_H_ */