/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * $FreeBSD$
 */
#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

/*
 * Various simple operations on memory, each of which is atomic in the
 * presence of interrupts and multiple processors.
 *
 * atomic_set_char(P, V)	(*(u_char *)(P) |= (V))
 * atomic_clear_char(P, V)	(*(u_char *)(P) &= ~(V))
 * atomic_add_char(P, V)	(*(u_char *)(P) += (V))
 * atomic_subtract_char(P, V)	(*(u_char *)(P) -= (V))
 *
 * atomic_set_short(P, V)	(*(u_short *)(P) |= (V))
 * atomic_clear_short(P, V)	(*(u_short *)(P) &= ~(V))
 * atomic_add_short(P, V)	(*(u_short *)(P) += (V))
 * atomic_subtract_short(P, V)	(*(u_short *)(P) -= (V))
 *
 * atomic_set_int(P, V)		(*(u_int *)(P) |= (V))
 * atomic_clear_int(P, V)	(*(u_int *)(P) &= ~(V))
 * atomic_add_int(P, V)		(*(u_int *)(P) += (V))
 * atomic_subtract_int(P, V)	(*(u_int *)(P) -= (V))
 * atomic_readandclear_int(P)	(return (*(u_int *)(P)); *(u_int *)(P) = 0;)
 *
 * atomic_set_long(P, V)	(*(u_long *)(P) |= (V))
 * atomic_clear_long(P, V)	(*(u_long *)(P) &= ~(V))
 * atomic_add_long(P, V)	(*(u_long *)(P) += (V))
 * atomic_subtract_long(P, V)	(*(u_long *)(P) -= (V))
 * atomic_readandclear_long(P)	(return (*(u_long *)(P)); *(u_long *)(P) = 0;)
 */
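
/*
 * Illustrative usage (a sketch, not part of this header): setting and
 * clearing flag bits in a shared word without a lock.  The flag names
 * and variable below are hypothetical.
 *
 *	#define	FOO_PENDING	0x01
 *	#define	FOO_ACTIVE	0x02
 *
 *	static u_int foo_flags;
 *
 *	atomic_set_int(&foo_flags, FOO_PENDING);	// *p |= V, atomically
 *	atomic_clear_int(&foo_flags, FOO_PENDING);	// *p &= ~V, atomically
 */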

/*
 * The above functions are expanded inline in the statically-linked
 * kernel.  Lock prefixes are generated if an SMP kernel is being
 * built.
 *
 * Kernel modules call real functions which are built into the kernel.
 * This allows kernel modules to be portable between UP and SMP systems.
 */
#if defined(KLD_MODULE) || !defined(__GNUCLIKE_ASM)
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)			\
void atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v);	\
void atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

int	atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src);
int	atomic_cmpset_long(volatile u_long *dst, u_long exp, u_long src);
u_int	atomic_fetchadd_int(volatile u_int *p, u_int v);
u_long	atomic_fetchadd_long(volatile u_long *p, u_long v);

#define	ATOMIC_STORE_LOAD(TYPE, LOP, SOP)			\
u_##TYPE	atomic_load_acq_##TYPE(volatile u_##TYPE *p);	\
void		atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)

#else /* !KLD_MODULE && __GNUCLIKE_ASM */

/*
 * For userland, always use lock prefixes so that the binaries will run
 * on both SMP and !SMP systems.
 */
#if defined(SMP) || !defined(_KERNEL)
#define	MPLOCKED	"lock ; "
#else
#define	MPLOCKED
#endif

/*
 * The assembly is marked volatile so that the compiler cannot remove
 * it as dead code.  GCC aggressively reorders memory operations, so the
 * barrier variants also declare a memory clobber to prevent reordering
 * across the barrier.
 */
#define	ATOMIC_ASM(NAME, TYPE, OP, CONS, V)		\
static __inline void					\
atomic_##NAME##_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "=m" (*p)					\
	: CONS (V), "m" (*p));				\
}							\
							\
static __inline void					\
atomic_##NAME##_barr_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(MPLOCKED OP			\
	: "=m" (*p)					\
	: CONS (V), "m" (*p)				\
	: "memory");					\
}							\
struct __hack
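
/*
 * For reference, ATOMIC_ASM(add, int, "addl %1,%0", "ir", v) expands to
 * roughly the following on an SMP kernel (a sketch; the real expansion
 * is produced by the preprocessor from the macro above):
 *
 *	static __inline void
 *	atomic_add_int(volatile u_int *p, u_int v)
 *	{
 *		__asm __volatile("lock ; addl %1,%0"
 *		: "=m" (*p)
 *		: "ir" (v), "m" (*p));
 *	}
 */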

/*
 * Atomic compare and set, used by the mutex functions.
 *
 *	if (*dst == exp) *dst = src
 *
 * (32-bit words for atomic_cmpset_int, 64-bit for atomic_cmpset_long.)
 *
 * Returns 0 on failure, non-zero on success.
 */

static __inline int
atomic_cmpset_int(volatile u_int *dst, u_int exp, u_int src)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchgl %2,%1 ;	"
	"	sete	%0 ;		"
	"1:				"
	"# atomic_cmpset_int"
	: "=a" (res),			/* 0 */
	  "=m" (*dst)			/* 1 */
	: "r" (src),			/* 2 */
	  "a" (exp),			/* 3 */
	  "m" (*dst)			/* 4 */
	: "memory");

	return (res);
}

static __inline int
atomic_cmpset_long(volatile u_long *dst, u_long exp, u_long src)
{
	u_char res;

	__asm __volatile(
	"	" MPLOCKED "		"
	"	cmpxchgq %2,%1 ;	"
	"	sete	%0 ;		"
	"1:				"
	"# atomic_cmpset_long"
	: "=a" (res),			/* 0 */
	  "=m" (*dst)			/* 1 */
	: "r" (src),			/* 2 */
	  "a" (exp),			/* 3 */
	  "m" (*dst)			/* 4 */
	: "memory");

	return (res);
}
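
/*
 * Typical usage (a sketch): a compare-and-set retry loop that sets a
 * bit only if another bit is clear.  The flag names are hypothetical.
 *
 *	u_int old;
 *
 *	do {
 *		old = foo_flags;
 *		if (old & FOO_BUSY)
 *			return (EBUSY);
 *	} while (atomic_cmpset_int(&foo_flags, old, old | FOO_BUSY) == 0);
 */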

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline u_int
atomic_fetchadd_int(volatile u_int *p, u_int v)
{

	__asm __volatile(
	"	" MPLOCKED "		"
	"	xaddl	%0, %1 ;	"
	"# atomic_fetchadd_int"
	: "+r" (v),			/* 0 (result) */
	  "=m" (*p)			/* 1 */
	: "m" (*p));			/* 2 */

	return (v);
}

/*
 * Atomically add the value of v to the long integer pointed to by p and
 * return the previous value of *p.
 */
static __inline u_long
atomic_fetchadd_long(volatile u_long *p, u_long v)
{

	__asm __volatile(
	"	" MPLOCKED "		"
	"	xaddq	%0, %1 ;	"
	"# atomic_fetchadd_long"
	: "+r" (v),			/* 0 (result) */
	  "=m" (*p)			/* 1 */
	: "m" (*p));			/* 2 */

	return (v);
}
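
/*
 * Illustrative usage (a sketch): dropping a reference count and
 * detecting the final release.  Since atomic_fetchadd_int() returns the
 * previous value, the caller that sees 1 held the last reference.  The
 * names below are hypothetical.
 *
 *	if (atomic_fetchadd_int(&obj->refcnt, -1) == 1)
 *		obj_destroy(obj);
 */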

#if defined(_KERNEL) && !defined(SMP)

/*
 * We assume that a = b will do atomic loads and stores.  However, on a
 * PentiumPro or higher, reads may pass writes, so for that case we have
 * to use a serializing instruction (i.e. one with a LOCK prefix) to do
 * the load in SMP kernels.  For UP kernels, however, the cache of the
 * single processor is always consistent, so we need only guard against
 * compiler reordering.
 */
#define	ATOMIC_STORE_LOAD(TYPE, LOP, SOP)		\
static __inline u_##TYPE				\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
{							\
	u_##TYPE tmp;					\
							\
	tmp = *p;					\
	__asm __volatile("" : : : "memory");		\
	return (tmp);					\
}							\
							\
static __inline void					\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile("" : : : "memory");		\
	*p = v;						\
}							\
struct __hack

#else /* !(_KERNEL && !SMP) */

#define	ATOMIC_STORE_LOAD(TYPE, LOP, SOP)		\
static __inline u_##TYPE				\
atomic_load_acq_##TYPE(volatile u_##TYPE *p)		\
{							\
	u_##TYPE res;					\
							\
	__asm __volatile(MPLOCKED LOP			\
	: "=a" (res),			/* 0 */		\
	  "=m" (*p)			/* 1 */		\
	: "m" (*p)			/* 2 */		\
	: "memory");					\
							\
	return (res);					\
}							\
							\
/*							\
 * The XCHG instruction asserts LOCK automagically.	\
 */							\
static __inline void					\
atomic_store_rel_##TYPE(volatile u_##TYPE *p, u_##TYPE v)\
{							\
	__asm __volatile(SOP				\
	: "=m" (*p),			/* 0 */		\
	  "+r" (v)			/* 1 */		\
	: "m" (*p)			/* 2 */		\
	: "memory");					\
}							\
struct __hack

#endif /* _KERNEL && !SMP */
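
/*
 * Illustrative acquire/release pairing (a sketch): a producer publishes
 * data and then sets a flag with release semantics; a consumer reads
 * the flag with acquire semantics before touching the data.  The
 * variable names are hypothetical.
 *
 *	// producer
 *	shared_data = compute();
 *	atomic_store_rel_int(&data_ready, 1);
 *
 *	// consumer
 *	while (atomic_load_acq_int(&data_ready) == 0)
 *		;	// spin
 *	use(shared_data);
 */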

#endif /* KLD_MODULE || !__GNUCLIKE_ASM */

ATOMIC_ASM(set,	     char,  "orb %b1,%0",  "iq",  v);
ATOMIC_ASM(clear,    char,  "andb %b1,%0", "iq", ~v);
ATOMIC_ASM(add,	     char,  "addb %b1,%0", "iq",  v);
ATOMIC_ASM(subtract, char,  "subb %b1,%0", "iq",  v);

ATOMIC_ASM(set,	     short, "orw %w1,%0",  "ir",  v);
ATOMIC_ASM(clear,    short, "andw %w1,%0", "ir", ~v);
ATOMIC_ASM(add,	     short, "addw %w1,%0", "ir",  v);
ATOMIC_ASM(subtract, short, "subw %w1,%0", "ir",  v);

ATOMIC_ASM(set,	     int,   "orl %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    int,   "andl %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     int,   "addl %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, int,   "subl %1,%0",  "ir",  v);

ATOMIC_ASM(set,	     long,  "orq %1,%0",   "ir",  v);
ATOMIC_ASM(clear,    long,  "andq %1,%0",  "ir", ~v);
ATOMIC_ASM(add,	     long,  "addq %1,%0",  "ir",  v);
ATOMIC_ASM(subtract, long,  "subq %1,%0",  "ir",  v);

ATOMIC_STORE_LOAD(char,	 "cmpxchgb %b0,%1", "xchgb %b1,%0");
ATOMIC_STORE_LOAD(short, "cmpxchgw %w0,%1", "xchgw %w1,%0");
ATOMIC_STORE_LOAD(int,	 "cmpxchgl %0,%1",  "xchgl %1,%0");
ATOMIC_STORE_LOAD(long,	 "cmpxchgq %0,%1",  "xchgq %1,%0");

#undef ATOMIC_ASM
#undef ATOMIC_STORE_LOAD

#ifndef WANT_FUNCTIONS

/* Read the current value and store a zero in the destination. */
#ifdef __GNUCLIKE_ASM

static __inline u_int
atomic_readandclear_int(volatile u_int *addr)
{
	u_int res;

	res = 0;
	__asm __volatile(
	"	xchgl	%1,%0 ;		"
	"# atomic_readandclear_int"
	: "+r" (res),			/* 0 */
	  "=m" (*addr)			/* 1 */
	: "m" (*addr));

	return (res);
}

static __inline u_long
atomic_readandclear_long(volatile u_long *addr)
{
	u_long res;

	res = 0;
	__asm __volatile(
	"	xchgq	%1,%0 ;		"
	"# atomic_readandclear_long"
	: "+r" (res),			/* 0 */
	  "=m" (*addr)			/* 1 */
	: "m" (*addr));

	return (res);
}
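
/*
 * Illustrative usage (a sketch): draining a word of pending event bits
 * in one atomic step, so no concurrently posted bit is lost.  The names
 * are hypothetical.
 *
 *	u_int pending;
 *
 *	pending = atomic_readandclear_int(&event_bits);
 *	while (pending != 0) {
 *		handle_event(ffs(pending) - 1);
 *		pending &= pending - 1;		// clear lowest set bit
 *	}
 */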

#else /* !__GNUCLIKE_ASM */

u_int	atomic_readandclear_int(volatile u_int *addr);
u_long	atomic_readandclear_long(volatile u_long *addr);

#endif /* __GNUCLIKE_ASM */

#define	atomic_set_acq_char		atomic_set_barr_char
#define	atomic_set_rel_char		atomic_set_barr_char
#define	atomic_clear_acq_char		atomic_clear_barr_char
#define	atomic_clear_rel_char		atomic_clear_barr_char
#define	atomic_add_acq_char		atomic_add_barr_char
#define	atomic_add_rel_char		atomic_add_barr_char
#define	atomic_subtract_acq_char	atomic_subtract_barr_char
#define	atomic_subtract_rel_char	atomic_subtract_barr_char

#define	atomic_set_acq_short		atomic_set_barr_short
#define	atomic_set_rel_short		atomic_set_barr_short
#define	atomic_clear_acq_short		atomic_clear_barr_short
#define	atomic_clear_rel_short		atomic_clear_barr_short
#define	atomic_add_acq_short		atomic_add_barr_short
#define	atomic_add_rel_short		atomic_add_barr_short
#define	atomic_subtract_acq_short	atomic_subtract_barr_short
#define	atomic_subtract_rel_short	atomic_subtract_barr_short

#define	atomic_set_acq_int		atomic_set_barr_int
#define	atomic_set_rel_int		atomic_set_barr_int
#define	atomic_clear_acq_int		atomic_clear_barr_int
#define	atomic_clear_rel_int		atomic_clear_barr_int
#define	atomic_add_acq_int		atomic_add_barr_int
#define	atomic_add_rel_int		atomic_add_barr_int
#define	atomic_subtract_acq_int		atomic_subtract_barr_int
#define	atomic_subtract_rel_int		atomic_subtract_barr_int
#define	atomic_cmpset_acq_int		atomic_cmpset_int
#define	atomic_cmpset_rel_int		atomic_cmpset_int

#define	atomic_set_acq_long		atomic_set_barr_long
#define	atomic_set_rel_long		atomic_set_barr_long
#define	atomic_clear_acq_long		atomic_clear_barr_long
#define	atomic_clear_rel_long		atomic_clear_barr_long
#define	atomic_add_acq_long		atomic_add_barr_long
#define	atomic_add_rel_long		atomic_add_barr_long
#define	atomic_subtract_acq_long	atomic_subtract_barr_long
#define	atomic_subtract_rel_long	atomic_subtract_barr_long
#define	atomic_cmpset_acq_long		atomic_cmpset_long
#define	atomic_cmpset_rel_long		atomic_cmpset_long

/* Operations on 8-bit bytes. */
#define	atomic_set_8		atomic_set_char
#define	atomic_set_acq_8	atomic_set_acq_char
#define	atomic_set_rel_8	atomic_set_rel_char
#define	atomic_clear_8		atomic_clear_char
#define	atomic_clear_acq_8	atomic_clear_acq_char
#define	atomic_clear_rel_8	atomic_clear_rel_char
#define	atomic_add_8		atomic_add_char
#define	atomic_add_acq_8	atomic_add_acq_char
#define	atomic_add_rel_8	atomic_add_rel_char
#define	atomic_subtract_8	atomic_subtract_char
#define	atomic_subtract_acq_8	atomic_subtract_acq_char
#define	atomic_subtract_rel_8	atomic_subtract_rel_char
#define	atomic_load_acq_8	atomic_load_acq_char
#define	atomic_store_rel_8	atomic_store_rel_char

/* Operations on 16-bit words. */
#define	atomic_set_16		atomic_set_short
#define	atomic_set_acq_16	atomic_set_acq_short
#define	atomic_set_rel_16	atomic_set_rel_short
#define	atomic_clear_16		atomic_clear_short
#define	atomic_clear_acq_16	atomic_clear_acq_short
#define	atomic_clear_rel_16	atomic_clear_rel_short
#define	atomic_add_16		atomic_add_short
#define	atomic_add_acq_16	atomic_add_acq_short
#define	atomic_add_rel_16	atomic_add_rel_short
#define	atomic_subtract_16	atomic_subtract_short
#define	atomic_subtract_acq_16	atomic_subtract_acq_short
#define	atomic_subtract_rel_16	atomic_subtract_rel_short
#define	atomic_load_acq_16	atomic_load_acq_short
#define	atomic_store_rel_16	atomic_store_rel_short

/* Operations on 32-bit double words. */
#define	atomic_set_32		atomic_set_int
#define	atomic_set_acq_32	atomic_set_acq_int
#define	atomic_set_rel_32	atomic_set_rel_int
#define	atomic_clear_32		atomic_clear_int
#define	atomic_clear_acq_32	atomic_clear_acq_int
#define	atomic_clear_rel_32	atomic_clear_rel_int
#define	atomic_add_32		atomic_add_int
#define	atomic_add_acq_32	atomic_add_acq_int
#define	atomic_add_rel_32	atomic_add_rel_int
#define	atomic_subtract_32	atomic_subtract_int
#define	atomic_subtract_acq_32	atomic_subtract_acq_int
#define	atomic_subtract_rel_32	atomic_subtract_rel_int
#define	atomic_load_acq_32	atomic_load_acq_int
#define	atomic_store_rel_32	atomic_store_rel_int
#define	atomic_cmpset_32	atomic_cmpset_int
#define	atomic_cmpset_acq_32	atomic_cmpset_acq_int
#define	atomic_cmpset_rel_32	atomic_cmpset_rel_int
#define	atomic_readandclear_32	atomic_readandclear_int
#define	atomic_fetchadd_32	atomic_fetchadd_int

/* Operations on 64-bit quad words. */
#define	atomic_set_64		atomic_set_long
#define	atomic_set_acq_64	atomic_set_acq_long
#define	atomic_set_rel_64	atomic_set_rel_long
#define	atomic_clear_64		atomic_clear_long
#define	atomic_clear_acq_64	atomic_clear_acq_long
#define	atomic_clear_rel_64	atomic_clear_rel_long
#define	atomic_add_64		atomic_add_long
#define	atomic_add_acq_64	atomic_add_acq_long
#define	atomic_add_rel_64	atomic_add_rel_long
#define	atomic_subtract_64	atomic_subtract_long
#define	atomic_subtract_acq_64	atomic_subtract_acq_long
#define	atomic_subtract_rel_64	atomic_subtract_rel_long
#define	atomic_load_acq_64	atomic_load_acq_long
#define	atomic_store_rel_64	atomic_store_rel_long
#define	atomic_cmpset_64	atomic_cmpset_long
#define	atomic_cmpset_acq_64	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_64	atomic_cmpset_rel_long
#define	atomic_readandclear_64	atomic_readandclear_long

/* Operations on pointers. */
#define	atomic_set_ptr		atomic_set_long
#define	atomic_set_acq_ptr	atomic_set_acq_long
#define	atomic_set_rel_ptr	atomic_set_rel_long
#define	atomic_clear_ptr	atomic_clear_long
#define	atomic_clear_acq_ptr	atomic_clear_acq_long
#define	atomic_clear_rel_ptr	atomic_clear_rel_long
#define	atomic_add_ptr		atomic_add_long
#define	atomic_add_acq_ptr	atomic_add_acq_long
#define	atomic_add_rel_ptr	atomic_add_rel_long
#define	atomic_subtract_ptr	atomic_subtract_long
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_long
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_long
#define	atomic_load_acq_ptr	atomic_load_acq_long
#define	atomic_store_rel_ptr	atomic_store_rel_long
#define	atomic_cmpset_ptr	atomic_cmpset_long
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_long
#define	atomic_readandclear_ptr	atomic_readandclear_long
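
/*
 * Illustrative usage (a sketch): pushing onto a lock-free singly-linked
 * stack with atomic_cmpset_ptr().  The node type and list head are
 * hypothetical; since atomic_cmpset_ptr is atomic_cmpset_long on amd64,
 * pointer values are passed as u_long.
 *
 *	struct node { struct node *next; };
 *	static struct node *top;
 *
 *	do {
 *		n->next = top;
 *	} while (atomic_cmpset_ptr((volatile u_long *)&top,
 *	    (u_long)n->next, (u_long)n) == 0);
 */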

#endif /* !WANT_FUNCTIONS */

#endif /* !_MACHINE_ATOMIC_H_ */