/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 *    from: src/sys/alpha/include/atomic.h,v 1.21.2.3 2005/10/06 18:12:05 jhb
 * $FreeBSD: releng/11.0/sys/mips/include/atomic.h 285283 2015-07-08 18:12:24Z kib $
 */

#ifndef _MACHINE_ATOMIC_H_
#define _MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

/*
 * Note: all the 64-bit atomic operations are only atomic when running
 * in 64-bit mode.  It is assumed that code compiled for n32 and n64
 * fits this definition and that no further safeguards are needed.
 *
 * It is also assumed that add, subtract and the other arithmetic
 * operations are applied to numbers, not pointers.  n32 pointers, with
 * their special sign-extension rules, have no atomic operations defined
 * for them here, but they generally should not need any.
 */
#ifndef __MIPS_PLATFORM_SYNC_NOPS
#define __MIPS_PLATFORM_SYNC_NOPS ""
#endif
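
/*
 * A platform that needs extra hazard padding after "sync" can predefine
 * __MIPS_PLATFORM_SYNC_NOPS before this header is included.  A minimal,
 * purely illustrative override (the exact string is platform-specific and
 * hypothetical here) would pad the barrier with a few nops:
 *
 *      #define __MIPS_PLATFORM_SYNC_NOPS "\tnop\n\tnop\n\tnop\n\tnop\n"
 */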

static __inline void
mips_sync(void)
{
        __asm __volatile (".set noreorder\n"
                        "\tsync\n"
                        __MIPS_PLATFORM_SYNC_NOPS
                        ".set reorder\n"
                        : : : "memory");
}

#define mb()    mips_sync()
#define wmb()   mips_sync()
#define rmb()   mips_sync()
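
/*
 * Example (illustrative sketch, all identifiers hypothetical): a driver
 * publishing a DMA descriptor must make the descriptor contents visible
 * before the flag that hands it to the consumer, and the consumer must
 * order its reads the same way:
 *
 *      desc->addr = paddr;
 *      desc->len = len;
 *      wmb();                          // payload reaches memory first
 *      desc->owner = OWNER_HW;         // hand-off flag
 *
 *      while (desc->owner != OWNER_CPU)
 *              ;
 *      rmb();                          // read the flag before the payload
 *      process(desc->addr, desc->len);
 */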

/*
 * Various simple arithmetic operations on memory, each atomic in the
 * presence of interrupts and safe under SMP.
 */

void atomic_set_8(__volatile uint8_t *, uint8_t);
void atomic_clear_8(__volatile uint8_t *, uint8_t);
void atomic_add_8(__volatile uint8_t *, uint8_t);
void atomic_subtract_8(__volatile uint8_t *, uint8_t);

void atomic_set_16(__volatile uint16_t *, uint16_t);
void atomic_clear_16(__volatile uint16_t *, uint16_t);
void atomic_add_16(__volatile uint16_t *, uint16_t);
void atomic_subtract_16(__volatile uint16_t *, uint16_t);

static __inline void
atomic_set_32(__volatile uint32_t *p, uint32_t v)
{
        uint32_t temp;

        __asm __volatile (
                "1:\tll %0, %3\n\t"             /* load old value */
                "or %0, %2, %0\n\t"             /* calculate new value */
                "sc %0, %1\n\t"                 /* attempt to store */
                "beqz %0, 1b\n\t"               /* spin if failed */
                : "=&r" (temp), "=m" (*p)
                : "r" (v), "m" (*p)
                : "memory");
}

static __inline void
atomic_clear_32(__volatile uint32_t *p, uint32_t v)
{
        uint32_t temp;
        v = ~v;                 /* AND with the complement clears the requested bits */

        __asm __volatile (
                "1:\tll %0, %3\n\t"             /* load old value */
                "and %0, %2, %0\n\t"            /* calculate new value */
                "sc %0, %1\n\t"                 /* attempt to store */
                "beqz %0, 1b\n\t"               /* spin if failed */
                : "=&r" (temp), "=m" (*p)
                : "r" (v), "m" (*p)
                : "memory");
}

static __inline void
atomic_add_32(__volatile uint32_t *p, uint32_t v)
{
        uint32_t temp;

        __asm __volatile (
                "1:\tll %0, %3\n\t"             /* load old value */
                "addu %0, %2, %0\n\t"           /* calculate new value */
                "sc %0, %1\n\t"                 /* attempt to store */
                "beqz %0, 1b\n\t"               /* spin if failed */
                : "=&r" (temp), "=m" (*p)
                : "r" (v), "m" (*p)
                : "memory");
}

static __inline void
atomic_subtract_32(__volatile uint32_t *p, uint32_t v)
{
        uint32_t temp;

        __asm __volatile (
                "1:\tll %0, %3\n\t"             /* load old value */
                "subu %0, %2\n\t"               /* calculate new value */
                "sc %0, %1\n\t"                 /* attempt to store */
                "beqz %0, 1b\n\t"               /* spin if failed */
                : "=&r" (temp), "=m" (*p)
                : "r" (v), "m" (*p)
                : "memory");
}

static __inline uint32_t
atomic_readandclear_32(__volatile uint32_t *addr)
{
        uint32_t result, temp;

        __asm __volatile (
                "1:\tll %0, %3\n\t"             /* load current value, asserting lock */
                "li %1, 0\n\t"                  /* value to store */
                "sc %1, %2\n\t"                 /* attempt to store */
                "beqz %1, 1b\n\t"               /* if the store failed, spin */
                : "=&r" (result), "=&r" (temp), "=m" (*addr)
                : "m" (*addr)
                : "memory");

        return result;
}
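
/*
 * Example (illustrative, hypothetical identifiers): an interrupt handler
 * can drain a word of pending event bits in one shot, so that bits set
 * concurrently by other CPUs are never lost:
 *
 *      pending = atomic_readandclear_32(&sc->sc_intr_pending);
 *      while (pending != 0) {
 *              bit = ffs(pending) - 1;
 *              pending &= ~(1u << bit);
 *              handle_event(sc, bit);
 *      }
 */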

static __inline uint32_t
atomic_readandset_32(__volatile uint32_t *addr, uint32_t value)
{
        uint32_t result, temp;

        __asm __volatile (
                "1:\tll %0, %3\n\t"             /* load current value, asserting lock */
                "or %1, $0, %4\n\t"             /* value to store */
                "sc %1, %2\n\t"                 /* attempt to store */
                "beqz %1, 1b\n\t"               /* if the store failed, spin */
                : "=&r" (result), "=&r" (temp), "=m" (*addr)
                : "m" (*addr), "r" (value)
                : "memory");

        return result;
}

#if defined(__mips_n64) || defined(__mips_n32)
static __inline void
atomic_set_64(__volatile uint64_t *p, uint64_t v)
{
        uint64_t temp;

        __asm __volatile (
                "1:\n\t"
                "lld %0, %3\n\t"                /* load old value */
                "or %0, %2, %0\n\t"             /* calculate new value */
                "scd %0, %1\n\t"                /* attempt to store */
                "beqz %0, 1b\n\t"               /* spin if failed */
                : "=&r" (temp), "=m" (*p)
                : "r" (v), "m" (*p)
                : "memory");
}

static __inline void
atomic_clear_64(__volatile uint64_t *p, uint64_t v)
{
        uint64_t temp;
        v = ~v;                 /* AND with the complement clears the requested bits */

        __asm __volatile (
                "1:\n\t"
                "lld %0, %3\n\t"                /* load old value */
                "and %0, %2, %0\n\t"            /* calculate new value */
                "scd %0, %1\n\t"                /* attempt to store */
                "beqz %0, 1b\n\t"               /* spin if failed */
                : "=&r" (temp), "=m" (*p)
                : "r" (v), "m" (*p)
                : "memory");
}

static __inline void
atomic_add_64(__volatile uint64_t *p, uint64_t v)
{
        uint64_t temp;

        __asm __volatile (
                "1:\n\t"
                "lld %0, %3\n\t"                /* load old value */
                "daddu %0, %2, %0\n\t"          /* calculate new value */
                "scd %0, %1\n\t"                /* attempt to store */
                "beqz %0, 1b\n\t"               /* spin if failed */
                : "=&r" (temp), "=m" (*p)
                : "r" (v), "m" (*p)
                : "memory");
}

static __inline void
atomic_subtract_64(__volatile uint64_t *p, uint64_t v)
{
        uint64_t temp;

        __asm __volatile (
                "1:\n\t"
                "lld %0, %3\n\t"                /* load old value */
                "dsubu %0, %2\n\t"              /* calculate new value */
                "scd %0, %1\n\t"                /* attempt to store */
                "beqz %0, 1b\n\t"               /* spin if failed */
                : "=&r" (temp), "=m" (*p)
                : "r" (v), "m" (*p)
                : "memory");
}

static __inline uint64_t
atomic_readandclear_64(__volatile uint64_t *addr)
{
        uint64_t result, temp;

        __asm __volatile (
                "1:\n\t"
                "lld %0, %3\n\t"                /* load old value */
                "li %1, 0\n\t"                  /* value to store */
                "scd %1, %2\n\t"                /* attempt to store */
                "beqz %1, 1b\n\t"               /* if the store failed, spin */
                : "=&r" (result), "=&r" (temp), "=m" (*addr)
                : "m" (*addr)
                : "memory");

        return result;
}

static __inline uint64_t
atomic_readandset_64(__volatile uint64_t *addr, uint64_t value)
{
        uint64_t result, temp;

        __asm __volatile (
                "1:\n\t"
                "lld %0, %3\n\t"                /* load old value */
                "or %1, $0, %4\n\t"             /* value to store */
                "scd %1, %2\n\t"                /* attempt to store */
                "beqz %1, 1b\n\t"               /* if the store failed, spin */
                : "=&r" (result), "=&r" (temp), "=m" (*addr)
                : "m" (*addr), "r" (value)
                : "memory");

        return result;
}
#endif /* __mips_n64 || __mips_n32 */

#define ATOMIC_ACQ_REL(NAME, WIDTH)                                     \
static __inline void                                                    \
atomic_##NAME##_acq_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{                                                                       \
        atomic_##NAME##_##WIDTH(p, v);                                  \
        mips_sync();                                                    \
}                                                                       \
                                                                        \
static __inline void                                                    \
atomic_##NAME##_rel_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{                                                                       \
        mips_sync();                                                    \
        atomic_##NAME##_##WIDTH(p, v);                                  \
}
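
/*
 * For example, ATOMIC_ACQ_REL(set, 32) expands (modulo whitespace) to:
 *
 *      static __inline void
 *      atomic_set_acq_32(__volatile uint32_t *p, uint32_t v)
 *      {
 *              atomic_set_32(p, v);
 *              mips_sync();
 *      }
 *
 *      static __inline void
 *      atomic_set_rel_32(__volatile uint32_t *p, uint32_t v)
 *      {
 *              mips_sync();
 *              atomic_set_32(p, v);
 *      }
 */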

/* Variants of simple arithmetic with memory barriers. */
ATOMIC_ACQ_REL(set, 8)
ATOMIC_ACQ_REL(clear, 8)
ATOMIC_ACQ_REL(add, 8)
ATOMIC_ACQ_REL(subtract, 8)
ATOMIC_ACQ_REL(set, 16)
ATOMIC_ACQ_REL(clear, 16)
ATOMIC_ACQ_REL(add, 16)
ATOMIC_ACQ_REL(subtract, 16)
ATOMIC_ACQ_REL(set, 32)
ATOMIC_ACQ_REL(clear, 32)
ATOMIC_ACQ_REL(add, 32)
ATOMIC_ACQ_REL(subtract, 32)
#if defined(__mips_n64) || defined(__mips_n32)
ATOMIC_ACQ_REL(set, 64)
ATOMIC_ACQ_REL(clear, 64)
ATOMIC_ACQ_REL(add, 64)
ATOMIC_ACQ_REL(subtract, 64)
#endif

#undef ATOMIC_ACQ_REL

/*
 * We assume that a = b will do atomic loads and stores.
 */
#define ATOMIC_STORE_LOAD(WIDTH)                                        \
static __inline uint##WIDTH##_t                                         \
atomic_load_acq_##WIDTH(__volatile uint##WIDTH##_t *p)                  \
{                                                                       \
        uint##WIDTH##_t v;                                              \
                                                                        \
        v = *p;                                                         \
        mips_sync();                                                    \
        return (v);                                                     \
}                                                                       \
                                                                        \
static __inline void                                                    \
atomic_store_rel_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{                                                                       \
        mips_sync();                                                    \
        *p = v;                                                         \
}
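
/*
 * Example (illustrative, hypothetical identifiers): publishing a record to
 * another CPU.  The release store makes the initialised fields visible
 * before the "ready" flag, and the acquire load on the reader side keeps
 * the subsequent field reads from being performed before the flag check:
 *
 *      rec->value = compute();
 *      atomic_store_rel_32(&rec->ready, 1);
 *
 *      if (atomic_load_acq_32(&rec->ready) != 0)
 *              use(rec->value);
 */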

ATOMIC_STORE_LOAD(32)
ATOMIC_STORE_LOAD(64)
#if !defined(__mips_n64) && !defined(__mips_n32)
void atomic_store_64(__volatile uint64_t *, uint64_t *);
void atomic_load_64(__volatile uint64_t *, uint64_t *);
#else
static __inline void
atomic_store_64(__volatile uint64_t *p, uint64_t *v)
{
        *p = *v;
}

static __inline void
atomic_load_64(__volatile uint64_t *p, uint64_t *v)
{
        *v = *p;
}
#endif

#undef ATOMIC_STORE_LOAD

/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline uint32_t
atomic_cmpset_32(__volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
        uint32_t ret;

        __asm __volatile (
                "1:\tll %0, %4\n\t"             /* load old value */
                "bne %0, %2, 2f\n\t"            /* compare */
                "move %0, %3\n\t"               /* value to store */
                "sc %0, %1\n\t"                 /* attempt to store */
                "beqz %0, 1b\n\t"               /* if it failed, spin */
                "j 3f\n\t"
                "2:\n\t"
                "li %0, 0\n\t"
                "3:\n"
                : "=&r" (ret), "=m" (*p)
                : "r" (cmpval), "r" (newval), "m" (*p)
                : "memory");

        return ret;
}
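
/*
 * Example (illustrative sketch, not part of the kernel API): a lock-free
 * read-modify-write built on atomic_cmpset_32, here a bounded increment
 * that never advances the counter past "limit":
 *
 *      static __inline uint32_t
 *      example_bounded_inc(__volatile uint32_t *p, uint32_t limit)
 *      {
 *              uint32_t old;
 *
 *              do {
 *                      old = *p;
 *                      if (old == limit)
 *                              break;
 *              } while (atomic_cmpset_32(p, old, old + 1) == 0);
 *              return (old);
 *      }
 */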

/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval.  Returns
 * zero if the compare failed, nonzero otherwise.  The _acq variant adds
 * acquire semantics, the _rel variant release semantics.
 */
static __inline uint32_t
atomic_cmpset_acq_32(__volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
        int retval;

        retval = atomic_cmpset_32(p, cmpval, newval);
        mips_sync();
        return (retval);
}

static __inline uint32_t
atomic_cmpset_rel_32(__volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
        mips_sync();
        return (atomic_cmpset_32(p, cmpval, newval));
}

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline uint32_t
atomic_fetchadd_32(__volatile uint32_t *p, uint32_t v)
{
        uint32_t value, temp;

        __asm __volatile (
                "1:\tll %0, %1\n\t"             /* load old value */
                "addu %2, %3, %0\n\t"           /* calculate new value */
                "sc %2, %1\n\t"                 /* attempt to store */
                "beqz %2, 1b\n\t"               /* spin if failed */
                : "=&r" (value), "=m" (*p), "=&r" (temp)
                : "r" (v), "m" (*p));
        return (value);
}
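
/*
 * Example (illustrative, hypothetical identifiers): because the previous
 * value is returned, atomic_fetchadd_32 can hand out unique, monotonically
 * increasing tickets to concurrent callers:
 *
 *      my_ticket = atomic_fetchadd_32(&sc->sc_next_ticket, 1);
 *      while (atomic_load_acq_32(&sc->sc_now_serving) != my_ticket)
 *              ;                       // spin until it is our turn
 */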

#if defined(__mips_n64) || defined(__mips_n32)
/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval. Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline uint64_t
atomic_cmpset_64(__volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
        uint64_t ret;

        __asm __volatile (
                "1:\n\t"
                "lld %0, %4\n\t"                /* load old value */
                "bne %0, %2, 2f\n\t"            /* compare */
                "move %0, %3\n\t"               /* value to store */
                "scd %0, %1\n\t"                /* attempt to store */
                "beqz %0, 1b\n\t"               /* if it failed, spin */
                "j 3f\n\t"
                "2:\n\t"
                "li %0, 0\n\t"
                "3:\n"
                : "=&r" (ret), "=m" (*p)
                : "r" (cmpval), "r" (newval), "m" (*p)
                : "memory");

        return ret;
}

/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval.  Returns
 * zero if the compare failed, nonzero otherwise.  The _acq variant adds
 * acquire semantics, the _rel variant release semantics.
 */
static __inline uint64_t
atomic_cmpset_acq_64(__volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
        int retval;

        retval = atomic_cmpset_64(p, cmpval, newval);
        mips_sync();
        return (retval);
}

static __inline uint64_t
atomic_cmpset_rel_64(__volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
        mips_sync();
        return (atomic_cmpset_64(p, cmpval, newval));
}

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline uint64_t
atomic_fetchadd_64(__volatile uint64_t *p, uint64_t v)
{
        uint64_t value, temp;

        __asm __volatile (
                "1:\n\t"
                "lld %0, %1\n\t"                /* load old value */
                "daddu %2, %3, %0\n\t"          /* calculate new value */
                "scd %2, %1\n\t"                /* attempt to store */
                "beqz %2, 1b\n\t"               /* spin if failed */
                : "=&r" (value), "=m" (*p), "=&r" (temp)
                : "r" (v), "m" (*p));
        return (value);
}
#endif /* __mips_n64 || __mips_n32 */

static __inline void
atomic_thread_fence_acq(void)
{

        mips_sync();
}

static __inline void
atomic_thread_fence_rel(void)
{

        mips_sync();
}

static __inline void
atomic_thread_fence_acq_rel(void)
{

        mips_sync();
}

static __inline void
atomic_thread_fence_seq_cst(void)
{

        mips_sync();
}
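
/*
 * Example (illustrative, hypothetical identifiers): a Dekker-style
 * store-then-load handshake needs a full fence between the store and the
 * subsequent load, or both CPUs may observe each other's old flag value:
 *
 *      flag[self] = 1;
 *      atomic_thread_fence_seq_cst();
 *      if (flag[other] == 0) {
 *              // the other CPU has not raised its flag; safe to proceed
 *      }
 */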

/* Operations on chars. */
#define atomic_set_char atomic_set_8
#define atomic_set_acq_char atomic_set_acq_8
#define atomic_set_rel_char atomic_set_rel_8
#define atomic_clear_char atomic_clear_8
#define atomic_clear_acq_char atomic_clear_acq_8
#define atomic_clear_rel_char atomic_clear_rel_8
#define atomic_add_char atomic_add_8
#define atomic_add_acq_char atomic_add_acq_8
#define atomic_add_rel_char atomic_add_rel_8
#define atomic_subtract_char atomic_subtract_8
#define atomic_subtract_acq_char atomic_subtract_acq_8
#define atomic_subtract_rel_char atomic_subtract_rel_8

/* Operations on shorts. */
#define atomic_set_short atomic_set_16
#define atomic_set_acq_short atomic_set_acq_16
#define atomic_set_rel_short atomic_set_rel_16
#define atomic_clear_short atomic_clear_16
#define atomic_clear_acq_short atomic_clear_acq_16
#define atomic_clear_rel_short atomic_clear_rel_16
#define atomic_add_short atomic_add_16
#define atomic_add_acq_short atomic_add_acq_16
#define atomic_add_rel_short atomic_add_rel_16
#define atomic_subtract_short atomic_subtract_16
#define atomic_subtract_acq_short atomic_subtract_acq_16
#define atomic_subtract_rel_short atomic_subtract_rel_16

/* Operations on ints. */
#define atomic_set_int atomic_set_32
#define atomic_set_acq_int atomic_set_acq_32
#define atomic_set_rel_int atomic_set_rel_32
#define atomic_clear_int atomic_clear_32
#define atomic_clear_acq_int atomic_clear_acq_32
#define atomic_clear_rel_int atomic_clear_rel_32
#define atomic_add_int atomic_add_32
#define atomic_add_acq_int atomic_add_acq_32
#define atomic_add_rel_int atomic_add_rel_32
#define atomic_subtract_int atomic_subtract_32
#define atomic_subtract_acq_int atomic_subtract_acq_32
#define atomic_subtract_rel_int atomic_subtract_rel_32
#define atomic_cmpset_int atomic_cmpset_32
#define atomic_cmpset_acq_int atomic_cmpset_acq_32
#define atomic_cmpset_rel_int atomic_cmpset_rel_32
#define atomic_load_acq_int atomic_load_acq_32
#define atomic_store_rel_int atomic_store_rel_32
#define atomic_readandclear_int atomic_readandclear_32
#define atomic_readandset_int atomic_readandset_32
#define atomic_fetchadd_int atomic_fetchadd_32

/*
 * The following is believed to be right even for n32: n32 pointers are
 * still 32 bits wide, so we operate on them as 32-bit quantities, even
 * though they are sign-extended when used.  For longs there is no
 * question, because on the 32-bit ABIs they are always 32 bits.
 */
#ifdef __mips_n64
/* Operations on longs. */
#define atomic_set_long atomic_set_64
#define atomic_set_acq_long atomic_set_acq_64
#define atomic_set_rel_long atomic_set_rel_64
#define atomic_clear_long atomic_clear_64
#define atomic_clear_acq_long atomic_clear_acq_64
#define atomic_clear_rel_long atomic_clear_rel_64
#define atomic_add_long atomic_add_64
#define atomic_add_acq_long atomic_add_acq_64
#define atomic_add_rel_long atomic_add_rel_64
#define atomic_subtract_long atomic_subtract_64
#define atomic_subtract_acq_long atomic_subtract_acq_64
#define atomic_subtract_rel_long atomic_subtract_rel_64
#define atomic_cmpset_long atomic_cmpset_64
#define atomic_cmpset_acq_long atomic_cmpset_acq_64
#define atomic_cmpset_rel_long atomic_cmpset_rel_64
#define atomic_load_acq_long atomic_load_acq_64
#define atomic_store_rel_long atomic_store_rel_64
#define atomic_fetchadd_long atomic_fetchadd_64
#define atomic_readandclear_long atomic_readandclear_64

#else /* !__mips_n64 */

/* Operations on longs. */
#define atomic_set_long(p, v) \
        atomic_set_32((volatile u_int *)(p), (u_int)(v))
#define atomic_set_acq_long(p, v) \
        atomic_set_acq_32((volatile u_int *)(p), (u_int)(v))
#define atomic_set_rel_long(p, v) \
        atomic_set_rel_32((volatile u_int *)(p), (u_int)(v))
#define atomic_clear_long(p, v) \
        atomic_clear_32((volatile u_int *)(p), (u_int)(v))
#define atomic_clear_acq_long(p, v) \
        atomic_clear_acq_32((volatile u_int *)(p), (u_int)(v))
#define atomic_clear_rel_long(p, v) \
        atomic_clear_rel_32((volatile u_int *)(p), (u_int)(v))
#define atomic_add_long(p, v) \
        atomic_add_32((volatile u_int *)(p), (u_int)(v))
#define atomic_add_acq_long(p, v) \
        atomic_add_acq_32((volatile u_int *)(p), (u_int)(v))
#define atomic_add_rel_long(p, v) \
        atomic_add_rel_32((volatile u_int *)(p), (u_int)(v))
#define atomic_subtract_long(p, v) \
        atomic_subtract_32((volatile u_int *)(p), (u_int)(v))
#define atomic_subtract_acq_long(p, v) \
        atomic_subtract_acq_32((volatile u_int *)(p), (u_int)(v))
#define atomic_subtract_rel_long(p, v) \
        atomic_subtract_rel_32((volatile u_int *)(p), (u_int)(v))
#define atomic_cmpset_long(p, cmpval, newval) \
        atomic_cmpset_32((volatile u_int *)(p), (u_int)(cmpval), \
            (u_int)(newval))
#define atomic_cmpset_acq_long(p, cmpval, newval) \
        atomic_cmpset_acq_32((volatile u_int *)(p), (u_int)(cmpval), \
            (u_int)(newval))
#define atomic_cmpset_rel_long(p, cmpval, newval) \
        atomic_cmpset_rel_32((volatile u_int *)(p), (u_int)(cmpval), \
            (u_int)(newval))
#define atomic_load_acq_long(p) \
        (u_long)atomic_load_acq_32((volatile u_int *)(p))
#define atomic_store_rel_long(p, v) \
        atomic_store_rel_32((volatile u_int *)(p), (u_int)(v))
#define atomic_fetchadd_long(p, v) \
        atomic_fetchadd_32((volatile u_int *)(p), (u_int)(v))
#define atomic_readandclear_long(p) \
        atomic_readandclear_32((volatile u_int *)(p))

#endif /* __mips_n64 */

/* Operations on pointers. */
#define atomic_set_ptr atomic_set_long
#define atomic_set_acq_ptr atomic_set_acq_long
#define atomic_set_rel_ptr atomic_set_rel_long
#define atomic_clear_ptr atomic_clear_long
#define atomic_clear_acq_ptr atomic_clear_acq_long
#define atomic_clear_rel_ptr atomic_clear_rel_long
#define atomic_add_ptr atomic_add_long
#define atomic_add_acq_ptr atomic_add_acq_long
#define atomic_add_rel_ptr atomic_add_rel_long
#define atomic_subtract_ptr atomic_subtract_long
#define atomic_subtract_acq_ptr atomic_subtract_acq_long
#define atomic_subtract_rel_ptr atomic_subtract_rel_long
#define atomic_cmpset_ptr atomic_cmpset_long
#define atomic_cmpset_acq_ptr atomic_cmpset_acq_long
#define atomic_cmpset_rel_ptr atomic_cmpset_rel_long
#define atomic_load_acq_ptr atomic_load_acq_long
#define atomic_store_rel_ptr atomic_store_rel_long
#define atomic_readandclear_ptr atomic_readandclear_long

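/*
 * Example (illustrative sketch, hypothetical types): the pointer aliases
 * allow lock-free structures without caring about the ABI's pointer width,
 * e.g. pushing onto a singly-linked list head with a CAS loop:
 *
 *      do {
 *              old = (struct entry *)atomic_load_acq_ptr(
 *                  (volatile uintptr_t *)&head);
 *              new->next = old;
 *      } while (atomic_cmpset_ptr((volatile uintptr_t *)&head,
 *          (uintptr_t)old, (uintptr_t)new) == 0);
 */
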
#endif /* ! _MACHINE_ATOMIC_H_ */