/*-
 * Copyright (c) 1998 Doug Rabson
 * All rights reserved.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 *
 * from: src/sys/alpha/include/atomic.h,v 1.21.2.3 2005/10/06 18:12:05 jhb
 * $FreeBSD: releng/9.0/sys/mips/include/atomic.h 222234 2011-05-23 23:35:50Z attilio $
 */

#ifndef _MACHINE_ATOMIC_H_
#define	_MACHINE_ATOMIC_H_

#ifndef _SYS_CDEFS_H_
#error this file needs sys/cdefs.h as a prerequisite
#endif

/*
 * Note: the 64-bit atomic operations are atomic only when running in
 * 64-bit mode.  Code compiled for the n32 and n64 ABIs is assumed to
 * satisfy that requirement, so no further guards are needed.
 *
 * It is also assumed that the add, subtract and other arithmetic
 * operations act on integers, not pointers.  n32 pointers, which have
 * special rules, get no atomic operations here, but they generally
 * should not need any.
 */

static __inline void
mips_sync(void)
{
	__asm __volatile (".set noreorder\n\t"
			"sync\n\t"
			"nop\n\t"
			"nop\n\t"
			"nop\n\t"
			"nop\n\t"
			"nop\n\t"
			"nop\n\t"
			"nop\n\t"
			"nop\n\t"
			".set reorder\n"
			: : : "memory");
}

#define	mb()	mips_sync()
#define	wmb()	mips_sync()
#define	rmb()	mips_sync()
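
/*
 * Example (illustrative sketch, not part of this header; all names are
 * hypothetical): wmb() orders a payload write before the write that
 * publishes it, and a matching rmb() on the consumer side orders the
 * flag read before the payload read.
 *
 *	shared_data = compute();	// produce the payload
 *	wmb();				// payload visible before flag
 *	shared_ready = 1;		// publish
 *
 *	while (shared_ready == 0)	// consumer side
 *		;
 *	rmb();				// flag read before payload read
 *	use(shared_data);
 */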

/*
 * Various simple arithmetic operations on memory that are atomic in the
 * presence of interrupts and SMP-safe.
 */

void atomic_set_8(__volatile uint8_t *, uint8_t);
void atomic_clear_8(__volatile uint8_t *, uint8_t);
void atomic_add_8(__volatile uint8_t *, uint8_t);
void atomic_subtract_8(__volatile uint8_t *, uint8_t);

void atomic_set_16(__volatile uint16_t *, uint16_t);
void atomic_clear_16(__volatile uint16_t *, uint16_t);
void atomic_add_16(__volatile uint16_t *, uint16_t);
void atomic_subtract_16(__volatile uint16_t *, uint16_t);

static __inline void
atomic_set_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

	__asm __volatile (
		"1:\tll %0, %3\n\t"	/* load old value */
		"or %0, %2, %0\n\t"	/* calculate new value */
		"sc %0, %1\n\t"		/* attempt to store */
		"beqz %0, 1b\n\t"	/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}
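
/*
 * Example (illustrative, hypothetical names): atomic_set_32() is an
 * atomic bitwise OR, so a CPU can set its bit in a shared mask without
 * locking, even while other CPUs update neighbouring bits.  The LL/SC
 * loop above retries until the store-conditional succeeds.
 *
 *	static volatile uint32_t online_cpus;
 *
 *	atomic_set_32(&online_cpus, 1u << cpuid);	// mark online
 *	atomic_clear_32(&online_cpus, 1u << cpuid);	// mark offline
 */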

static __inline void
atomic_clear_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;
	v = ~v;

	__asm __volatile (
		"1:\tll %0, %3\n\t"	/* load old value */
		"and %0, %2, %0\n\t"	/* calculate new value */
		"sc %0, %1\n\t"		/* attempt to store */
		"beqz %0, 1b\n\t"	/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline void
atomic_add_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

	__asm __volatile (
		"1:\tll %0, %3\n\t"	/* load old value */
		"addu %0, %2, %0\n\t"	/* calculate new value */
		"sc %0, %1\n\t"		/* attempt to store */
		"beqz %0, 1b\n\t"	/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline void
atomic_subtract_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t temp;

	__asm __volatile (
		"1:\tll %0, %3\n\t"	/* load old value */
		"subu %0, %2\n\t"	/* calculate new value */
		"sc %0, %1\n\t"		/* attempt to store */
		"beqz %0, 1b\n\t"	/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline uint32_t
atomic_readandclear_32(__volatile uint32_t *addr)
{
	uint32_t result, temp;

	__asm __volatile (
		"1:\tll %0, %3\n\t"	/* load current value, asserting lock */
		"li %1, 0\n\t"		/* value to store */
		"sc %1, %2\n\t"		/* attempt to store */
		"beqz %1, 1b\n\t"	/* if the store failed, spin */
		: "=&r" (result), "=&r" (temp), "=m" (*addr)
		: "m" (*addr)
		: "memory");

	return (result);
}

static __inline uint32_t
atomic_readandset_32(__volatile uint32_t *addr, uint32_t value)
{
	uint32_t result, temp;

	__asm __volatile (
		"1:\tll %0, %3\n\t"	/* load current value, asserting lock */
		"or %1, $0, %4\n\t"	/* value to store */
		"sc %1, %2\n\t"		/* attempt to store */
		"beqz %1, 1b\n\t"	/* if the store failed, spin */
		: "=&r" (result), "=&r" (temp), "=m" (*addr)
		: "m" (*addr), "r" (value)
		: "memory");

	return (result);
}
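
/*
 * Example (illustrative, hypothetical names): atomic_readandclear_32()
 * drains a word of pending bits in one atomic step, so a bit posted
 * concurrently is either returned now or preserved for the next call,
 * never lost.
 *
 *	static volatile uint32_t pending;
 *
 *	uint32_t ev = atomic_readandclear_32(&pending);
 *	while (ev != 0) {
 *		int bit = ffs(ev) - 1;	// lowest pending bit
 *		handle_event(bit);	// hypothetical handler
 *		ev &= ~(1u << bit);
 *	}
 */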

#if defined(__mips_n64) || defined(__mips_n32)
static __inline void
atomic_set_64(__volatile uint64_t *p, uint64_t v)
{
	uint64_t temp;

	__asm __volatile (
		"1:\n\t"
		"lld %0, %3\n\t"	/* load old value */
		"or %0, %2, %0\n\t"	/* calculate new value */
		"scd %0, %1\n\t"	/* attempt to store */
		"beqz %0, 1b\n\t"	/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline void
atomic_clear_64(__volatile uint64_t *p, uint64_t v)
{
	uint64_t temp;
	v = ~v;

	__asm __volatile (
		"1:\n\t"
		"lld %0, %3\n\t"	/* load old value */
		"and %0, %2, %0\n\t"	/* calculate new value */
		"scd %0, %1\n\t"	/* attempt to store */
		"beqz %0, 1b\n\t"	/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline void
atomic_add_64(__volatile uint64_t *p, uint64_t v)
{
	uint64_t temp;

	__asm __volatile (
		"1:\n\t"
		"lld %0, %3\n\t"	/* load old value */
		"daddu %0, %2, %0\n\t"	/* calculate new value */
		"scd %0, %1\n\t"	/* attempt to store */
		"beqz %0, 1b\n\t"	/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline void
atomic_subtract_64(__volatile uint64_t *p, uint64_t v)
{
	uint64_t temp;

	__asm __volatile (
		"1:\n\t"
		"lld %0, %3\n\t"	/* load old value */
		"dsubu %0, %2\n\t"	/* calculate new value */
		"scd %0, %1\n\t"	/* attempt to store */
		"beqz %0, 1b\n\t"	/* spin if failed */
		: "=&r" (temp), "=m" (*p)
		: "r" (v), "m" (*p)
		: "memory");
}

static __inline uint64_t
atomic_readandclear_64(__volatile uint64_t *addr)
{
	uint64_t result, temp;

	__asm __volatile (
		"1:\n\t"
		"lld %0, %3\n\t"	/* load old value */
		"li %1, 0\n\t"		/* value to store */
		"scd %1, %2\n\t"	/* attempt to store */
		"beqz %1, 1b\n\t"	/* if the store failed, spin */
		: "=&r" (result), "=&r" (temp), "=m" (*addr)
		: "m" (*addr)
		: "memory");

	return (result);
}

static __inline uint64_t
atomic_readandset_64(__volatile uint64_t *addr, uint64_t value)
{
	uint64_t result, temp;

	__asm __volatile (
		"1:\n\t"
		"lld %0, %3\n\t"	/* load old value */
		"or %1, $0, %4\n\t"	/* value to store */
		"scd %1, %2\n\t"	/* attempt to store */
		"beqz %1, 1b\n\t"	/* if the store failed, spin */
		: "=&r" (result), "=&r" (temp), "=m" (*addr)
		: "m" (*addr), "r" (value)
		: "memory");

	return (result);
}
#endif

#define	ATOMIC_ACQ_REL(NAME, WIDTH)					\
static __inline void							\
atomic_##NAME##_acq_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	atomic_##NAME##_##WIDTH(p, v);					\
	mips_sync();							\
}									\
									\
static __inline void							\
atomic_##NAME##_rel_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	mips_sync();							\
	atomic_##NAME##_##WIDTH(p, v);					\
}
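
/*
 * For reference, ATOMIC_ACQ_REL(set, 32) expands to (reformatted):
 *
 *	static __inline void
 *	atomic_set_acq_32(__volatile uint32_t *p, uint32_t v)
 *	{
 *		atomic_set_32(p, v);
 *		mips_sync();
 *	}
 *
 *	static __inline void
 *	atomic_set_rel_32(__volatile uint32_t *p, uint32_t v)
 *	{
 *		mips_sync();
 *		atomic_set_32(p, v);
 *	}
 *
 * i.e. acquire variants fence after the operation and release variants
 * fence before it, both with the full mips_sync() barrier.
 */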

/* Variants of simple arithmetic with memory barriers. */
ATOMIC_ACQ_REL(set, 8)
ATOMIC_ACQ_REL(clear, 8)
ATOMIC_ACQ_REL(add, 8)
ATOMIC_ACQ_REL(subtract, 8)
ATOMIC_ACQ_REL(set, 16)
ATOMIC_ACQ_REL(clear, 16)
ATOMIC_ACQ_REL(add, 16)
ATOMIC_ACQ_REL(subtract, 16)
ATOMIC_ACQ_REL(set, 32)
ATOMIC_ACQ_REL(clear, 32)
ATOMIC_ACQ_REL(add, 32)
ATOMIC_ACQ_REL(subtract, 32)
#if defined(__mips_n64) || defined(__mips_n32)
ATOMIC_ACQ_REL(set, 64)
ATOMIC_ACQ_REL(clear, 64)
ATOMIC_ACQ_REL(add, 64)
ATOMIC_ACQ_REL(subtract, 64)
#endif

#undef ATOMIC_ACQ_REL

/*
 * We assume that a = b will do atomic loads and stores.
 */
#define	ATOMIC_STORE_LOAD(WIDTH)					\
static __inline uint##WIDTH##_t					\
atomic_load_acq_##WIDTH(__volatile uint##WIDTH##_t *p)			\
{									\
	uint##WIDTH##_t v;						\
									\
	v = *p;								\
	mips_sync();							\
	return (v);							\
}									\
									\
static __inline void							\
atomic_store_rel_##WIDTH(__volatile uint##WIDTH##_t *p, uint##WIDTH##_t v)\
{									\
	mips_sync();							\
	*p = v;								\
}

ATOMIC_STORE_LOAD(32)
ATOMIC_STORE_LOAD(64)
#if !defined(__mips_n64) && !defined(__mips_n32)
void atomic_store_64(__volatile uint64_t *, uint64_t *);
void atomic_load_64(__volatile uint64_t *, uint64_t *);
#else
static __inline void
atomic_store_64(__volatile uint64_t *p, uint64_t *v)
{
	*p = *v;
}

static __inline void
atomic_load_64(__volatile uint64_t *p, uint64_t *v)
{
	*v = *p;
}
#endif

#undef ATOMIC_STORE_LOAD
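
/*
 * Example (illustrative sketch, hypothetical names): a store_rel/
 * load_acq pair is the usual way to publish data through a flag.
 *
 *	static volatile uint32_t msg;
 *	static volatile uint32_t msg_ready;
 *
 *	msg = 42;				// producer fills payload
 *	atomic_store_rel_32(&msg_ready, 1);	// barrier, then store
 *
 *	while (atomic_load_acq_32(&msg_ready) == 0)
 *		;				// load, then barrier
 *	consume(msg);				// sees msg == 42
 */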

/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval.  Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline uint32_t
atomic_cmpset_32(__volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	uint32_t ret;

	__asm __volatile (
		"1:\tll %0, %4\n\t"	/* load old value */
		"bne %0, %2, 2f\n\t"	/* compare */
		"move %0, %3\n\t"	/* value to store */
		"sc %0, %1\n\t"		/* attempt to store */
		"beqz %0, 1b\n\t"	/* if it failed, spin */
		"j 3f\n\t"
		"2:\n\t"
		"li %0, 0\n\t"
		"3:\n"
		: "=&r" (ret), "=m" (*p)
		: "r" (cmpval), "r" (newval), "m" (*p)
		: "memory");

	return (ret);
}

/*
 * Atomic compare-and-set with acquire and release semantics: as above,
 * with a memory barrier after (acq) or before (rel) the operation.
 */
static __inline uint32_t
atomic_cmpset_acq_32(__volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	uint32_t retval;

	retval = atomic_cmpset_32(p, cmpval, newval);
	mips_sync();
	return (retval);
}

static __inline uint32_t
atomic_cmpset_rel_32(__volatile uint32_t *p, uint32_t cmpval, uint32_t newval)
{
	mips_sync();
	return (atomic_cmpset_32(p, cmpval, newval));
}
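
/*
 * Example (illustrative sketch, hypothetical names): a minimal spinlock
 * built on these primitives.  Acquire semantics on the winning cmpset
 * keep the critical section from being reordered above the lock;
 * release semantics on the store keep it from sinking below the unlock.
 *
 *	static volatile uint32_t lock;	// 0 = free, 1 = held
 *
 *	while (atomic_cmpset_acq_32(&lock, 0, 1) == 0)
 *		;			// spin until the cmpset wins
 *	// ... critical section ...
 *	atomic_store_rel_32(&lock, 0);	// release the lock
 */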

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline uint32_t
atomic_fetchadd_32(__volatile uint32_t *p, uint32_t v)
{
	uint32_t value, temp;

	__asm __volatile (
		"1:\tll %0, %1\n\t"	/* load old value */
		"addu %2, %3, %0\n\t"	/* calculate new value */
		"sc %2, %1\n\t"		/* attempt to store */
		"beqz %2, 1b\n\t"	/* spin if failed */
		: "=&r" (value), "=m" (*p), "=&r" (temp)
		: "r" (v), "m" (*p));
	return (value);
}
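
/*
 * Example (illustrative, hypothetical names): because fetchadd returns
 * the value *before* the addition, concurrent callers each obtain a
 * distinct value, which makes it a natural ID or ticket allocator.
 *
 *	static volatile uint32_t next_id;
 *
 *	uint32_t my_id = atomic_fetchadd_32(&next_id, 1);
 */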

#if defined(__mips_n64) || defined(__mips_n32)
/*
 * Atomically compare the value stored at *p with cmpval and if the
 * two values are equal, update the value of *p with newval.  Returns
 * zero if the compare failed, nonzero otherwise.
 */
static __inline uint64_t
atomic_cmpset_64(__volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
	uint64_t ret;

	__asm __volatile (
		"1:\n\t"
		"lld %0, %4\n\t"	/* load old value */
		"bne %0, %2, 2f\n\t"	/* compare */
		"move %0, %3\n\t"	/* value to store */
		"scd %0, %1\n\t"	/* attempt to store */
		"beqz %0, 1b\n\t"	/* if it failed, spin */
		"j 3f\n\t"
		"2:\n\t"
		"li %0, 0\n\t"
		"3:\n"
		: "=&r" (ret), "=m" (*p)
		: "r" (cmpval), "r" (newval), "m" (*p)
		: "memory");

	return (ret);
}

/*
 * Atomic compare-and-set with acquire and release semantics: as above,
 * with a memory barrier after (acq) or before (rel) the operation.
 */
static __inline uint64_t
atomic_cmpset_acq_64(__volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
	uint64_t retval;

	retval = atomic_cmpset_64(p, cmpval, newval);
	mips_sync();
	return (retval);
}

static __inline uint64_t
atomic_cmpset_rel_64(__volatile uint64_t *p, uint64_t cmpval, uint64_t newval)
{
	mips_sync();
	return (atomic_cmpset_64(p, cmpval, newval));
}

/*
 * Atomically add the value of v to the integer pointed to by p and return
 * the previous value of *p.
 */
static __inline uint64_t
atomic_fetchadd_64(__volatile uint64_t *p, uint64_t v)
{
	uint64_t value, temp;

	__asm __volatile (
		"1:\n\t"
		"lld %0, %1\n\t"	/* load old value */
		"daddu %2, %3, %0\n\t"	/* calculate new value */
		"scd %2, %1\n\t"	/* attempt to store */
		"beqz %2, 1b\n\t"	/* spin if failed */
		: "=&r" (value), "=m" (*p), "=&r" (temp)
		: "r" (v), "m" (*p));
	return (value);
}
#endif

/* Operations on chars. */
#define	atomic_set_char		atomic_set_8
#define	atomic_set_acq_char	atomic_set_acq_8
#define	atomic_set_rel_char	atomic_set_rel_8
#define	atomic_clear_char	atomic_clear_8
#define	atomic_clear_acq_char	atomic_clear_acq_8
#define	atomic_clear_rel_char	atomic_clear_rel_8
#define	atomic_add_char		atomic_add_8
#define	atomic_add_acq_char	atomic_add_acq_8
#define	atomic_add_rel_char	atomic_add_rel_8
#define	atomic_subtract_char	atomic_subtract_8
#define	atomic_subtract_acq_char	atomic_subtract_acq_8
#define	atomic_subtract_rel_char	atomic_subtract_rel_8

/* Operations on shorts. */
#define	atomic_set_short	atomic_set_16
#define	atomic_set_acq_short	atomic_set_acq_16
#define	atomic_set_rel_short	atomic_set_rel_16
#define	atomic_clear_short	atomic_clear_16
#define	atomic_clear_acq_short	atomic_clear_acq_16
#define	atomic_clear_rel_short	atomic_clear_rel_16
#define	atomic_add_short	atomic_add_16
#define	atomic_add_acq_short	atomic_add_acq_16
#define	atomic_add_rel_short	atomic_add_rel_16
#define	atomic_subtract_short	atomic_subtract_16
#define	atomic_subtract_acq_short	atomic_subtract_acq_16
#define	atomic_subtract_rel_short	atomic_subtract_rel_16

/* Operations on ints. */
#define	atomic_set_int		atomic_set_32
#define	atomic_set_acq_int	atomic_set_acq_32
#define	atomic_set_rel_int	atomic_set_rel_32
#define	atomic_clear_int	atomic_clear_32
#define	atomic_clear_acq_int	atomic_clear_acq_32
#define	atomic_clear_rel_int	atomic_clear_rel_32
#define	atomic_add_int		atomic_add_32
#define	atomic_add_acq_int	atomic_add_acq_32
#define	atomic_add_rel_int	atomic_add_rel_32
#define	atomic_subtract_int	atomic_subtract_32
#define	atomic_subtract_acq_int	atomic_subtract_acq_32
#define	atomic_subtract_rel_int	atomic_subtract_rel_32
#define	atomic_cmpset_int	atomic_cmpset_32
#define	atomic_cmpset_acq_int	atomic_cmpset_acq_32
#define	atomic_cmpset_rel_int	atomic_cmpset_rel_32
#define	atomic_load_acq_int	atomic_load_acq_32
#define	atomic_store_rel_int	atomic_store_rel_32
#define	atomic_readandclear_int	atomic_readandclear_32
#define	atomic_readandset_int	atomic_readandset_32
#define	atomic_fetchadd_int	atomic_fetchadd_32

/*
 * I think the following is right, even for n32.  For n32 the pointers
 * are still 32 bits, so we need to operate on them as 32-bit quantities,
 * even though they are sign-extended in operation.  For longs there is
 * no question, because they are always 32 bits.
 */
#ifdef __mips_n64
/* Operations on longs. */
#define	atomic_set_long		atomic_set_64
#define	atomic_set_acq_long	atomic_set_acq_64
#define	atomic_set_rel_long	atomic_set_rel_64
#define	atomic_clear_long	atomic_clear_64
#define	atomic_clear_acq_long	atomic_clear_acq_64
#define	atomic_clear_rel_long	atomic_clear_rel_64
#define	atomic_add_long		atomic_add_64
#define	atomic_add_acq_long	atomic_add_acq_64
#define	atomic_add_rel_long	atomic_add_rel_64
#define	atomic_subtract_long	atomic_subtract_64
#define	atomic_subtract_acq_long	atomic_subtract_acq_64
#define	atomic_subtract_rel_long	atomic_subtract_rel_64
#define	atomic_cmpset_long	atomic_cmpset_64
#define	atomic_cmpset_acq_long	atomic_cmpset_acq_64
#define	atomic_cmpset_rel_long	atomic_cmpset_rel_64
#define	atomic_load_acq_long	atomic_load_acq_64
#define	atomic_store_rel_long	atomic_store_rel_64
#define	atomic_fetchadd_long	atomic_fetchadd_64
#define	atomic_readandclear_long	atomic_readandclear_64

#else /* !__mips_n64 */

/* Operations on longs. */
#define	atomic_set_long(p, v)						\
	atomic_set_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_acq_long(p, v)					\
	atomic_set_acq_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_set_rel_long(p, v)					\
	atomic_set_rel_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_long(p, v)						\
	atomic_clear_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_acq_long(p, v)					\
	atomic_clear_acq_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_clear_rel_long(p, v)					\
	atomic_clear_rel_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_long(p, v)						\
	atomic_add_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_acq_long(p, v)					\
	atomic_add_acq_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_add_rel_long(p, v)					\
	atomic_add_rel_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_long(p, v)					\
	atomic_subtract_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_acq_long(p, v)					\
	atomic_subtract_acq_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_subtract_rel_long(p, v)					\
	atomic_subtract_rel_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_cmpset_long(p, cmpval, newval)				\
	atomic_cmpset_32((volatile u_int *)(p), (u_int)(cmpval),	\
	    (u_int)(newval))
#define	atomic_cmpset_acq_long(p, cmpval, newval)			\
	atomic_cmpset_acq_32((volatile u_int *)(p), (u_int)(cmpval),	\
	    (u_int)(newval))
#define	atomic_cmpset_rel_long(p, cmpval, newval)			\
	atomic_cmpset_rel_32((volatile u_int *)(p), (u_int)(cmpval),	\
	    (u_int)(newval))
#define	atomic_load_acq_long(p)						\
	(u_long)atomic_load_acq_32((volatile u_int *)(p))
#define	atomic_store_rel_long(p, v)					\
	atomic_store_rel_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_fetchadd_long(p, v)					\
	atomic_fetchadd_32((volatile u_int *)(p), (u_int)(v))
#define	atomic_readandclear_long(p)					\
	atomic_readandclear_32((volatile u_int *)(p))

#endif /* __mips_n64 */

/* Operations on pointers. */
#define	atomic_set_ptr		atomic_set_long
#define	atomic_set_acq_ptr	atomic_set_acq_long
#define	atomic_set_rel_ptr	atomic_set_rel_long
#define	atomic_clear_ptr	atomic_clear_long
#define	atomic_clear_acq_ptr	atomic_clear_acq_long
#define	atomic_clear_rel_ptr	atomic_clear_rel_long
#define	atomic_add_ptr		atomic_add_long
#define	atomic_add_acq_ptr	atomic_add_acq_long
#define	atomic_add_rel_ptr	atomic_add_rel_long
#define	atomic_subtract_ptr	atomic_subtract_long
#define	atomic_subtract_acq_ptr	atomic_subtract_acq_long
#define	atomic_subtract_rel_ptr	atomic_subtract_rel_long
#define	atomic_cmpset_ptr	atomic_cmpset_long
#define	atomic_cmpset_acq_ptr	atomic_cmpset_acq_long
#define	atomic_cmpset_rel_ptr	atomic_cmpset_rel_long
#define	atomic_load_acq_ptr	atomic_load_acq_long
#define	atomic_store_rel_ptr	atomic_store_rel_long
#define	atomic_readandclear_ptr	atomic_readandclear_long
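
/*
 * Example (illustrative sketch, hypothetical names): the pointer
 * variants support simple lock-free structures, e.g. pushing onto a
 * singly-linked list head with a cmpset retry loop.  The u_long casts
 * follow the long-based definitions above.
 *
 *	struct node { struct node *next; };
 *	static struct node *volatile head;
 *
 *	static void
 *	push(struct node *n)
 *	{
 *		struct node *old;
 *
 *		do {
 *			old = head;
 *			n->next = old;
 *		} while (atomic_cmpset_ptr((volatile u_long *)&head,
 *		    (u_long)old, (u_long)n) == 0);
 *	}
 */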

#endif /* ! _MACHINE_ATOMIC_H_ */