/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/war.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)	{ (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		(*(volatile int *)&(v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	((v)->counter = (i))

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	ll	%0, %1		# atomic_add	\n"
			"	addu	%0, %2				\n"
			"	sc	%0, %1				\n"
			: "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}
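
/*
 * Illustrative sketch (not part of the original header): how a caller
 * might use ATOMIC_INIT(), atomic_add() and atomic_read() for a simple
 * statistics counter.  The names below are hypothetical and the block
 * is compiled out.
 */
#if 0
static atomic_t example_rx_packets = ATOMIC_INIT(0);

static __inline__ void example_account_rx(int nr)
{
	atomic_add(nr, &example_rx_packets);	/* atomic read-modify-write */
}

static __inline__ int example_rx_count(void)
{
	return atomic_read(&example_rx_packets);	/* plain atomic load */
}
#endif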

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	ll	%0, %1		# atomic_sub	\n"
			"	subu	%0, %2				\n"
			"	sc	%0, %1				\n"
			: "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter -= i;
		raw_local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	ll	%1, %2	# atomic_add_return	\n"
			"	addu	%0, %1, %3			\n"
			"	sc	%0, %2				\n"
			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter)
			: "memory");
		} while (unlikely(!result));

		result = temp + i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
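
/*
 * Illustrative sketch (not part of the original header): atomic_add_return()
 * both updates the counter and returns the new value, so it can hand out
 * unique, monotonically increasing sequence numbers.  Hypothetical names,
 * compiled out.
 */
#if 0
static atomic_t example_seq = ATOMIC_INIT(0);

static __inline__ int example_next_seq(void)
{
	/* Each caller sees a distinct return value, even under SMP. */
	return atomic_add_return(1, &example_seq);
}
#endif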

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		do {
			__asm__ __volatile__(
			"	ll	%1, %2	# atomic_sub_return	\n"
			"	subu	%0, %1, %3			\n"
			"	sc	%0, %2				\n"
			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter)
			: "memory");
		} while (unlikely(!result));

		result = temp - i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	int result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		int temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		int temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 subu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}
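
/*
 * Illustrative sketch (not part of the original header): using
 * atomic_sub_if_positive() to consume from a limited budget without
 * letting it go negative.  Hypothetical names, compiled out.
 */
#if 0
static atomic_t example_credits = ATOMIC_INIT(16);

/* Returns 1 if @nr credits were taken, 0 if not enough were available. */
static __inline__ int example_take_credits(int nr)
{
	/*
	 * atomic_sub_if_positive() only stores the new value when the
	 * result stays >= 0, and returns the old value minus @nr.
	 */
	return atomic_sub_if_positive(nr, &example_credits) >= 0;
}
#endif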

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
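
/*
 * Illustrative sketch (not part of the original header): the classic
 * compare-and-swap retry loop built on atomic_cmpxchg(), here used to
 * record a maximum.  Hypothetical helper name, compiled out.
 */
#if 0
static __inline__ void example_track_max(atomic_t *max, int val)
{
	int old = atomic_read(max);

	while (old < val) {
		/* If nobody changed *max in the meantime, install val. */
		int prev = atomic_cmpxchg(max, old, val);

		if (prev == old)
			break;		/* our update won */
		old = prev;		/* lost the race; retry with the new value */
	}
}
#endif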

/*
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
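
/*
 * Illustrative sketch (not part of the original header): the usual
 * "take a reference only if the object is still live" idiom built on
 * atomic_add_unless()/atomic_inc_not_zero().  Hypothetical type and
 * names, compiled out.
 */
#if 0
struct example_obj {
	atomic_t refcount;		/* 0 means the object is being freed */
};

static __inline__ int example_obj_get(struct example_obj *obj)
{
	/* Fails (returns 0) if the refcount has already dropped to zero. */
	return atomic_inc_not_zero(&obj->refcount);
}
#endif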

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
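
/*
 * Illustrative sketch (not part of the original header): the matching
 * "put" side of a reference count, where atomic_dec_and_test() tells
 * exactly one caller that it dropped the last reference.  Hypothetical
 * names, compiled out.
 */
#if 0
static __inline__ int example_obj_put(atomic_t *refcount)
{
	/* Returns 1 to the single caller who must now free the object. */
	return atomic_dec_and_test(refcount);
}
#endif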

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)
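
/*
 * Illustrative sketch (not part of the original header): using
 * atomic_add_negative() to detect that a signed counter dropped below
 * zero as part of the same atomic update.  Hypothetical names,
 * compiled out.
 */
#if 0
static atomic_t example_balance = ATOMIC_INIT(0);

/* Returns 1 if this charge drove the balance negative. */
static __inline__ int example_charge(int amount)
{
	return atomic_add_negative(-amount, &example_balance);
}
#endif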

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)	{ (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	(*(volatile long *)&(v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v, i)	((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	lld	%0, %1		# atomic64_add	\n"
			"	daddu	%0, %2				\n"
			"	scd	%0, %1				\n"
			: "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter += i;
		raw_local_irq_restore(flags);
	}
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	lld	%0, %1		# atomic64_sub	\n"
			"	dsubu	%0, %2				\n"
			"	scd	%0, %1				\n"
			: "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter));
		} while (unlikely(!temp));
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		v->counter -= i;
		raw_local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	lld	%1, %2	# atomic64_add_return	\n"
			"	daddu	%0, %1, %3			\n"
			"	scd	%0, %2				\n"
			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter)
			: "memory");
		} while (unlikely(!result));

		result = temp + i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		do {
			__asm__ __volatile__(
			"	lld	%1, %2	# atomic64_sub_return	\n"
			"	dsubu	%0, %1, %3			\n"
			"	scd	%0, %2				\n"
			: "=&r" (result), "=&r" (temp), "=m" (v->counter)
			: "Ir" (i), "m" (v->counter)
			: "memory");
		} while (unlikely(!result));

		result = temp - i;
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	smp_mb__before_llsc();

	if (kernel_uses_llsc && R10000_LLSC_WAR) {
		long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqzl	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (kernel_uses_llsc) {
		long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	.set	noreorder				\n"
		"	beqz	%0, 1b					\n"
		"	 dsubu	%0, %1, %3				\n"
		"	.set	reorder					\n"
		"1:							\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		raw_local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		raw_local_irq_restore(flags);
	}

	smp_llsc_mb();

	return result;
}

#define atomic64_cmpxchg(v, o, n) \
	((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/*
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;

	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v) atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#else /* !CONFIG_64BIT */

#include <asm-generic/atomic64.h>

#endif /* CONFIG_64BIT */
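
/*
 * Illustrative sketch (not part of the original header): atomic64_t
 * usage looks exactly like atomic_t, e.g. for a byte counter that may
 * overflow 32 bits.  The sketch assumes a 64-bit kernel; on 32-bit the
 * same interfaces come from <asm-generic/atomic64.h>.  Hypothetical
 * names, compiled out.
 */
#if 0
static atomic64_t example_tx_bytes = ATOMIC64_INIT(0);

static __inline__ void example_account_tx(long bytes)
{
	atomic64_add(bytes, &example_tx_bytes);
}

static __inline__ long example_tx_total(void)
{
	return atomic64_read(&example_tx_bytes);
}
#endif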

/*
 * atomic*_return operations are serializing but not the non-*_return
 * atomic operations.
 */
#define smp_mb__before_atomic_dec()	smp_mb__before_llsc()
#define smp_mb__after_atomic_dec()	smp_llsc_mb()
#define smp_mb__before_atomic_inc()	smp_mb__before_llsc()
#define smp_mb__after_atomic_inc()	smp_llsc_mb()
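
/*
 * Illustrative sketch (not part of the original header): the non-value-
 * returning atomics above are not ordering barriers, so code that needs
 * "all my stores are visible before the decrement" has to say so
 * explicitly.  Hypothetical names, compiled out.
 */
#if 0
static __inline__ void example_publish_and_dec(int *data, atomic_t *pending)
{
	*data = 1;			/* make the payload visible first */
	smp_mb__before_atomic_dec();	/* order the store before the dec */
	atomic_dec(pending);		/* then drop the pending count */
}
#endif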

#include <asm-generic/atomic-long.h>

#endif /* _ASM_ATOMIC_H */