#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

typedef struct { volatile int counter; } atomic_t;

#ifdef __KERNEL__
#include <asm/synch.h>
#include <asm/asm-compat.h>

#define ATOMIC_INIT(i)		{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))

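/*
 * Usage sketch (hypothetical caller, not part of this header):
 *
 *	static atomic_t refs = ATOMIC_INIT(1);
 *
 *	atomic_set(&refs, 2);
 *	if (atomic_read(&refs) > 0)
 *		...
 *
 * Note that atomic_read() and atomic_set() are plain accesses; only the
 * read-modify-write operations below use lwarx/stwcx. sequences.
 */
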
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

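/*
 * Typical (hypothetical) use: bumping a counter and testing whether it
 * just reached zero, e.g.
 *
 *	if (atomic_inc_and_test(&pending))
 *		wake_up_waiters();	// hypothetical helper
 *
 * Because it is built on atomic_inc_return(), it carries the same
 * full-barrier semantics.
 */
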
static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))

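/*
 * atomic_cmpxchg() enables lock-free read-modify-write loops.  A
 * minimal sketch (hypothetical helper, not defined by this header)
 * that increments only while the counter is non-zero:
 *
 *	static inline int my_inc_not_zero(atomic_t *v)
 *	{
 *		int c, old;
 *
 *		c = atomic_read(v);
 *		while (c != 0 && (old = atomic_cmpxchg(v, c, c + 1)) != c)
 *			c = old;
 *		return c != 0;
 *	}
 */
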
#define atomic_sub_and_test(a, v) (atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v) (atomic_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	cmpwi	%0,1\n\
	addi	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

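/*
 * atomic_inc()/atomic_dec() do not return a value and therefore do not
 * imply a memory barrier; callers that need ordering pair them with the
 * macros above, e.g. (hypothetical caller):
 *
 *	obj->ready = 1;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->pending);
 */
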
#ifdef __powerpc64__

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

#define atomic64_read(v)	((v)->counter)
#define atomic64_set(v,i)	(((v)->counter) = (i))

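/*
 * The atomic64_*() operations below mirror the 32-bit versions one for
 * one, using the 64-bit reservation pair ldarx/stdcx. instead of
 * lwarx/stwcx.  The PPC405 erratum workaround is not needed here,
 * since the 405 is a 32-bit core.
 */
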
static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v) (atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v) (atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */