#ifndef _ALPHA_ATOMIC_H
#define _ALPHA_ATOMIC_H

#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/system.h>
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc...
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 */
#define ATOMIC_INIT(i)		( (atomic_t) { (i) } )
#define ATOMIC64_INIT(i)	( (atomic64_t) { (i) } )

#define atomic_read(v)		(*(volatile int *)&(v)->counter)
#define atomic64_read(v)	(*(volatile long *)&(v)->counter)

#define atomic_set(v,i)		((v)->counter = (i))
#define atomic64_set(v,i)	((v)->counter = (i))
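
/*
 * Editor's illustration (not part of the original header): atomic_read()
 * is a plain volatile load and atomic_set() a plain store; neither implies
 * a memory barrier, and a read-modify-write built from the pair is NOT
 * atomic.  The helper name below is hypothetical.
 */
static __inline__ int atomic_example_reset(atomic_t *v)
{
	int old = atomic_read(v);	/* volatile load, no ordering */
	atomic_set(v, 0);		/* plain store, no ordering */
	return old;			/* racy: another CPU may modify the
					 * counter in between; a true RMW
					 * needs xchg()/cmpxchg() below */
}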
/*
 * To get proper branch prediction for the main line, we must branch
 * forward to code at the end of this object's .text section, then
 * branch back to restart the operation.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	unsigned long temp;
	__asm__ __volatile__(
	"1:	ldl_l %0,%1\n"
	"	addl %0,%2,%0\n"
	"	stl_c %0,%1\n"
	"	beq %0,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	:"=&r" (temp), "=m" (v->counter)
	:"Ir" (i), "m" (v->counter));
}
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	unsigned long temp;
	__asm__ __volatile__(
	"1:	ldq_l %0,%1\n"
	"	addq %0,%2,%0\n"
	"	stq_c %0,%1\n"
	"	beq %0,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	:"=&r" (temp), "=m" (v->counter)
	:"Ir" (i), "m" (v->counter));
}
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	unsigned long temp;
	__asm__ __volatile__(
	"1:	ldl_l %0,%1\n"
	"	subl %0,%2,%0\n"
	"	stl_c %0,%1\n"
	"	beq %0,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	:"=&r" (temp), "=m" (v->counter)
	:"Ir" (i), "m" (v->counter));
}
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	unsigned long temp;
	__asm__ __volatile__(
	"1:	ldq_l %0,%1\n"
	"	subq %0,%2,%0\n"
	"	stq_c %0,%1\n"
	"	beq %0,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	:"=&r" (temp), "=m" (v->counter)
	:"Ir" (i), "m" (v->counter));
}
/*
 * Same as above, but return the result value
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	long temp, result;
	smp_mb();
	__asm__ __volatile__(
	"1:	ldl_l %0,%1\n"
	"	addl %0,%3,%2\n"
	"	addl %0,%3,%0\n"
	"	stl_c %0,%1\n"
	"	beq %0,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	:"=&r" (temp), "=m" (v->counter), "=&r" (result)
	:"Ir" (i), "m" (v->counter) : "memory");
	smp_mb();
	return result;
}
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	long temp, result;
	smp_mb();
	__asm__ __volatile__(
	"1:	ldq_l %0,%1\n"
	"	addq %0,%3,%2\n"
	"	addq %0,%3,%0\n"
	"	stq_c %0,%1\n"
	"	beq %0,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	:"=&r" (temp), "=m" (v->counter), "=&r" (result)
	:"Ir" (i), "m" (v->counter) : "memory");
	smp_mb();
	return result;
}
static __inline__ long atomic_sub_return(int i, atomic_t * v)
{
	long temp, result;
	smp_mb();
	__asm__ __volatile__(
	"1:	ldl_l %0,%1\n"
	"	subl %0,%3,%2\n"
	"	subl %0,%3,%0\n"
	"	stl_c %0,%1\n"
	"	beq %0,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	:"=&r" (temp), "=m" (v->counter), "=&r" (result)
	:"Ir" (i), "m" (v->counter) : "memory");
	smp_mb();
	return result;
}
static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	long temp, result;
	smp_mb();
	__asm__ __volatile__(
	"1:	ldq_l %0,%1\n"
	"	subq %0,%3,%2\n"
	"	subq %0,%3,%0\n"
	"	stq_c %0,%1\n"
	"	beq %0,2f\n"
	".subsection 2\n"
	"2:	br 1b\n"
	".previous"
	:"=&r" (temp), "=m" (v->counter), "=&r" (result)
	:"Ir" (i), "m" (v->counter) : "memory");
	smp_mb();
	return result;
}
#define atomic64_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
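
/*
 * Editor's illustration (not part of the original header): atomic_cmpxchg()
 * is the building block for arbitrary read-modify-write operations.  The
 * function below is hypothetical, written in the same style as
 * __atomic_add_unless() further down: add @a but never let the counter
 * exceed @max.
 */
static __inline__ int atomic_add_clamped_example(atomic_t *v, int a, int max)
{
	int c, old, new;
	c = atomic_read(v);
	for (;;) {
		new = c + a;
		if (new > max)
			new = max;
		old = atomic_cmpxchg(v, c, new);	/* succeeds iff still == c */
		if (likely(old == c))
			break;
		c = old;	/* lost a race; retry with the fresh value */
	}
	return c;	/* old value, per cmpxchg-loop convention */
}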
/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;
	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c;
}
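
/*
 * Editor's illustration (not original): the generic atomic_inc_not_zero()
 * is built on this primitive as atomic_add_unless(v, 1, 0); the usual
 * "take a reference unless the object is already dead" idiom looks like:
 *
 *	if (__atomic_add_unless(&obj->refcount, 1, 0) == 0)
 *		return NULL;	// count was 0: object already going away
 *
 * (obj and its refcount field are hypothetical names.)
 */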
/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns true iff @v was not @u.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
	long c, old;
	c = atomic64_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic64_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}
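
/*
 * Editor's note (not original text): mind the asymmetric return
 * conventions: __atomic_add_unless() returns the old value, while
 * atomic64_add_unless() returns a boolean (true iff the add happened).
 * That boolean is what lets atomic64_inc_not_zero() below be a direct
 * alias.
 */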
#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)

#define atomic_add_negative(a, v) (atomic_add_return((a), (v)) < 0)
#define atomic64_add_negative(a, v) (atomic64_add_return((a), (v)) < 0)

#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic64_dec_return(v) atomic64_sub_return(1,(v))

#define atomic_inc_return(v) atomic_add_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

#define atomic_inc_and_test(v) (atomic_add_return(1, (v)) == 0)
#define atomic64_inc_and_test(v) (atomic64_add_return(1, (v)) == 0)

#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)
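
/*
 * Editor's illustration (not part of the original header): the canonical
 * reference-count "put" built on atomic_dec_and_test().  Names here are
 * hypothetical.
 */
static __inline__ void example_put_ref(atomic_t *refcount, void (*release)(void))
{
	/* dec_and_test is a full barrier, so all prior accesses to the
	 * object are ordered before the count can be observed as zero */
	if (atomic_dec_and_test(refcount))
		release();	/* last reference dropped: free the object */
}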
#define atomic_inc(v) atomic_add(1,(v))
#define atomic64_inc(v) atomic64_add(1,(v))

#define atomic_dec(v) atomic_sub(1,(v))
#define atomic64_dec(v) atomic64_sub(1,(v))

#define smp_mb__before_atomic_dec() smp_mb()
#define smp_mb__after_atomic_dec() smp_mb()
#define smp_mb__before_atomic_inc() smp_mb()
#define smp_mb__after_atomic_inc() smp_mb()
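
/*
 * Editor's illustration (not original): atomic_inc()/atomic_dec() imply
 * no ordering on Alpha, so callers that need it bracket them with the
 * macros above, e.g. (field names hypothetical):
 *
 *	obj->ready = 1;
 *	smp_mb__before_atomic_dec();	// order the store before the dec
 *	atomic_dec(&obj->pending);
 *
 * On Alpha these expand to real smp_mb() calls; architectures whose
 * atomic RMW ops already order memory define them more cheaply.
 */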
#endif /* _ALPHA_ATOMIC_H */