#ifndef _ASM_X86_ATOMIC_32_H
#define _ASM_X86_ATOMIC_32_H

#include <linux/compiler.h>
#include <linux/types.h>
#include <asm/processor.h>
#include <asm/cmpxchg.h>

/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 */

#define ATOMIC_INIT(i)	{ (i) }
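
/*
 * Illustrative sketch only (not part of this header): a minimal
 * reference count built from the primitives below. The names "refcnt",
 * "get_obj", "put_obj" and "release_obj" are hypothetical.
 *
 *	static atomic_t refcnt = ATOMIC_INIT(1);
 *
 *	void get_obj(void)
 *	{
 *		atomic_inc(&refcnt);
 *	}
 *
 *	void put_obj(void)
 *	{
 *		if (atomic_dec_and_test(&refcnt))
 *			release_obj();	(last reference dropped)
 *	}
 */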

/**
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/**
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)	(((v)->counter) = (i))
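
/*
 * Note on LOCK_PREFIX (defined in <asm/alternative.h>): it expands to
 * the "lock" instruction prefix on SMP builds and to nothing on UP, so
 * each asm statement below is a single locked read-modify-write when
 * other CPUs can race, at no extra cost on uniprocessor kernels.
 */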

/**
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static inline void atomic_add(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "addl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static inline void atomic_sub(int i, atomic_t *v)
{
	asm volatile(LOCK_PREFIX "subl %1,%0"
		     : "+m" (v->counter)
		     : "ir" (i));
}

/**
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_sub_and_test(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "subl %2,%0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}

/**
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
static inline void atomic_inc(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "incl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
static inline void atomic_dec(atomic_t *v)
{
	asm volatile(LOCK_PREFIX "decl %0"
		     : "+m" (v->counter));
}

/**
 * atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic_dec_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "decl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}

/**
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic_inc_and_test(atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "incl %0; sete %1"
		     : "+m" (v->counter), "=qm" (c)
		     : : "memory");
	return c != 0;
}

/**
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int atomic_add_negative(int i, atomic_t *v)
{
	unsigned char c;

	asm volatile(LOCK_PREFIX "addl %2,%0; sets %1"
		     : "+m" (v->counter), "=qm" (c)
		     : "ir" (i) : "memory");
	return c;
}

/**
 * atomic_add_return - add integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns @i + @v
 */
static inline int atomic_add_return(int i, atomic_t *v)
{
	int __i;
#ifdef CONFIG_M386
	unsigned long flags;
	if (unlikely(boot_cpu_data.x86 <= 3))
		goto no_xadd;
#endif
	/* Modern 486+ processor */
	__i = i;
	asm volatile(LOCK_PREFIX "xaddl %0, %1"
		     : "+r" (i), "+m" (v->counter)
		     : : "memory");
	return i + __i;

#ifdef CONFIG_M386
no_xadd: /* Legacy 386 processor */
	local_irq_save(flags);
	__i = atomic_read(v);
	atomic_set(v, i + __i);
	local_irq_restore(flags);
	return i + __i;
#endif
}
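
/*
 * Worked example of the xaddl data flow: with *v == 5 and i == 3, xaddl
 * stores 8 to memory and leaves the old value 5 in the register operand
 * (i), so "i + __i" reconstructs the new value, 8. The 386 fallback gets
 * the same result by disabling interrupts around a plain read/write,
 * which suffices only because a 386 kernel is always uniprocessor.
 */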

/**
 * atomic_sub_return - subtract integer and return
 * @v: pointer of type atomic_t
 * @i: integer value to subtract
 *
 * Atomically subtracts @i from @v and returns @v - @i
 */
static inline int atomic_sub_return(int i, atomic_t *v)
{
	return atomic_add_return(-i, v);
}

#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
static inline int atomic_add_unless(atomic_t *v, int a, int u)
{
	int c, old;

	c = atomic_read(v);
	for (;;) {
		if (unlikely(c == (u)))
			break;
		old = atomic_cmpxchg((v), c, c + (a));
		if (likely(old == c))
			break;
		c = old;
	}
	return c != (u);
}

#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic_inc_return(v)  (atomic_add_return(1, v))
#define atomic_dec_return(v)  (atomic_sub_return(1, v))
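
/*
 * Illustrative sketch of the usual lookup-side pattern built on
 * atomic_inc_not_zero(); "find_obj" and the object layout are
 * hypothetical:
 *
 *	obj = find_obj(key);
 *	if (obj && !atomic_inc_not_zero(&obj->refcnt))
 *		obj = NULL;	(raced with the final put; treat as gone)
 */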

/* These are x86-specific, used by some header files */
#define atomic_clear_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "andl %0,%1"			\
		     : : "r" (~(mask)), "m" (*(addr)) : "memory")

#define atomic_set_mask(mask, addr)				\
	asm volatile(LOCK_PREFIX "orl %0,%1"			\
		     : : "r" (mask), "m" (*(addr)) : "memory")

/* Atomic operations are already serializing on x86 */
#define smp_mb__before_atomic_dec()	barrier()
#define smp_mb__after_atomic_dec()	barrier()
#define smp_mb__before_atomic_inc()	barrier()
#define smp_mb__after_atomic_inc()	barrier()
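
/*
 * Rationale: a locked x86 instruction already acts as a full memory
 * barrier, so the macros above only have to stop the compiler from
 * reordering; barrier() is a pure compiler barrier with no runtime cost.
 */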

/* A 64bit atomic type */

typedef struct {
	unsigned long long counter;
} atomic64_t;

#define ATOMIC64_INIT(val)	{ (val) }

/**
 * __atomic64_read - read atomic64 variable
 * @ptr: pointer of type atomic64_t
 *
 * Reads the value of @ptr as a plain 64-bit load, which can tear
 * against a concurrent writer and doesn't imply a read memory barrier.
 * Use atomic64_read() below for an atomic snapshot.
 */
#define __atomic64_read(ptr)		((ptr)->counter)

static inline unsigned long long
cmpxchg8b(unsigned long long *ptr, unsigned long long old, unsigned long long new)
{
	asm volatile(LOCK_PREFIX "cmpxchg8b (%[ptr])\n"
		     : "+A" (old)			/* edx:eax: expected value in, memory value out */
		     : [ptr] "D" (ptr),
		       "b" ((unsigned int)new),		/* ecx:ebx: replacement value */
		       "c" ((unsigned int)(new >> 32))
		     : "memory");

	return old;
}
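
/*
 * cmpxchg8b compares edx:eax with the 64-bit memory operand; on a match
 * it stores ecx:ebx into memory, otherwise it loads the current memory
 * value into edx:eax. Either way "old" comes back holding what was in
 * memory, which is what lets the retry loops below detect a lost race.
 */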

static inline unsigned long long
atomic64_cmpxchg(atomic64_t *ptr, unsigned long long old_val,
		 unsigned long long new_val)
{
	return cmpxchg8b(&ptr->counter, old_val, new_val);
}

/**
 * atomic64_xchg - xchg atomic64 variable
 * @ptr:      pointer to type atomic64_t
 * @new_val:  value to assign
 *
 * Atomically xchgs the value of @ptr to @new_val and returns
 * the old value.
 */
static inline unsigned long long
atomic64_xchg(atomic64_t *ptr, unsigned long long new_val)
{
	unsigned long long old_val;

	do {
		/* A torn read here only costs an extra trip around the loop. */
		old_val = __atomic64_read(ptr);
	} while (atomic64_cmpxchg(ptr, old_val, new_val) != old_val);

	return old_val;
}
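
/*
 * The do/while above is the standard compare-exchange retry loop: take a
 * snapshot, try to install new_val on top of exactly that snapshot, and
 * go around again if another CPU changed the counter in between. The
 * same shape recurs in atomic64_read() and atomic64_add_return() below.
 */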

/**
 * atomic64_set - set atomic64 variable
 * @ptr:      pointer to type atomic64_t
 * @new_val:  value to assign
 *
 * Atomically sets the value of @ptr to @new_val.
 */
static inline void atomic64_set(atomic64_t *ptr, unsigned long long new_val)
{
	atomic64_xchg(ptr, new_val);
}

/**
 * atomic64_read - read atomic64 variable
 * @ptr: pointer to type atomic64_t
 *
 * Atomically reads the value of @ptr and returns it.
 */
static inline unsigned long long atomic64_read(atomic64_t *ptr)
{
	unsigned long long curr_val;

	do {
		curr_val = __atomic64_read(ptr);
	} while (atomic64_cmpxchg(ptr, curr_val, curr_val) != curr_val);

	return curr_val;
}
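
/*
 * Why a cmpxchg just to read: a plain 64-bit load can tear against a
 * concurrent writer on 32-bit x86. cmpxchg8b with old == new either
 * confirms the snapshot (rewriting the identical value) or hands back
 * the actual current value, yielding an atomic 64-bit read.
 */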

/**
 * atomic64_add_return - add and return
 * @delta: integer value to add
 * @ptr:   pointer to type atomic64_t
 *
 * Atomically adds @delta to @ptr and returns @delta + *@ptr
 */
static inline unsigned long long
atomic64_add_return(unsigned long long delta, atomic64_t *ptr)
{
	unsigned long long old_val, new_val;

	do {
		old_val = __atomic64_read(ptr);
		new_val = old_val + delta;
	} while (atomic64_cmpxchg(ptr, old_val, new_val) != old_val);

	return new_val;
}

/* These must return the full 64 bits; a 32-bit long would truncate. */
static inline unsigned long long
atomic64_sub_return(unsigned long long delta, atomic64_t *ptr)
{
	return atomic64_add_return(-delta, ptr);
}

static inline unsigned long long atomic64_inc_return(atomic64_t *ptr)
{
	return atomic64_add_return(1, ptr);
}

static inline unsigned long long atomic64_dec_return(atomic64_t *ptr)
{
	return atomic64_sub_return(1, ptr);
}

/**
 * atomic64_add - add integer to atomic64 variable
 * @delta: integer value to add
 * @ptr:   pointer to type atomic64_t
 *
 * Atomically adds @delta to @ptr.
 */
static inline void atomic64_add(unsigned long long delta, atomic64_t *ptr)
{
	atomic64_add_return(delta, ptr);
}

/**
 * atomic64_sub - subtract integer from atomic64 variable
 * @delta: integer value to subtract
 * @ptr:   pointer to type atomic64_t
 *
 * Atomically subtracts @delta from @ptr.
 */
static inline void atomic64_sub(unsigned long long delta, atomic64_t *ptr)
{
	atomic64_add(-delta, ptr);
}

/**
 * atomic64_sub_and_test - subtract value from variable and test result
 * @delta: integer value to subtract
 * @ptr:   pointer to type atomic64_t
 *
 * Atomically subtracts @delta from @ptr and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline int
atomic64_sub_and_test(unsigned long long delta, atomic64_t *ptr)
{
	unsigned long long new_val = atomic64_sub_return(delta, ptr);

	return new_val == 0;
}

/**
 * atomic64_inc - increment atomic64 variable
 * @ptr: pointer to type atomic64_t
 *
 * Atomically increments @ptr by 1.
 */
static inline void atomic64_inc(atomic64_t *ptr)
{
	atomic64_add(1, ptr);
}

/**
 * atomic64_dec - decrement atomic64 variable
 * @ptr: pointer to type atomic64_t
 *
 * Atomically decrements @ptr by 1.
 */
static inline void atomic64_dec(atomic64_t *ptr)
{
	atomic64_sub(1, ptr);
}

/**
 * atomic64_dec_and_test - decrement and test
 * @ptr: pointer to type atomic64_t
 *
 * Atomically decrements @ptr by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline int atomic64_dec_and_test(atomic64_t *ptr)
{
	return atomic64_sub_and_test(1, ptr);
}

/**
 * atomic64_inc_and_test - increment and test
 * @ptr: pointer to type atomic64_t
 *
 * Atomically increments @ptr by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline int atomic64_inc_and_test(atomic64_t *ptr)
{
	/* Subtracting -1 increments, reusing the zero test above. */
	return atomic64_sub_and_test(-1, ptr);
}

/**
 * atomic64_add_negative - add and test if negative
 * @delta: integer value to add
 * @ptr:   pointer to type atomic64_t
 *
 * Atomically adds @delta to @ptr and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline int
atomic64_add_negative(unsigned long long delta, atomic64_t *ptr)
{
	long long new_val = atomic64_add_return(delta, ptr);

	return new_val < 0;
}
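
/*
 * Illustrative sketch: a 64-bit statistics counter that can't wrap in
 * practice; "bytes_total" and "len" are hypothetical names.
 *
 *	static atomic64_t bytes_total = ATOMIC64_INIT(0);
 *
 *	atomic64_add(len, &bytes_total);
 *	total = atomic64_read(&bytes_total);
 */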

#include <asm-generic/atomic-long.h>

#endif /* _ASM_X86_ATOMIC_32_H */