1 /* MN10300 Atomic counter operations
3 * Copyright (C) 2007 Red Hat, Inc. All Rights Reserved.
4 * Written by David Howells (dhowells@redhat.com)
6 * This program is free software; you can redistribute it and/or
7 * modify it under the terms of the GNU General Public Licence
8 * as published by the Free Software Foundation; either version
9 * 2 of the Licence, or (at your option) any later version.
14 #include <asm/irqflags.h>
19 #ifdef CONFIG_MN10300_HAS_ATOMIC_OPS_UNIT
/*
 * __xchg (SMP): atomically exchange the word at *m with val, apparently
 * returning the previous contents of *m (read into oldval, %1).  The
 * sequence drives the MN10300 atomic-operations unit through its
 * memory-mapped registers: the target address is latched into _AAR, the
 * old value is read back through _ADR, the new value is written through
 * _ADR, and _ASR is read into status (%0) — presumably a success/retry
 * flag that a (not visible here) branch tests to re-run from label 1.
 *
 * NOTE(review): this listing is a sampled excerpt — the opening brace,
 * the local declarations of status/oldval, the asm statement header,
 * the _ASR retry branch and the function epilogue are missing, so the
 * surviving lines are left byte-identical.
 */
21 unsigned long __xchg(volatile unsigned long *m, unsigned long val)
27 "1: mov %4,(_AAR,%3) \n" /* latch target address (%4 = m) */
28 " mov (_ADR,%3),%1 \n" /* read old value */
29 " mov %5,(_ADR,%3) \n" /* write new value (%5 = val) */
30 " mov (_ADR,%3),%0 \n" /* flush */
31 " mov (_ASR,%3),%0 \n" /* read status — retry check not visible */
34 : "=&r"(status), "=&r"(oldval), "=m"(*m)
35 : "a"(ATOMIC_OPS_BASE_ADDR), "r"(m), "r"(val)
/*
 * __cmpxchg (SMP): compare-and-exchange via the atomic-operations unit.
 * The address is latched into _AAR and the current value read through
 * _ADR into oldval (%1); the comparison of oldval against 'old' and the
 * branch to label 2 that skips the store are NOT visible in this
 * excerpt (original lines between the load and the conditional store
 * are missing).  On a match, 'new' (%6) is written back through _ADR;
 * _ASR is then read into status (%0), presumably for a retry test.
 * Apparently returns oldval so callers can detect success (== old).
 *
 * NOTE(review): sampled excerpt — braces, locals, asm header, the
 * cmp/branch pair and the tail of the input-operand list are missing;
 * surviving lines left byte-identical.
 */
41 static inline unsigned long __cmpxchg(volatile unsigned long *m,
42 unsigned long old, unsigned long new)
48 "1: mov %4,(_AAR,%3) \n" /* latch target address (%4 = m) */
49 " mov (_ADR,%3),%1 \n" /* read current value */
52 " mov %6,(_ADR,%3) \n" /* store new — guarded by missing compare */
53 "2: mov (_ADR,%3),%0 \n" /* flush */
54 " mov (_ASR,%3),%0 \n" /* read status register */
57 : "=&r"(status), "=&r"(oldval), "=m"(*m)
58 : "a"(ATOMIC_OPS_BASE_ADDR), "r"(m),
64 #else /* CONFIG_MN10300_HAS_ATOMIC_OPS_UNIT */
65 #error "No SMP atomic operation support!"
66 #endif /* CONFIG_MN10300_HAS_ATOMIC_OPS_UNIT */
68 #else /* CONFIG_SMP */
71 * Emulate xchg for non-SMP MN10300
/*
 * Classic kernel trick: casting a pointer to (struct __xchg_dummy *)
 * and dereferencing it in an "m" constraint tells GCC that a large
 * region (100 words) behind the pointer may be accessed, preventing
 * it from caching that memory across the operation.
 */
73 struct __xchg_dummy { unsigned long a[100]; };
74 #define __xg(x) ((struct __xchg_dummy *)(x))
/*
 * __xchg (UP build): emulate atomic exchange by disabling interrupts
 * around a plain read/modify/write of *m.  Sufficient on uniprocessor
 * MN10300 since the only concurrency is from interrupt handlers.
 *
 * NOTE(review): sampled excerpt — the load of the old value, the store
 * of val, and the return are missing between the cli/restore pair.
 */
77 unsigned long __xchg(volatile unsigned long *m, unsigned long val)
82 flags = arch_local_cli_save();
85 arch_local_irq_restore(flags);
90 * Emulate cmpxchg for non-SMP MN10300
/*
 * __cmpxchg (UP build): emulate compare-and-exchange with interrupts
 * disabled: read *m, and (in lines missing from this excerpt) store
 * 'new' only if the value read equals 'old'; return the value read.
 *
 * NOTE(review): sampled excerpt — the compare/store/return body
 * between the cli/restore pair is not visible here.
 */
92 static inline unsigned long __cmpxchg(volatile unsigned long *m,
93 unsigned long old, unsigned long new)
98 flags = arch_local_cli_save();
102 arch_local_irq_restore(flags);
106 #endif /* CONFIG_SMP */
/*
 * Type-generic wrappers: xchg()/cmpxchg() cast any word-sized object
 * to unsigned long for the __xchg/__cmpxchg workers and cast the
 * result back to the pointee type.  atomic_xchg()/atomic_cmpxchg()
 * apply them to the ->counter field of an atomic_t.
 *
 * NOTE(review): the continuation lines of the xchg and cmpxchg macro
 * bodies (the value casts and closing parens) are missing from this
 * sampled excerpt; surviving lines left byte-identical.
 */
108 #define xchg(ptr, v) \
109 ((__typeof__(*(ptr))) __xchg((unsigned long *)(ptr), \
112 #define cmpxchg(ptr, o, n) \
113 ((__typeof__(*(ptr))) __cmpxchg((unsigned long *)(ptr), \
114 (unsigned long)(o), \
117 #define atomic_xchg(ptr, v) (xchg(&(ptr)->counter, (v)))
118 #define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), (old), (new)))
120 #endif /* !__ASSEMBLY__ */
123 #include <asm-generic/atomic.h>
127 * Atomic operations that C can't guarantee us. Useful for
128 * resource counting etc..
/*
 * Basic atomic_t support: static initializer plus plain (unlocked)
 * read and write accessors — a single aligned word access is naturally
 * atomic, so no locking is needed for read/set themselves.
 */
131 #define ATOMIC_INIT(i) { (i) }
136 * atomic_read - read atomic variable
137 * @v: pointer of type atomic_t
139 * Atomically reads the value of @v. Note that the guaranteed
140 * useful range of an atomic_t is only 24 bits.
142 #define atomic_read(v) ((v)->counter)
145 * atomic_set - set atomic variable
146 * @v: pointer of type atomic_t
149 * Atomically sets the value of @v to @i. Note that the guaranteed
150 * useful range of an atomic_t is only 24 bits.
152 #define atomic_set(v, i) (((v)->counter) = (i))
155 * atomic_add_return - add integer to atomic variable
156 * @i: integer value to add
157 * @v: pointer of type atomic_t
159 * Atomically adds @i to @v and returns the result
160 * Note that the guaranteed useful range of an atomic_t is only 24 bits.
/*
 * atomic_add_return: atomically add i to v->counter and return the new
 * value.  Two build variants appear interleaved in this excerpt:
 *  - SMP: drive the atomic-operations unit — latch &v->counter into
 *    _AAR, read the old value into retval (%1), add i (the add
 *    instruction itself is missing from this excerpt), write the sum
 *    back through _ADR, and read _ASR for a retry/status check.
 *  - UP: the cli/restore pair presumably brackets a plain
 *    add-and-store with interrupts disabled.
 *
 * NOTE(review): sampled excerpt — braces, locals, asm header, the add
 * instruction, the retry branch and the return are not visible.
 */
162 static inline int atomic_add_return(int i, atomic_t *v)
169 "1: mov %4,(_AAR,%3) \n" /* latch &v->counter */
170 " mov (_ADR,%3),%1 \n" /* read old value */
172 " mov %1,(_ADR,%3) \n" /* write back sum (add not visible) */
173 " mov (_ADR,%3),%0 \n" /* flush */
174 " mov (_ASR,%3),%0 \n" /* read status register */
177 : "=&r"(status), "=&r"(retval), "=m"(v->counter)
178 : "a"(ATOMIC_OPS_BASE_ADDR), "r"(&v->counter), "r"(i)
184 flags = arch_local_cli_save();
188 arch_local_irq_restore(flags);
194 * atomic_sub_return - subtract integer from atomic variable
195 * @i: integer value to subtract
196 * @v: pointer of type atomic_t
198 * Atomically subtracts @i from @v and returns the result
199 * Note that the guaranteed useful range of an atomic_t is only 24 bits.
/*
 * atomic_sub_return: atomically subtract i from v->counter and return
 * the new value.  Mirror image of atomic_add_return: SMP variant uses
 * the atomic-operations unit (the subtract instruction between the
 * load and the write-back is missing from this excerpt); UP variant
 * presumably performs the subtract with interrupts disabled between
 * the cli/restore pair.
 *
 * NOTE(review): sampled excerpt — braces, locals, asm header, the sub
 * instruction, the retry branch and the return are not visible.
 */
201 static inline int atomic_sub_return(int i, atomic_t *v)
208 "1: mov %4,(_AAR,%3) \n" /* latch &v->counter */
209 " mov (_ADR,%3),%1 \n" /* read old value */
211 " mov %1,(_ADR,%3) \n" /* write back difference (sub not visible) */
212 " mov (_ADR,%3),%0 \n" /* flush */
213 " mov (_ASR,%3),%0 \n" /* read status register */
216 : "=&r"(status), "=&r"(retval), "=m"(v->counter)
217 : "a"(ATOMIC_OPS_BASE_ADDR), "r"(&v->counter), "r"(i)
222 flags = arch_local_cli_save();
226 arch_local_irq_restore(flags);
/*
 * atomic_add_negative: atomically add i to v and return true (nonzero)
 * iff the resulting value is negative.  Built on atomic_add_return.
 */
231 static inline int atomic_add_negative(int i, atomic_t *v)
233 return atomic_add_return(i, v) < 0;
/* atomic_add: add i to v, discarding the result (wrapper around
 * atomic_add_return). */
236 static inline void atomic_add(int i, atomic_t *v)
238 atomic_add_return(i, v);
/* atomic_sub: subtract i from v, discarding the result (wrapper around
 * atomic_sub_return). */
241 static inline void atomic_sub(int i, atomic_t *v)
243 atomic_sub_return(i, v);
/* atomic_inc: increment v by one, discarding the result. */
246 static inline void atomic_inc(atomic_t *v)
248 atomic_add_return(1, v);
/* atomic_dec: decrement v by one, discarding the result. */
251 static inline void atomic_dec(atomic_t *v)
253 atomic_sub_return(1, v);
/*
 * Derived helpers, all expressed via atomic_{add,sub}_return and
 * atomic_cmpxchg:
 *  - *_return / *_and_test variants test the post-operation value;
 *  - atomic_add_unless(v, a, u): cmpxchg loop that adds a to v unless
 *    v already holds u — re-reads and retries while the cmpxchg is
 *    beaten by a concurrent update;
 *  - atomic_inc_not_zero: increment unless the counter is zero.
 *
 * NOTE(review): sampled excerpt — the statement-expression wrapper and
 * result lines of atomic_add_unless (locals c/old, final value) are
 * missing; surviving lines left byte-identical.
 */
256 #define atomic_dec_return(v) atomic_sub_return(1, (v))
257 #define atomic_inc_return(v) atomic_add_return(1, (v))
259 #define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)
260 #define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
261 #define atomic_inc_and_test(v) (atomic_add_return(1, (v)) == 0)
263 #define atomic_add_unless(v, a, u) \
266 c = atomic_read(v); \
267 while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
272 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
275 * atomic_clear_mask - Atomically clear bits in memory
276 * @mask: Mask of the bits to be cleared
277 * @v: pointer to word in memory
279 * Atomically clears the bits set in mask from the memory word specified.
/*
 * atomic_clear_mask: atomically perform *addr &= ~mask.  SMP variant
 * feeds ~mask (%4) to the atomic-operations unit: latch addr into
 * _AAR, read the current word into %0, AND it with ~mask (the AND
 * instruction is missing from this excerpt), write the result back
 * through _ADR, and read _ASR for a retry/status check.  UP variant
 * presumably does the same read-AND-write between the cli/restore
 * pair with interrupts disabled.
 *
 * NOTE(review): sampled excerpt — braces, locals, asm header, the AND
 * instruction and the retry branch are not visible.
 */
281 static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
287 "1: mov %3,(_AAR,%2) \n" /* latch target address */
288 " mov (_ADR,%2),%0 \n" /* read current word */
290 " mov %0,(_ADR,%2) \n" /* write back masked value (AND not visible) */
291 " mov (_ADR,%2),%0 \n" /* flush */
292 " mov (_ASR,%2),%0 \n" /* read status register */
295 : "=&r"(status), "=m"(*addr)
296 : "a"(ATOMIC_OPS_BASE_ADDR), "r"(addr), "r"(~mask)
302 flags = arch_local_cli_save();
304 arch_local_irq_restore(flags);
309 * atomic_set_mask - Atomically set bits in memory
310 * @mask: Mask of the bits to be set
311 * @v: pointer to word in memory
313 * Atomically sets the bits set in mask from the memory word specified.
/*
 * atomic_set_mask: atomically perform *addr |= mask.  Mirror image of
 * atomic_clear_mask, passing mask (%4) directly instead of ~mask; the
 * OR instruction between the load and the write-back is missing from
 * this excerpt.  UP variant presumably ORs under cli/restore.
 *
 * NOTE(review): sampled excerpt — braces, locals, asm header, the OR
 * instruction and the retry branch are not visible.
 */
315 static inline void atomic_set_mask(unsigned long mask, unsigned long *addr)
321 "1: mov %3,(_AAR,%2) \n" /* latch target address */
322 " mov (_ADR,%2),%0 \n" /* read current word */
324 " mov %0,(_ADR,%2) \n" /* write back ORed value (OR not visible) */
325 " mov (_ADR,%2),%0 \n" /* flush */
326 " mov (_ASR,%2),%0 \n" /* read status register */
329 : "=&r"(status), "=m"(*addr)
330 : "a"(ATOMIC_OPS_BASE_ADDR), "r"(addr), "r"(mask)
335 flags = arch_local_cli_save();
337 arch_local_irq_restore(flags);
/*
 * Memory-ordering hooks around atomic inc/dec: defined as plain
 * compiler barriers on the assumption (flagged with "???" by the
 * original author — evidently unverified) that the atomic operations
 * themselves already serialize on MN10300, so no hardware barrier is
 * needed.
 */
341 /* Atomic operations are already serializing on MN10300??? */
342 #define smp_mb__before_atomic_dec() barrier()
343 #define smp_mb__after_atomic_dec() barrier()
344 #define smp_mb__before_atomic_inc() barrier()
345 #define smp_mb__after_atomic_inc() barrier()
347 #include <asm-generic/atomic-long.h>
349 #endif /* __KERNEL__ */
350 #endif /* CONFIG_SMP */
351 #endif /* _ASM_ATOMIC_H */