include/asm-mips/atomic.h
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/war.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)          ((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)         ((v)->counter = (i))

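/*
 * Illustrative usage sketch, not part of the original header; the helper
 * names below are hypothetical.  A counter is initialised statically with
 * ATOMIC_INIT() or at run time with atomic_set(), and sampled with
 * atomic_read().  Neither atomic_set() nor atomic_read() implies a memory
 * barrier.
 */
static __inline__ void example_counter_init(atomic_t *counter, int initial)
{
        atomic_set(counter, initial);   /* plain store of the initial value */
}

static __inline__ int example_counter_peek(atomic_t *counter)
{
        return atomic_read(counter);    /* plain, possibly racy, load */
}
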
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter += i;
                raw_local_irq_restore(flags);
        }
}

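/*
 * Illustrative caller, not part of the original header; the name is
 * hypothetical.  atomic_add() (and atomic_sub() below) update the counter
 * without returning the new value and without acting as memory barriers.
 */
static __inline__ void example_account_rx(atomic_t *rx_packets, int batch)
{
        atomic_add(batch, rx_packets);  /* lock-free increment by 'batch' */
}
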
/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter -= i;
                raw_local_irq_restore(flags);
        }
}

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
        unsigned long result;

        smp_mb();

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_mb();

        return result;
}

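/*
 * Illustrative caller, not part of the original header; the name is
 * hypothetical.  Unlike atomic_add(), atomic_add_return() hands back the
 * new value and is fully ordered (note the smp_mb() on both sides above),
 * so it can be used to hand out unique ticket numbers.
 */
static __inline__ int example_next_ticket(atomic_t *ticket_counter)
{
        return atomic_add_return(1, ticket_counter);    /* new value */
}
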
static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
        unsigned long result;

        smp_mb();

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_mb();

        return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        int result;

        smp_mb();

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 2f                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_mb();

        return result;
}

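/*
 * Illustrative caller, not part of the original header; the name is
 * hypothetical.  atomic_sub_if_positive() only stores the new value when
 * the result does not go negative, which makes it usable as a
 * semaphore-style "try to take some credits" primitive.
 */
static __inline__ int example_try_take_credits(atomic_t *credits, int want)
{
        /* >= 0: credits were taken; < 0: too few left, counter unchanged */
        return atomic_sub_if_positive(want, credits) >= 0;
}
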
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

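/*
 * Illustrative sketch, not part of the original header; the name is
 * hypothetical.  atomic_cmpxchg() is the building block for
 * read-modify-write loops whose update policy is more complicated than
 * plain add/sub, e.g. incrementing only while below a ceiling.
 */
static __inline__ int example_inc_below(atomic_t *v, int ceiling)
{
        int old, prev;

        old = atomic_read(v);
        for (;;) {
                if (old >= ceiling)
                        return 0;                       /* already full */
                prev = atomic_cmpxchg(v, old, old + 1);
                if (prev == old)
                        return 1;                       /* we won the race */
                old = prev;                             /* lost it, retry */
        }
}
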
/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)                              \
({                                                              \
        int c, old;                                             \
        c = atomic_read(v);                                     \
        while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
                c = old;                                        \
        c != (u);                                               \
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

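/*
 * Illustrative caller, not part of the original header; the name is
 * hypothetical.  atomic_inc_not_zero() is the classic "take a reference
 * only if the object is still alive" helper: once the count has dropped to
 * zero it is never resurrected.
 */
static __inline__ int example_try_get_ref(atomic_t *refcount)
{
        return atomic_inc_not_zero(refcount);   /* non-zero on success */
}
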
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

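/*
 * Illustrative sketch, not part of the original header; the name is
 * hypothetical.  atomic_dec_and_test() is the usual way to drop a
 * reference: exactly one caller observes the transition to zero and is
 * therefore the one responsible for freeing the object.
 */
static __inline__ int example_put_ref(atomic_t *refcount)
{
        return atomic_dec_and_test(refcount);   /* true for the last put */
}
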
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)       atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)        ((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v,i)       ((v)->counter = (i))

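/*
 * Illustrative caller, not part of the original header; the name is
 * hypothetical.  The atomic64_* variants mirror the 32-bit API for
 * counters that may overflow 32 bits (e.g. byte counters) and are only
 * provided on CONFIG_64BIT kernels.
 */
static __inline__ long example_bytes_snapshot(atomic64_t *bytes)
{
        return atomic64_read(bytes);    /* plain 64-bit load, no barrier */
}
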
/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter += i;
                raw_local_irq_restore(flags);
        }
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter -= i;
                raw_local_irq_restore(flags);
        }
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
        unsigned long result;

        smp_mb();

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_mb();

        return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
        unsigned long result;

        smp_mb();

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 2f                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_mb();

        return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        long result;

        smp_mb();

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 2f                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .subsection 2                                   \n"
                "2:     b       1b                                      \n"
                "       .previous                                       \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_mb();

        return result;
}

#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)     atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions.
 */
#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()

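/*
 * Illustrative sketch, not part of the original header; the names are
 * hypothetical.  The non-value-returning atomics above are not ordering
 * operations, so a caller that must make prior stores visible before the
 * decrement is observed adds the barrier explicitly.
 */
static __inline__ void example_publish_and_dec(int *status, atomic_t *pending)
{
        *status = 1;                    /* publish the result ...           */
        smp_mb__before_atomic_dec();    /* ... order it before the dec ...  */
        atomic_dec(pending);            /* ... so waiters see both in order */
}
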
#include <asm-generic/atomic.h>
#endif /* _ASM_ATOMIC_H */