arch/mips/include/asm/atomic.h
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <linux/types.h>
#include <asm/barrier.h>
#include <asm/cpu-features.h>
#include <asm/war.h>
#include <asm/system.h>

#define ATOMIC_INIT(i)    { (i) }
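
/*
 * Illustrative example (not part of the original header): a counter is
 * typically declared and statically initialised like this; 'pkt_count'
 * is a hypothetical name.
 *
 *      static atomic_t pkt_count = ATOMIC_INIT(0);
 */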

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)          (*(volatile int *)&(v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v, i)                ((v)->counter = (i))
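
/*
 * Illustrative example: run-time initialisation and a plain read; the
 * variable 'nr_active' is hypothetical.
 *
 *      atomic_set(&nr_active, 0);
 *      ...
 *      int cur = atomic_read(&nr_active);
 */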

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (kernel_uses_llsc) {
                int temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       ll      %0, %1          # atomic_add    \n"
                        "       addu    %0, %2                          \n"
                        "       sc      %0, %1                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "=m" (v->counter)
                        : "Ir" (i), "m" (v->counter));
                } while (unlikely(!temp));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter += i;
                raw_local_irq_restore(flags);
        }
}
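
/*
 * Illustrative example: atomic_add() is the plain, non-returning form used
 * when only the update matters; 'rx_bytes' and 'len' are hypothetical.
 *
 *      atomic_add(len, &rx_bytes);
 */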

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (kernel_uses_llsc) {
                int temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       ll      %0, %1          # atomic_sub    \n"
                        "       subu    %0, %2                          \n"
                        "       sc      %0, %1                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "=m" (v->counter)
                        : "Ir" (i), "m" (v->counter));
                } while (unlikely(!temp));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter -= i;
                raw_local_irq_restore(flags);
        }
}

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
        int result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                int temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       ll      %1, %2  # atomic_add_return     \n"
                        "       addu    %0, %1, %3                      \n"
                        "       sc      %0, %2                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                        : "Ir" (i), "m" (v->counter)
                        : "memory");
                } while (unlikely(!result));

                result = temp + i;
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}
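
/*
 * Illustrative example: the *_return variants are used when the caller
 * needs the new value, e.g. to enforce a limit; 'nr_requests' and
 * 'MAX_REQUESTS' are hypothetical.
 *
 *      if (atomic_add_return(1, &nr_requests) > MAX_REQUESTS)
 *              atomic_dec(&nr_requests);
 */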

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
        int result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");

                result = temp - i;
        } else if (kernel_uses_llsc) {
                int temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       ll      %1, %2  # atomic_sub_return     \n"
                        "       subu    %0, %1, %3                      \n"
                        "       sc      %0, %2                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                        : "Ir" (i), "m" (v->counter)
                        : "memory");
                } while (unlikely(!result));

                result = temp - i;
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        int result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                int temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}
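
/*
 * Illustrative example: atomic_sub_if_positive() implements "take a credit
 * only if one is available"; a negative return means nothing was taken.
 * 'credits' is a hypothetical counter.
 *
 *      if (atomic_sub_if_positive(1, &credits) < 0)
 *              return -EBUSY;
 */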

#define atomic_cmpxchg(v, o, n) (cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
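
/*
 * Illustrative example: atomic_xchg() returns the previous value, which can
 * be used to claim a one-shot flag; both names below are hypothetical.
 *
 *      if (atomic_xchg(&triggered, 1) == 0)
 *              first_caller_work();
 */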

/**
 * __atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns the old value of @v.
 */
static __inline__ int __atomic_add_unless(atomic_t *v, int a, int u)
{
        int c, old;
        c = atomic_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c;
}
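
/*
 * Illustrative example: take a reference only while the count is non-zero;
 * the old value is returned, so 0 means the object was already dead.
 * 'refcnt' is a hypothetical field.
 *
 *      if (__atomic_add_unless(&obj->refcnt, 1, 0) == 0)
 *              return NULL;
 */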

#define atomic_dec_return(v) atomic_sub_return(1, (v))
#define atomic_inc_return(v) atomic_add_return(1, (v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i, v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
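
/*
 * Illustrative example: the classic reference-count release pattern built
 * on atomic_dec_and_test(); 'refcnt' and 'release_obj' are hypothetical.
 *
 *      if (atomic_dec_and_test(&obj->refcnt))
 *              release_obj(obj);
 */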

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)       atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1, (v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1, (v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i, v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)        (*(volatile long *)&(v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v, i)      ((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (kernel_uses_llsc) {
                long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       lld     %0, %1          # atomic64_add  \n"
                        "       daddu   %0, %2                          \n"
                        "       scd     %0, %1                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "=m" (v->counter)
                        : "Ir" (i), "m" (v->counter));
                } while (unlikely(!temp));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter += i;
                raw_local_irq_restore(flags);
        }
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (kernel_uses_llsc) {
                long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       lld     %0, %1          # atomic64_sub  \n"
                        "       dsubu   %0, %2                          \n"
                        "       scd     %0, %1                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (temp), "=m" (v->counter)
                        : "Ir" (i), "m" (v->counter));
                } while (unlikely(!temp));
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                v->counter -= i;
                raw_local_irq_restore(flags);
        }
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
        long result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       lld     %1, %2  # atomic64_add_return   \n"
                        "       daddu   %0, %1, %3                      \n"
                        "       scd     %0, %2                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                        : "Ir" (i), "m" (v->counter)
                        : "memory");
                } while (unlikely(!result));

                result = temp + i;
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
        long result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                long temp;

                do {
                        __asm__ __volatile__(
                        "       .set    mips3                           \n"
                        "       lld     %1, %2  # atomic64_sub_return   \n"
                        "       dsubu   %0, %1, %3                      \n"
                        "       scd     %0, %2                          \n"
                        "       .set    mips0                           \n"
                        : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                        : "Ir" (i), "m" (v->counter)
                        : "memory");
                } while (unlikely(!result));

                result = temp - i;
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        long result;

        smp_mb__before_llsc();

        if (kernel_uses_llsc && R10000_LLSC_WAR) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (kernel_uses_llsc) {
                long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                raw_local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                raw_local_irq_restore(flags);
        }

        smp_llsc_mb();

        return result;
}

#define atomic64_cmpxchg(v, o, n) \
        ((__typeof__((v)->counter))cmpxchg(&((v)->counter), (o), (n)))
#define atomic64_xchg(v, new) (xchg(&((v)->counter), (new)))

/**
 * atomic64_add_unless - add unless the number is a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if the addition was performed (@v was not @u),
 * and zero otherwise.
 */
static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
{
        long c, old;
        c = atomic64_read(v);
        for (;;) {
                if (unlikely(c == (u)))
                        break;
                old = atomic64_cmpxchg((v), c, c + (a));
                if (likely(old == c))
                        break;
                c = old;
        }
        return c != (u);
}

#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
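
/*
 * Illustrative example: grab a reference on a 64-bit count only while it is
 * non-zero; 'users' is a hypothetical atomic64_t field.
 *
 *      if (!atomic64_inc_not_zero(&obj->users))
 *              return -ENOENT;
 */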

#define atomic64_dec_return(v) atomic64_sub_return(1, (v))
#define atomic64_inc_return(v) atomic64_add_return(1, (v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i, v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)     atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1, (v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1, (v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i, v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions.
 */
#define smp_mb__before_atomic_dec()     smp_mb__before_llsc()
#define smp_mb__after_atomic_dec()      smp_llsc_mb()
#define smp_mb__before_atomic_inc()     smp_mb__before_llsc()
#define smp_mb__after_atomic_inc()      smp_llsc_mb()
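
/*
 * Illustrative example: ordering a store against a subsequent atomic_dec()
 * that another CPU may observe; 'done' and 'pending' are hypothetical.
 *
 *      done = 1;
 *      smp_mb__before_atomic_dec();
 *      atomic_dec(&pending);
 */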

#endif /* _ASM_ATOMIC_H */