[MIPS] Remove old junk left from old atomic_lock.
[pandora-kernel.git] include/asm-mips/atomic.h
/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc..
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <asm/cpu-features.h>
#include <asm/war.h>
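
/*
 * The arithmetic operations below are implemented three ways, chosen at
 * run time: an LL/SC loop using the branch-likely form (beqzl) when
 * R10000_LLSC_WAR is set, a plain LL/SC loop (beqz) when the CPU has
 * LL/SC, and a fallback that disables interrupts for CPUs without LL/SC.
 */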

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)          ((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)         ((v)->counter = (i))

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                local_irq_save(flags);
                v->counter += i;
                local_irq_restore(flags);
        }
}
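
/*
 * Illustrative use only (example_count is a hypothetical counter, not
 * part of this API):
 *
 *      static atomic_t example_count = ATOMIC_INIT(0);
 *
 *      atomic_add(5, &example_count);
 *      atomic_inc(&example_count);
 *      printk("count = %d\n", atomic_read(&example_count));
 */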

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                local_irq_save(flags);
                v->counter -= i;
                local_irq_restore(flags);
        }
}

/*
 * Same as above, but return the result value
 */
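/*
 * Note: sc overwrites %0 with its success flag, so on success the addu
 * (or subu) is executed a second time after the loop to recompute the
 * value that is returned; the trailing sync provides the barrier that
 * makes the *_return variants serializing (see the note at the end of
 * this file).
 */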
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
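/*
 * Note: when the new value would be negative, the bltz branches forward
 * past the sc to the final "1:" label, so the store is skipped and the
 * counter is left unchanged; the negative result is still returned to
 * the caller.
 */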
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        int result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        subu   %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))

/**
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)                              \
({                                                              \
        int c, old;                                             \
        c = atomic_read(v);                                     \
        while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
                c = old;                                        \
        c != (u);                                               \
})
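/*
 * The loop above re-reads the counter whenever the cmpxchg observes that
 * another CPU changed it (old != c), and gives up as soon as the value
 * equals @u, so the addition happens at most once.
 */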
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)

#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)       atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }
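
/*
 * The atomic64_* operations below mirror the 32-bit versions above,
 * using lld/scd and doubleword arithmetic (daddu/dsubu) on the 64-bit
 * counter, with the same three code paths selected by cpu_has_llsc and
 * R10000_LLSC_WAR.
 */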

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)        ((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 */
#define atomic64_set(v,i)       ((v)->counter = (i))

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                local_irq_save(flags);
                v->counter += i;
                local_irq_restore(flags);
        }
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                local_irq_save(flags);
                v->counter -= i;
                local_irq_restore(flags);
        }
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result += i;
                v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqzl   %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       .set    noreorder                               \n"
                "       beqz    %0, 1b                                  \n"
                "        dsubu  %0, %1, %3                              \n"
                "       .set    reorder                                 \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                local_irq_save(flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                local_irq_restore(flags);
        }

        return result;
}

#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)     atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions.
 */
#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()
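
/*
 * Because the plain atomic_add/atomic_sub (and inc/dec) variants do not
 * include a barrier, callers that need ordering around them use the
 * smp_mb__*() macros above, which expand to a full smp_mb() here.
 */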

#include <asm-generic/atomic.h>
#endif /* _ASM_ATOMIC_H */