/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04 by Ralf Baechle
 */

/*
 * As a workaround for the ATOMIC_DEC_AND_LOCK / atomic_dec_and_lock mess in
 * <linux/spinlock.h> we have to include <linux/spinlock.h> outside the
 * main big wrapper ...
 */
#include <linux/config.h>
#include <linux/spinlock.h>

#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <asm/cpu-features.h>
#include <asm/war.h>

extern spinlock_t atomic_lock;

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)          ((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)         ((v)->counter = (i))
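
/*
 * Usage sketch (illustrative only, not part of this header; the
 * identifiers are made up):
 *
 *      static atomic_t nr_packets = ATOMIC_INIT(0);
 *
 *      atomic_set(&nr_packets, 42);
 *      if (atomic_read(&nr_packets) > 40)
 *              printk("busy\n");
 *
 * Note that atomic_read/atomic_set are plain accesses; only the
 * read-modify-write operations below need special treatment.
 */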

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips2                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips2                                   \n"
                "1:     ll      %0, %1          # atomic_add            \n"
                "       addu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                v->counter += i;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }
}
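
/*
 * A note on the pattern above (informal sketch): ll loads the counter
 * and tags the word; sc stores only if nothing else wrote the word in
 * between, leaving 1 in its source register on success and 0 on
 * failure, so the beqz/beqzl loop simply retries.  Roughly, assuming
 * hypothetical load_linked()/store_conditional() helpers:
 *
 *      int old;
 *      do {
 *              old = load_linked(&v->counter);
 *      } while (!store_conditional(&v->counter, old + i));
 *
 * The branch-likely form (beqzl) is only used for the ll/sc workaround
 * on early R10000 processors selected by R10000_LLSC_WAR.
 */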

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips2                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips2                                   \n"
                "1:     ll      %0, %1          # atomic_sub            \n"
                "       subu    %0, %2                                  \n"
                "       sc      %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                v->counter -= i;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }
}

/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips2                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips2                                   \n"
                "1:     ll      %1, %2          # atomic_add_return     \n"
                "       addu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       addu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                result = v->counter;
                result += i;
                v->counter = result;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }

        return result;
}
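
/*
 * Usage sketch (illustrative only; the names are made up): the returned
 * value makes read-modify-write decisions race-free, e.g. handing out
 * unique slot numbers 0, 1, 2, ... to concurrent callers:
 *
 *      static atomic_t next_slot = ATOMIC_INIT(-1);
 *
 *      int claim_slot(void)
 *      {
 *              return atomic_add_return(1, &next_slot);
 *      }
 */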

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips2                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips2                                   \n"
                "1:     ll      %1, %2          # atomic_sub_return     \n"
                "       subu    %0, %1, %3                              \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }

        return result;
}

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if the result is greater than or
 * equal to zero.  The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
        int result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips2                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips2                                   \n"
                "1:     ll      %1, %2          # atomic_sub_if_positive\n"
                "       subu    %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       sc      %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       subu    %0, %1, %3                              \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }

        return result;
}
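
/*
 * Usage sketch (illustrative only; the names are made up): a pool of
 * credits that must never go below zero:
 *
 *      static atomic_t credits = ATOMIC_INIT(8);
 *
 *      int try_take_credit(void)
 *      {
 *              return atomic_sub_if_positive(1, &credits) >= 0;
 *      }
 *
 * A failed attempt leaves the counter untouched and yields a negative
 * return value.
 */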

#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
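
/*
 * Usage sketch (illustrative only; the names are made up): the classic
 * reference count, where whoever drops the last reference frees the
 * object:
 *
 *      struct foo {
 *              atomic_t refcnt;
 *      };
 *
 *      void foo_put(struct foo *f)
 *      {
 *              if (atomic_dec_and_test(&f->refcnt))
 *                      kfree(f);
 *      }
 */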

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)       atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)        ((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i)       ((v)->counter = (i))
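
/*
 * Usage sketch (illustrative only; the names are made up): a 64-bit
 * counter for quantities that may overflow 32 bits, such as a running
 * byte total:
 *
 *      static atomic64_t total_bytes = ATOMIC64_INIT(0);
 *
 *      atomic64_add(len, &total_bytes);
 */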

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_add          \n"
                "       daddu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                v->counter += i;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %0, %1          # atomic64_sub          \n"
                "       dsubu   %0, %2                                  \n"
                "       scd     %0, %1                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       .set    mips0                                   \n"
                : "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter));
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                v->counter -= i;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }
}

/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_add_return   \n"
                "       daddu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       daddu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                result = v->counter;
                result += i;
                v->counter = result;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }

        return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
        unsigned long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_return   \n"
                "       dsubu   %0, %1, %3                              \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                result = v->counter;
                result -= i;
                v->counter = result;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }

        return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if the result is greater than or
 * equal to zero.  The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
        long result;

        if (cpu_has_llsc && R10000_LLSC_WAR) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       beqzl   %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else if (cpu_has_llsc) {
                unsigned long temp;

                __asm__ __volatile__(
                "       .set    mips3                                   \n"
                "1:     lld     %1, %2          # atomic64_sub_if_positive\n"
                "       dsubu   %0, %1, %3                              \n"
                "       bltz    %0, 1f                                  \n"
                "       scd     %0, %2                                  \n"
                "       beqz    %0, 1b                                  \n"
                "       dsubu   %0, %1, %3                              \n"
                "       sync                                            \n"
                "1:                                                     \n"
                "       .set    mips0                                   \n"
                : "=&r" (result), "=&r" (temp), "=m" (v->counter)
                : "Ir" (i), "m" (v->counter)
                : "memory");
        } else {
                unsigned long flags;

                spin_lock_irqsave(&atomic_lock, flags);
                result = v->counter;
                result -= i;
                if (result >= 0)
                        v->counter = result;
                spin_unlock_irqrestore(&atomic_lock, flags);
        }

        return result;
}

#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)     atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * versions.
 */
#define smp_mb__before_atomic_dec()     smp_mb()
#define smp_mb__after_atomic_dec()      smp_mb()
#define smp_mb__before_atomic_inc()     smp_mb()
#define smp_mb__after_atomic_inc()      smp_mb()
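
/*
 * Barrier usage sketch (illustrative only; the names are made up): when
 * a plain store must be visible to other CPUs before a non-returning
 * atomic operation, bracket it explicitly:
 *
 *      obj->status = DONE;
 *      smp_mb__before_atomic_dec();
 *      atomic_dec(&obj->pending);
 *
 * The *_return variants are already serializing, so they need no such
 * bracketing.
 */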

#endif /* _ASM_ATOMIC_H */