/* system.h: FR-V CPU control definitions
 *
 * Copyright (C) 2003 Red Hat, Inc. All Rights Reserved.
 * Written by David Howells (dhowells@redhat.com)
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version
 * 2 of the License, or (at your option) any later version.
 */

#ifndef _ASM_SYSTEM_H
#define _ASM_SYSTEM_H

#include <linux/types.h>
#include <linux/linkage.h>
#include <linux/kernel.h>

struct thread_struct;

/*
 * switch_to(prev, next, last) should switch from task `prev' to `next';
 * `prev' will never be the same as `next'.  `last' receives the task that
 * was running when control last switched back into this context.
 * The mb() is to tell GCC not to cache `current' across this call.
 */
extern asmlinkage
struct task_struct *__switch_to(struct thread_struct *prev_thread,
                                struct thread_struct *next_thread,
                                struct task_struct *prev);

#define switch_to(prev, next, last)                                     \
do {                                                                    \
        (prev)->thread.sched_lr =                                       \
                (unsigned long) __builtin_return_address(0);            \
        (last) = __switch_to(&(prev)->thread, &(next)->thread, (prev)); \
        mb();                                                           \
} while (0)
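
/*
 * Illustrative sketch only, not the real scheduler code: the core scheduler
 * invokes switch_to() along these lines (pick_next() and finish_switch()
 * are hypothetical stand-ins):
 *
 *      struct task_struct *prev = current, *next = pick_next(), *last;
 *
 *      switch_to(prev, next, last);
 *      finish_switch(last);    <-- runs only once `prev' is rescheduled;
 *                                  `last' is whichever task ran before it
 */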

/*
 * Force strict CPU ordering.
 */
#define nop()                   asm volatile ("nop"::)
#define mb()                    asm volatile ("membar" : : :"memory")
#define rmb()                   asm volatile ("membar" : : :"memory")
#define wmb()                   asm volatile ("membar" : : :"memory")
#define read_barrier_depends()  do { } while (0)

#ifdef CONFIG_SMP
#define smp_mb()                        mb()
#define smp_rmb()                       rmb()
#define smp_wmb()                       wmb()
#define smp_read_barrier_depends()      read_barrier_depends()
#define set_mb(var, value) \
        do { xchg(&var, (value)); } while (0)
#else
#define smp_mb()                        barrier()
#define smp_rmb()                       barrier()
#define smp_wmb()                       barrier()
#define smp_read_barrier_depends()      do { } while (0)
#define set_mb(var, value) \
        do { var = (value); barrier(); } while (0)
#endif
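
/*
 * Illustrative sketch only: the classic message-passing pattern the SMP
 * barriers above exist to order.  The helper names and the flag/payload
 * layout are hypothetical, not part of this header's API.
 */
static inline void __example_publish(int *data, int *ready, int value)
{
        *data = value;          /* write the payload first */
        smp_wmb();              /* order the payload before the flag */
        *ready = 1;             /* a reader seeing this sees the payload too */
}

static inline int __example_consume(const int *data, const int *ready)
{
        if (!*ready)            /* flag not set: nothing published yet */
                return -1;
        smp_rmb();              /* order the flag load before the payload load */
        return *data;
}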

extern void die_if_kernel(const char *, ...) __attribute__((format(printf, 1, 2)));
extern void free_initmem(void);

#define arch_align_stack(x) (x)

/*****************************************************************************/
/*
 * compare and conditionally exchange value with memory
 * - if (*ptr == test) then orig = *ptr; *ptr = new;
 * - if (*ptr != test) then orig = *ptr;
 */
extern uint64_t __cmpxchg_64(uint64_t test, uint64_t new, volatile uint64_t *v);

#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS

#define cmpxchg(ptr, test, new)                                                 \
({                                                                              \
        __typeof__(ptr) __xg_ptr = (ptr);                                       \
        __typeof__(*(ptr)) __xg_orig, __xg_tmp;                                 \
        __typeof__(*(ptr)) __xg_test = (test);                                  \
        __typeof__(*(ptr)) __xg_new = (new);                                    \
                                                                                \
        switch (sizeof(__xg_orig)) {                                            \
        case 4:                                                                 \
                asm volatile(                                                   \
                        "0:                                             \n"     \
                        "       orcc            gr0,gr0,gr0,icc3        \n"     \
                        "       ckeq            icc3,cc7                \n"     \
                        "       ld.p            %M0,%1                  \n"     \
                        "       orcr            cc7,cc7,cc3             \n"     \
                        "       sub%I4cc        %1,%4,%2,icc0           \n"     \
                        "       bne             icc0,#0,1f              \n"     \
                        "       cst.p           %3,%M0          ,cc3,#1 \n"     \
                        "       corcc           gr29,gr29,gr0   ,cc3,#1 \n"     \
                        "       beq             icc3,#0,0b              \n"     \
                        "1:                                             \n"     \
                        : "+U"(*__xg_ptr), "=&r"(__xg_orig), "=&r"(__xg_tmp)    \
                        : "r"(__xg_new), "NPr"(__xg_test)                       \
                        : "memory", "cc7", "cc3", "icc3", "icc0"                \
                        );                                                      \
                break;                                                          \
                                                                                \
        default:                                                                \
                __xg_orig = (__typeof__(__xg_orig))0;                           \
                asm volatile("break");                                          \
                break;                                                          \
        }                                                                       \
                                                                                \
        __xg_orig;                                                              \
})

#else

extern uint32_t __cmpxchg_32(uint32_t *v, uint32_t test, uint32_t new);

#define cmpxchg(ptr, test, new)                                                 \
({                                                                              \
        __typeof__(ptr) __xg_ptr = (ptr);                                       \
        __typeof__(*(ptr)) __xg_orig;                                           \
        __typeof__(*(ptr)) __xg_test = (test);                                  \
        __typeof__(*(ptr)) __xg_new = (new);                                    \
                                                                                \
        switch (sizeof(__xg_orig)) {                                            \
        case 4: __xg_orig = (__force __typeof__(*ptr))                          \
                        __cmpxchg_32((__force uint32_t *)__xg_ptr,              \
                                         (__force uint32_t)__xg_test,           \
                                         (__force uint32_t)__xg_new); break;    \
        default:                                                                \
                __xg_orig = (__typeof__(__xg_orig))0;                           \
                asm volatile("break");                                          \
                break;                                                          \
        }                                                                       \
                                                                                \
        __xg_orig;                                                              \
})

#endif
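
/*
 * Illustrative sketch only: the usual retry loop callers build on top of
 * cmpxchg().  The function name is hypothetical, not part of the FRV API.
 */
static inline uint32_t __example_cmpxchg_add(volatile uint32_t *v, uint32_t n)
{
        uint32_t old;

        /* retry if another CPU changed *v between the load and the swap */
        do {
                old = *v;
        } while (cmpxchg(v, old, old + n) != old);

        return old + n;
}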

#include <asm-generic/cmpxchg-local.h>

static inline unsigned long __cmpxchg_local(volatile void *ptr,
                                      unsigned long old,
                                      unsigned long new, int size)
{
        switch (size) {
        case 4:
                return cmpxchg((unsigned long *)ptr, old, new);
        default:
                return __cmpxchg_local_generic(ptr, old, new, size);
        }

        return old;
}

/*
 * cmpxchg_local and cmpxchg64_local are atomic wrt current CPU. Always make
 * them available.
 */
#define cmpxchg_local(ptr, o, n)                                        \
        ((__typeof__(*(ptr)))__cmpxchg_local((ptr), (unsigned long)(o), \
                        (unsigned long)(n), sizeof(*(ptr))))
#define cmpxchg64_local(ptr, o, n) __cmpxchg64_local_generic((ptr), (o), (n))
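
/*
 * Illustrative sketch only: cmpxchg_local() is atomic only with respect to
 * the current CPU, which suits data never touched cross-CPU.  This helper
 * name is hypothetical, not part of the FRV API.
 */
static inline unsigned long __example_local_inc(unsigned long *ctr)
{
        unsigned long old;

        do {
                old = *ctr;
        } while (cmpxchg_local(ctr, old, old + 1) != old);

        return old + 1;
}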

#endif /* _ASM_SYSTEM_H */