/* arch/sh/include/asm/system_32.h */
#ifndef __ASM_SH_SYSTEM_32_H
#define __ASM_SH_SYSTEM_32_H

#include <linux/types.h>
#include <asm/mmu.h>

#ifdef CONFIG_SH_DSP

#define is_dsp_enabled(tsk)                                             \
        (!!(tsk->thread.dsp_status.status & SR_DSP))

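/*
 * Reload the DSP context from tsk's 14-word dsp_status save area,
 * walking it upwards with r2 post-increment loads; the mirror image
 * of __save_dsp() below.
 */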
#define __restore_dsp(tsk)                                              \
do {                                                                    \
        register u32 *__ts2 __asm__ ("r2") =                            \
                        (u32 *)&tsk->thread.dsp_status;                 \
        __asm__ __volatile__ (                                          \
                ".balign 4\n\t"                                         \
                "movs.l @r2+, a0\n\t"                                   \
                "movs.l @r2+, a1\n\t"                                   \
                "movs.l @r2+, a0g\n\t"                                  \
                "movs.l @r2+, a1g\n\t"                                  \
                "movs.l @r2+, m0\n\t"                                   \
                "movs.l @r2+, m1\n\t"                                   \
                "movs.l @r2+, x0\n\t"                                   \
                "movs.l @r2+, x1\n\t"                                   \
                "movs.l @r2+, y0\n\t"                                   \
                "movs.l @r2+, y1\n\t"                                   \
                "lds.l  @r2+, dsr\n\t"                                  \
                "ldc.l  @r2+, rs\n\t"                                   \
                "ldc.l  @r2+, re\n\t"                                   \
                "ldc.l  @r2+, mod\n\t"                                  \
                : : "r" (__ts2));                                       \
} while (0)

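/*
 * Save the DSP context: r2 starts one word past the end of the
 * 14-word save area and every store pre-decrements, leaving memory
 * in exactly the order __restore_dsp() reads it back.
 */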
#define __save_dsp(tsk)                                                 \
do {                                                                    \
        register u32 *__ts2 __asm__ ("r2") =                            \
                        (u32 *)&tsk->thread.dsp_status + 14;            \
                                                                        \
        __asm__ __volatile__ (                                          \
                ".balign 4\n\t"                                         \
                "stc.l  mod, @-r2\n\t"                                  \
                "stc.l  re, @-r2\n\t"                                   \
                "stc.l  rs, @-r2\n\t"                                   \
                "sts.l  dsr, @-r2\n\t"                                  \
                "movs.l y1, @-r2\n\t"                                   \
                "movs.l y0, @-r2\n\t"                                   \
                "movs.l x1, @-r2\n\t"                                   \
                "movs.l x0, @-r2\n\t"                                   \
                "movs.l m1, @-r2\n\t"                                   \
                "movs.l m0, @-r2\n\t"                                   \
                "movs.l a1g, @-r2\n\t"                                  \
                "movs.l a0g, @-r2\n\t"                                  \
                "movs.l a1, @-r2\n\t"                                   \
                "movs.l a0, @-r2\n\t"                                   \
                : : "r" (__ts2));                                       \
} while (0)

#else

#define is_dsp_enabled(tsk)     (0)
#define __save_dsp(tsk)         do { } while (0)
#define __restore_dsp(tsk)      do { } while (0)
#endif

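/*
 * Instruction-cache block invalidate.  The icbi instruction is
 * SH-4A only; everyone else falls back to a full memory barrier.
 */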
#if defined(CONFIG_CPU_SH4A)
#define __icbi(addr)    __asm__ __volatile__ ( "icbi @%0\n\t" : : "r" (addr))
#else
#define __icbi(addr)    mb()
#endif

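/*
 * Operand-cache block operations: purge (write back, then invalidate),
 * invalidate (discard the line), and write-back (keep the line valid).
 */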
#define __ocbp(addr)    __asm__ __volatile__ ( "ocbp @%0\n\t" : : "r" (addr))
#define __ocbi(addr)    __asm__ __volatile__ ( "ocbi @%0\n\t" : : "r" (addr))
#define __ocbwb(addr)   __asm__ __volatile__ ( "ocbwb @%0\n\t" : : "r" (addr))

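/*
 * C-level half of the context switch; returns the task switched away
 * from in r0, which the "=z" constraint in switch_to() below captures.
 */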
struct task_struct *__switch_to(struct task_struct *prev,
                                struct task_struct *next);

/*
 * switch_to(prev, next, last) switches the CPU from prev to next;
 * last receives the task we actually switched away from.  Register
 * roles below: r1 = &prev->thread.sp, r2 = &prev->thread.pc,
 * r4 = prev, r5 = next, r6 = &next->thread.sp, r7 = next->thread.pc.
 */
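/*
 * Illustrative call site: the scheduler's context_switch() invokes
 * this as switch_to(prev, next, prev), so that when this task runs
 * again "prev" names the task it actually switched back in from.
 */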
#define switch_to(prev, next, last)                             \
do {                                                            \
        register u32 *__ts1 __asm__ ("r1");                     \
        register u32 *__ts2 __asm__ ("r2");                     \
        register u32 *__ts4 __asm__ ("r4");                     \
        register u32 *__ts5 __asm__ ("r5");                     \
        register u32 *__ts6 __asm__ ("r6");                     \
        register u32 __ts7 __asm__ ("r7");                      \
        struct task_struct *__last;                             \
                                                                \
        if (is_dsp_enabled(prev))                               \
                __save_dsp(prev);                               \
                                                                \
        __ts1 = (u32 *)&prev->thread.sp;                        \
        __ts2 = (u32 *)&prev->thread.pc;                        \
        __ts4 = (u32 *)prev;                                    \
        __ts5 = (u32 *)next;                                    \
        __ts6 = (u32 *)&next->thread.sp;                        \
        __ts7 = next->thread.pc;                                \
                                                                \
        __asm__ __volatile__ (                                  \
                ".balign 4\n\t"                                 \
                "stc.l  gbr, @-r15\n\t"                         \
                "sts.l  pr, @-r15\n\t"                          \
                "mov.l  r8, @-r15\n\t"                          \
                "mov.l  r9, @-r15\n\t"                          \
                "mov.l  r10, @-r15\n\t"                         \
                "mov.l  r11, @-r15\n\t"                         \
                "mov.l  r12, @-r15\n\t"                         \
                "mov.l  r13, @-r15\n\t"                         \
                "mov.l  r14, @-r15\n\t"                         \
                "mov.l  r15, @r1\t! save SP\n\t"                \
                "mov.l  @r6, r15\t! change to new stack\n\t"    \
                "mova   1f, %0\n\t"                             \
                "mov.l  %0, @r2\t! save PC\n\t"                 \
                "mov.l  2f, %0\n\t"                             \
                "jmp    @%0\t! call __switch_to\n\t"            \
                " lds   r7, pr\t!  with return to new PC\n\t"   \
                ".balign        4\n"                            \
                "2:\n\t"                                        \
                ".long  __switch_to\n"                          \
                "1:\n\t"                                        \
                "mov.l  @r15+, r14\n\t"                         \
                "mov.l  @r15+, r13\n\t"                         \
                "mov.l  @r15+, r12\n\t"                         \
                "mov.l  @r15+, r11\n\t"                         \
                "mov.l  @r15+, r10\n\t"                         \
                "mov.l  @r15+, r9\n\t"                          \
                "mov.l  @r15+, r8\n\t"                          \
                "lds.l  @r15+, pr\n\t"                          \
                "ldc.l  @r15+, gbr\n\t"                         \
                : "=z" (__last)                                 \
                : "r" (__ts1), "r" (__ts2), "r" (__ts4),        \
                  "r" (__ts5), "r" (__ts6), "r" (__ts7)         \
                : "r3", "t");                                   \
                                                                \
        last = __last;                                          \
} while (0)

#define finish_arch_switch(prev)                                \
do {                                                            \
        if (is_dsp_enabled(prev))                               \
                __restore_dsp(prev);                            \
} while (0)

/*
 * Jump to the uncached mapping.  Code that reprograms the TLB or the
 * caches must not execute through them while doing so, hence this
 * runs from the uncached (P2) alias.
 */
#define jump_to_uncached()                      \
do {                                            \
        unsigned long __dummy;                  \
                                                \
        __asm__ __volatile__(                   \
                "mova   1f, %0\n\t"             \
                "add    %1, %0\n\t"             \
                "jmp    @%0\n\t"                \
                " nop\n\t"                      \
                ".balign 4\n"                   \
                "1:"                            \
                : "=&z" (__dummy)               \
                : "r" (cached_to_uncached));    \
} while (0)

/*
 * Back to cached area.
 */
#define back_to_cached()                                \
do {                                                    \
        unsigned long __dummy;                          \
        ctrl_barrier();                                 \
        __asm__ __volatile__(                           \
                "mov.l  1f, %0\n\t"                     \
                "jmp    @%0\n\t"                        \
                " nop\n\t"                              \
                ".balign 4\n"                           \
                "1:     .long 2f\n"                     \
                "2:"                                    \
                : "=&r" (__dummy));                     \
} while (0)
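
/*
 * jump_to_uncached()/back_to_cached() are used as a pair.  An
 * illustrative (not literal) cache-control sequence:
 *
 *      jump_to_uncached();
 *      __raw_writel(ccr, CCR);         (runs via the uncached alias)
 *      back_to_cached();
 */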
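
/*
 * Fetch the exception vector that got us here.  With SR.RB register
 * banks the entry code leaves it in the banked r2; without them it
 * is handed over in r4.
 */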
#ifdef CONFIG_CPU_HAS_SR_RB
#define lookup_exception_vector()       \
({                                      \
        unsigned long _vec;             \
                                        \
        __asm__ __volatile__ (          \
                "stc r2_bank, %0\n\t"   \
                : "=r" (_vec)           \
        );                              \
                                        \
        _vec;                           \
})
#else
#define lookup_exception_vector()       \
({                                      \
        unsigned long _vec;             \
        __asm__ __volatile__ (          \
                "mov r4, %0\n\t"        \
                : "=r" (_vec)           \
        );                              \
                                        \
        _vec;                           \
})
#endif

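/*
 * A plain cast on sh32; kept as a helper so common code matches the
 * sh64 variant, which has to sign-extend into a 64-bit register.
 */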
static inline reg_size_t register_align(void *val)
{
        return (unsigned long)(signed long)val;
}

int handle_unaligned_access(insn_size_t instruction, struct pt_regs *regs,
                            struct mem_access *ma, int expected,
                            unsigned long address);

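/*
 * Force an address-error exception: load SR with only BL (0x10000000)
 * set, then perform a misaligned longword load from 0x80000001.
 */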
static inline void trigger_address_error(void)
{
        __asm__ __volatile__ (
                "ldc %0, sr\n\t"
                "mov.l @%1, %0"
                :
                : "r" (0x10000000), "r" (0x80000001)
        );
}

asmlinkage void do_address_error(struct pt_regs *regs,
                                 unsigned long writeaccess,
                                 unsigned long address);
asmlinkage void do_divide_error(unsigned long r4, unsigned long r5,
                                unsigned long r6, unsigned long r7,
                                struct pt_regs __regs);
asmlinkage void do_reserved_inst(unsigned long r4, unsigned long r5,
                                 unsigned long r6, unsigned long r7,
                                 struct pt_regs __regs);
asmlinkage void do_illegal_slot_inst(unsigned long r4, unsigned long r5,
                                     unsigned long r6, unsigned long r7,
                                     struct pt_regs __regs);
asmlinkage void do_exception_error(unsigned long r4, unsigned long r5,
                                   unsigned long r6, unsigned long r7,
                                   struct pt_regs __regs);

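/*
 * SR.BL (bit 28) blocks exceptions and interrupts while set.  Note
 * that set_bl_bit() also clears the IMASK field (the and with
 * 0xffffff0f).
 */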
static inline void set_bl_bit(void)
{
        unsigned long __dummy0, __dummy1;

        __asm__ __volatile__ (
                "stc    sr, %0\n\t"
                "or     %2, %0\n\t"
                "and    %3, %0\n\t"
                "ldc    %0, sr\n\t"
                : "=&r" (__dummy0), "=r" (__dummy1)
                : "r" (0x10000000), "r" (0xffffff0f)
                : "memory"
        );
}

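/* Clear only SR.BL; the interrupt mask bits are left untouched. */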
static inline void clear_bl_bit(void)
{
        unsigned long __dummy0, __dummy1;

        __asm__ __volatile__ (
                "stc    sr, %0\n\t"
                "and    %2, %0\n\t"
                "ldc    %0, sr\n\t"
                : "=&r" (__dummy0), "=r" (__dummy1)
                : "1" (~0x10000000)
                : "memory"
        );
}

#endif /* __ASM_SH_SYSTEM_32_H */