arch/sh/include/asm/system_32.h
#ifndef __ASM_SH_SYSTEM_32_H
#define __ASM_SH_SYSTEM_32_H

#include <linux/types.h>
#include <asm/mmu.h>

#ifdef CONFIG_SH_DSP

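/*
 * is_dsp_enabled() tests the SR_DSP bit in the task's saved DSP status
 * word, i.e. whether the task has a DSP context that needs to be saved
 * and restored across context switches.
 */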
#define is_dsp_enabled(tsk)                                             \
        (!!(tsk->thread.dsp_status.status & SR_DSP))

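/*
 * __restore_dsp() reloads the DSP register set (a0/a1 plus guard bits,
 * m0/m1, x0/x1, y0/y1, dsr, rs, re, mod) from tsk->thread.dsp_status.
 * The pointer is pinned to r2 because the asm below walks the save area
 * through r2 with post-increment addressing.
 */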
#define __restore_dsp(tsk)                                              \
do {                                                                    \
        register u32 *__ts2 __asm__ ("r2") =                            \
                        (u32 *)&tsk->thread.dsp_status;                 \
        __asm__ __volatile__ (                                          \
                ".balign 4\n\t"                                         \
                "movs.l @r2+, a0\n\t"                                   \
                "movs.l @r2+, a1\n\t"                                   \
                "movs.l @r2+, a0g\n\t"                                  \
                "movs.l @r2+, a1g\n\t"                                  \
                "movs.l @r2+, m0\n\t"                                   \
                "movs.l @r2+, m1\n\t"                                   \
                "movs.l @r2+, x0\n\t"                                   \
                "movs.l @r2+, x1\n\t"                                   \
                "movs.l @r2+, y0\n\t"                                   \
                "movs.l @r2+, y1\n\t"                                   \
                "lds.l  @r2+, dsr\n\t"                                  \
                "ldc.l  @r2+, rs\n\t"                                   \
                "ldc.l  @r2+, re\n\t"                                   \
                "ldc.l  @r2+, mod\n\t"                                  \
                : : "r" (__ts2));                                       \
} while (0)

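/*
 * __save_dsp() is the mirror of __restore_dsp(): it stores the registers
 * with pre-decrement addressing, so r2 starts just past the end of the
 * 14-word save area (hence the "+ 14") and walks back down to the start
 * of tsk->thread.dsp_status.
 */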
#define __save_dsp(tsk)                                                 \
do {                                                                    \
        register u32 *__ts2 __asm__ ("r2") =                            \
                        (u32 *)&tsk->thread.dsp_status + 14;            \
                                                                        \
        __asm__ __volatile__ (                                          \
                ".balign 4\n\t"                                         \
                "stc.l  mod, @-r2\n\t"                                  \
                "stc.l  re, @-r2\n\t"                                   \
                "stc.l  rs, @-r2\n\t"                                   \
                "sts.l  dsr, @-r2\n\t"                                  \
                "movs.l y1, @-r2\n\t"                                   \
                "movs.l y0, @-r2\n\t"                                   \
                "movs.l x1, @-r2\n\t"                                   \
                "movs.l x0, @-r2\n\t"                                   \
                "movs.l m1, @-r2\n\t"                                   \
                "movs.l m0, @-r2\n\t"                                   \
                "movs.l a1g, @-r2\n\t"                                  \
                "movs.l a0g, @-r2\n\t"                                  \
                "movs.l a1, @-r2\n\t"                                   \
                "movs.l a0, @-r2\n\t"                                   \
                : : "r" (__ts2));                                       \
} while (0)

#else

#define is_dsp_enabled(tsk)     (0)
#define __save_dsp(tsk)         do { } while (0)
#define __restore_dsp(tsk)      do { } while (0)
#endif

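/*
 * __icbi() invalidates the instruction cache block containing 'addr'.
 * The ICBI instruction only exists on SH-4A; on everything else we fall
 * back to a full memory barrier.
 */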
#if defined(CONFIG_CPU_SH4A)
#define __icbi(addr)    __asm__ __volatile__ ( "icbi @%0\n\t" : : "r" (addr))
#else
#define __icbi(addr)    mb()
#endif

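/*
 * Operand cache block operations: OCBP purges (writes back and then
 * invalidates), OCBI invalidates without write-back, and OCBWB writes
 * back without invalidating the cache block containing 'addr'.
 */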
#define __ocbp(addr)    __asm__ __volatile__ ( "ocbp @%0\n\t" : : "r" (addr))
#define __ocbi(addr)    __asm__ __volatile__ ( "ocbi @%0\n\t" : : "r" (addr))
#define __ocbwb(addr)   __asm__ __volatile__ ( "ocbwb @%0\n\t" : : "r" (addr))

struct task_struct *__switch_to(struct task_struct *prev,
                                struct task_struct *next);

/*
 * switch_to(prev, next, last) switches the CPU from 'prev' to 'next' and
 * returns the task we actually switched away from in 'last'.
 */
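/*
 * The operands are pinned to fixed registers so the inline asm can name
 * them directly: r1/r2 point at prev's saved SP/PC slots, r4/r5 carry the
 * prev/next task pointers for __switch_to(), and r6/r7 hold next's saved
 * SP/PC.  __switch_to() returns the previous task in r0, which the "=z"
 * constraint maps onto __last.
 */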
#define switch_to(prev, next, last)                             \
do {                                                            \
        register u32 *__ts1 __asm__ ("r1");                     \
        register u32 *__ts2 __asm__ ("r2");                     \
        register u32 *__ts4 __asm__ ("r4");                     \
        register u32 *__ts5 __asm__ ("r5");                     \
        register u32 *__ts6 __asm__ ("r6");                     \
        register u32 __ts7 __asm__ ("r7");                      \
        struct task_struct *__last;                             \
                                                                \
        if (is_dsp_enabled(prev))                               \
                __save_dsp(prev);                               \
                                                                \
        __ts1 = (u32 *)&prev->thread.sp;                        \
        __ts2 = (u32 *)&prev->thread.pc;                        \
        __ts4 = (u32 *)prev;                                    \
        __ts5 = (u32 *)next;                                    \
        __ts6 = (u32 *)&next->thread.sp;                        \
        __ts7 = next->thread.pc;                                \
                                                                \
        __asm__ __volatile__ (                                  \
                ".balign 4\n\t"                                 \
                "stc.l  gbr, @-r15\n\t"                         \
                "sts.l  pr, @-r15\n\t"                          \
                "mov.l  r8, @-r15\n\t"                          \
                "mov.l  r9, @-r15\n\t"                          \
                "mov.l  r10, @-r15\n\t"                         \
                "mov.l  r11, @-r15\n\t"                         \
                "mov.l  r12, @-r15\n\t"                         \
                "mov.l  r13, @-r15\n\t"                         \
                "mov.l  r14, @-r15\n\t"                         \
                "mov.l  r15, @r1\t! save SP\n\t"                \
                "mov.l  @r6, r15\t! change to new stack\n\t"    \
                "mova   1f, %0\n\t"                             \
                "mov.l  %0, @r2\t! save PC\n\t"                 \
                "mov.l  2f, %0\n\t"                             \
                "jmp    @%0\t! call __switch_to\n\t"            \
                " lds   r7, pr\t!  with return to new PC\n\t"   \
                ".balign        4\n"                            \
                "2:\n\t"                                        \
                ".long  __switch_to\n"                          \
                "1:\n\t"                                        \
                "mov.l  @r15+, r14\n\t"                         \
                "mov.l  @r15+, r13\n\t"                         \
                "mov.l  @r15+, r12\n\t"                         \
                "mov.l  @r15+, r11\n\t"                         \
                "mov.l  @r15+, r10\n\t"                         \
                "mov.l  @r15+, r9\n\t"                          \
                "mov.l  @r15+, r8\n\t"                          \
                "lds.l  @r15+, pr\n\t"                          \
                "ldc.l  @r15+, gbr\n\t"                         \
                : "=z" (__last)                                 \
                : "r" (__ts1), "r" (__ts2), "r" (__ts4),        \
                  "r" (__ts5), "r" (__ts6), "r" (__ts7)         \
                : "r3", "t");                                   \
                                                                \
        last = __last;                                          \
} while (0)

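/*
 * finish_arch_switch() runs once the low-level switch above has completed;
 * DSP state is restored here rather than inside switch_to() itself.
 */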
#define finish_arch_switch(prev)                                \
do {                                                            \
        if (is_dsp_enabled(prev))                               \
                __restore_dsp(prev);                            \
} while (0)

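/*
 * lookup_exception_vector() returns the vector of the exception currently
 * being handled.  With register banking (CONFIG_CPU_HAS_SR_RB) the
 * low-level entry code is expected to have left it in r2_bank; without
 * banking it is handed over in r4.  Typical (illustrative) use in a trap
 * handler:
 *
 *      unsigned long vec = lookup_exception_vector();
 */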
#ifdef CONFIG_CPU_HAS_SR_RB
#define lookup_exception_vector()       \
({                                      \
        unsigned long _vec;             \
                                        \
        __asm__ __volatile__ (          \
                "stc r2_bank, %0\n\t"   \
                : "=r" (_vec)           \
        );                              \
                                        \
        _vec;                           \
})
#else
#define lookup_exception_vector()       \
({                                      \
        unsigned long _vec;             \
        __asm__ __volatile__ (          \
                "mov r4, %0\n\t"        \
                : "=r" (_vec)           \
        );                              \
                                        \
        _vec;                           \
})
#endif

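/*
 * register_align() converts a pointer to a register-sized value; on
 * 32-bit SH registers and pointers are both 32 bits wide, so this is
 * just a cast through signed long.
 */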
static inline reg_size_t register_align(void *val)
{
        return (unsigned long)(signed long)val;
}

int handle_unaligned_access(insn_size_t instruction, struct pt_regs *regs,
                            struct mem_access *ma, int expected,
                            unsigned long address);

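/*
 * trigger_address_error() deliberately faults: it loads SR with only the
 * BL (block) bit set and then does a longword load from the misaligned
 * address 0x80000001, which raises an address error exception.
 */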
static inline void trigger_address_error(void)
{
        __asm__ __volatile__ (
                "ldc %0, sr\n\t"
                "mov.l @%1, %0"
                :
                : "r" (0x10000000), "r" (0x80000001)
        );
}

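/* C-level exception handlers, invoked from the low-level exception entry code. */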
asmlinkage void do_address_error(struct pt_regs *regs,
                                 unsigned long writeaccess,
                                 unsigned long address);
asmlinkage void do_divide_error(unsigned long r4, unsigned long r5,
                                unsigned long r6, unsigned long r7,
                                struct pt_regs __regs);
asmlinkage void do_reserved_inst(unsigned long r4, unsigned long r5,
                                unsigned long r6, unsigned long r7,
                                struct pt_regs __regs);
asmlinkage void do_illegal_slot_inst(unsigned long r4, unsigned long r5,
                                unsigned long r6, unsigned long r7,
                                struct pt_regs __regs);
asmlinkage void do_exception_error(unsigned long r4, unsigned long r5,
                                   unsigned long r6, unsigned long r7,
                                   struct pt_regs __regs);

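/*
 * set_bl_bit()/clear_bl_bit() manipulate the BL (block) bit in SR, which
 * blocks exceptions and interrupts while it is set.  Note that
 * set_bl_bit() also clears the IMASK field (the AND with 0xffffff0f).
 */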
static inline void set_bl_bit(void)
{
        unsigned long __dummy0, __dummy1;

        __asm__ __volatile__ (
                "stc    sr, %0\n\t"
                "or     %2, %0\n\t"
                "and    %3, %0\n\t"
                "ldc    %0, sr\n\t"
                : "=&r" (__dummy0), "=r" (__dummy1)
                : "r" (0x10000000), "r" (0xffffff0f)
                : "memory"
        );
}

static inline void clear_bl_bit(void)
{
        unsigned long __dummy0, __dummy1;

        __asm__ __volatile__ (
                "stc    sr, %0\n\t"
                "and    %2, %0\n\t"
                "ldc    %0, sr\n\t"
                : "=&r" (__dummy0), "=r" (__dummy1)
                : "1" (~0x10000000)
                : "memory"
        );
}

#endif /* __ASM_SH_SYSTEM_32_H */