/*
 * arch/arm/include/asm/futex.h
 *
 * ARM implementation of the atomic futex user-access helpers.
 */
1 #ifndef _ASM_ARM_FUTEX_H
2 #define _ASM_ARM_FUTEX_H
3
4 #ifdef __KERNEL__
5
6 #include <linux/futex.h>
7 #include <linux/uaccess.h>
8 #include <asm/errno.h>
9
/*
 * Shared exception-table fragment appended to the futex inline asm below.
 *
 * "3:" is the common exit label.  The __ex_table entries map a fault at
 * asm label 1 (the user load) or label 2 (the user store) to the fixup
 * code at label 4, which moves the error value (err_reg, always -EFAULT
 * at the call sites here) into output operand %0 and branches back to
 * the normal exit at 3.  ".align 3" keeps the 8-byte (two-word) entry
 * layout the exception-table search expects.
 */
#define __futex_atomic_ex_table(err_reg)                        \
        "3:\n"                                                  \
        "       .pushsection __ex_table,\"a\"\n"                \
        "       .align  3\n"                                    \
        "       .long   1b, 4f, 2b, 4f\n"                       \
        "       .popsection\n"                                  \
        "       .pushsection .fixup,\"ax\"\n"                   \
        "4:     mov     %0, " err_reg "\n"                      \
        "       b       3b\n"                                   \
        "       .popsection"
20
21 #ifdef CONFIG_SMP
22
/*
 * SMP __futex_atomic_op: LL/SC read-modify-write on the user futex word.
 *
 * "insn" is expanded between the exclusive load and store and computes
 * the new value in %0 from the old value (%1) and oparg (%4); %2 is the
 * strex status flag, %3 is uaddr.  The loop retries until the
 * store-exclusive succeeds, then clears ret (%0).  A fault at label 1
 * or 2 is redirected by __futex_atomic_ex_table, which sets ret to
 * -EFAULT (%5) instead.  The leading smp_mb() provides the memory
 * ordering the futex ABI requires before the atomic operation.
 */
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg) \
        smp_mb();                                               \
        __asm__ __volatile__(                                   \
        "1:     ldrex   %1, [%3]\n"                             \
        "       " insn "\n"                                     \
        "2:     strex   %2, %0, [%3]\n"                         \
        "       teq     %2, #0\n"                               \
        "       bne     1b\n"                                   \
        "       mov     %0, #0\n"                               \
        __futex_atomic_ex_table("%5")                           \
        : "=&r" (ret), "=&r" (oldval), "=&r" (tmp)              \
        : "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)              \
        : "cc", "memory")
36
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange the user futex
 * word (SMP version).
 * @uval:   out: the value actually read from *uaddr
 * @uaddr:  user-space futex address
 * @oldval: expected value
 * @newval: value to store if *uaddr == oldval
 *
 * Returns 0 on success (with *uval set to the observed value) or
 * -EFAULT if the user access faulted.  The ldrex/strexeq loop retries
 * while the exclusive store fails; the explicit "ite eq" is needed so
 * the Thumb-2 assembler accepts the conditional strexeq/movne pair that
 * follows the "2:" label.  The smp_mb() pair brackets the operation
 * with the full barriers the futex ABI requires.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                              u32 oldval, u32 newval)
{
        int ret;
        u32 val;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

        smp_mb();
        __asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
        "1:     ldrex   %1, [%4]\n"
        "       teq     %1, %2\n"
        "       ite     eq      @ explicit IT needed for the 2b label\n"
        "2:     strexeq %0, %3, [%4]\n"
        "       movne   %0, #0\n"
        "       teq     %0, #0\n"
        "       bne     1b\n"
        __futex_atomic_ex_table("%5")
        : "=&r" (ret), "=&r" (val)
        : "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
        : "cc", "memory");
        smp_mb();

        *uval = val;
        return ret;
}
65
66 #else /* !SMP, we can work around lack of atomic ops by disabling preemption */
67
68 #include <linux/preempt.h>
69 #include <asm/domain.h>
70
/*
 * UP __futex_atomic_op: no exclusives needed -- the caller runs with
 * pagefaults (and therefore preemption) disabled, so a plain
 * load/modify/store sequence cannot be interleaved with another task.
 * T() wraps the mnemonic so the access is performed with the
 * user-access (translated) variant where the domain model requires it
 * (see asm/domain.h).  Operand roles match the SMP version: %0 = new
 * value / ret, %1 = old value, %3 = uaddr, %4 = oparg; a fault is
 * turned into ret = -EFAULT (%5) by __futex_atomic_ex_table.
 */
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg) \
        __asm__ __volatile__(                                   \
        "1:     " T(ldr) "      %1, [%3]\n"                     \
        "       " insn "\n"                                     \
        "2:     " T(str) "      %0, [%3]\n"                     \
        "       mov     %0, #0\n"                               \
        __futex_atomic_ex_table("%5")                           \
        : "=&r" (ret), "=&r" (oldval), "=&r" (tmp)              \
        : "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)              \
        : "cc", "memory")
81
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange the user futex
 * word (UP version).
 * @uval:   out: the value actually read from *uaddr
 * @uaddr:  user-space futex address
 * @oldval: expected value
 * @newval: value to store if *uaddr == oldval
 *
 * Returns 0 on success (with *uval set to the observed value) or
 * -EFAULT if the user access faulted.  No retry loop is needed: with
 * only one CPU the plain load/store pair is atomic with respect to
 * other tasks as long as preemption is off (NOTE(review): this relies
 * on the futex core calling it with pagefaults disabled -- confirm
 * against the caller).  ret is a "+r" operand pre-set to 0; it is only
 * written by the -EFAULT fixup path.  The "it eq" satisfies the
 * Thumb-2 assembler for the conditional store at the "2:" label.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                              u32 oldval, u32 newval)
{
        int ret = 0;
        u32 val;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

        __asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
        "1:     " T(ldr) "      %1, [%4]\n"
        "       teq     %1, %2\n"
        "       it      eq      @ explicit IT needed for the 2b label\n"
        "2:     " T(streq) "    %3, [%4]\n"
        __futex_atomic_ex_table("%5")
        : "+r" (ret), "=&r" (val)
        : "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
        : "cc", "memory");

        *uval = val;
        return ret;
}
105
106 #endif /* !SMP */
107
108 static inline int
109 futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr)
110 {
111         int op = (encoded_op >> 28) & 7;
112         int cmp = (encoded_op >> 24) & 15;
113         int oparg = (encoded_op << 8) >> 20;
114         int cmparg = (encoded_op << 20) >> 20;
115         int oldval = 0, ret, tmp;
116
117         if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
118                 oparg = 1 << oparg;
119
120         if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
121                 return -EFAULT;
122
123         pagefault_disable();    /* implies preempt_disable() */
124
125         switch (op) {
126         case FUTEX_OP_SET:
127                 __futex_atomic_op("mov  %0, %4", ret, oldval, tmp, uaddr, oparg);
128                 break;
129         case FUTEX_OP_ADD:
130                 __futex_atomic_op("add  %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
131                 break;
132         case FUTEX_OP_OR:
133                 __futex_atomic_op("orr  %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
134                 break;
135         case FUTEX_OP_ANDN:
136                 __futex_atomic_op("and  %0, %1, %4", ret, oldval, tmp, uaddr, ~oparg);
137                 break;
138         case FUTEX_OP_XOR:
139                 __futex_atomic_op("eor  %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
140                 break;
141         default:
142                 ret = -ENOSYS;
143         }
144
145         pagefault_enable();     /* subsumes preempt_enable() */
146
147         if (!ret) {
148                 switch (cmp) {
149                 case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
150                 case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
151                 case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
152                 case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
153                 case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
154                 case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
155                 default: ret = -ENOSYS;
156                 }
157         }
158         return ret;
159 }
160
161 #endif /* __KERNEL__ */
162 #endif /* _ASM_ARM_FUTEX_H */