/* arch/arm/include/asm/futex.h */
#ifndef _ASM_ARM_FUTEX_H
#define _ASM_ARM_FUTEX_H

#ifdef __KERNEL__

#if defined(CONFIG_CPU_USE_DOMAINS) && defined(CONFIG_SMP)
/*
 * With CONFIG_CPU_USE_DOMAINS, user memory is accessed with the
 * unprivileged ldrt/strt instructions, and ARM provides no unprivileged
 * variants of the exclusive accessors (ldrex/strex) that SMP atomics
 * require, so fall back to the generic implementation.
 */
#include <asm-generic/futex.h>
#else

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

#define __futex_atomic_ex_table(err_reg)                        \
        "3:\n"                                                  \
        "       .pushsection __ex_table,\"a\"\n"                \
        "       .align  3\n"                                    \
        "       .long   1b, 4f, 2b, 4f\n"                       \
        "       .popsection\n"                                  \
        "       .pushsection .fixup,\"ax\"\n"                   \
        "       .align  2\n"                                    \
        "4:     mov     %0, " err_reg "\n"                      \
        "       b       3b\n"                                   \
        "       .popsection"
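
/*
 * How the fixup works (an illustrative sketch, not generated output):
 * labels 1 and 2 mark the user-space load and store in the macros
 * below.  The entry
 *
 *      .long   1b, 4f, 2b, 4f
 *
 * pairs each possibly-faulting address with label 4, so when either
 * access faults the kernel's exception fixup resumes at 4, which moves
 * -EFAULT (err_reg is bound to the "Ir" (-EFAULT) input) into the
 * result operand and branches back to 3, the normal exit of the asm
 * body.
 */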

#ifdef CONFIG_SMP

#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg) \
        smp_mb();                                               \
        __asm__ __volatile__(                                   \
        "1:     ldrex   %1, [%3]\n"                             \
        "       " insn "\n"                                     \
        "2:     strex   %2, %0, [%3]\n"                         \
        "       teq     %2, #0\n"                               \
        "       bne     1b\n"                                   \
        "       mov     %0, #0\n"                               \
        __futex_atomic_ex_table("%5")                           \
        : "=&r" (ret), "=&r" (oldval), "=&r" (tmp)              \
        : "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)              \
        : "cc", "memory")
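
/*
 * Operand map for the "insn" template (see the callers in
 * futex_atomic_op_inuser below): %1 holds the value loaded from the
 * futex word, %4 the operand argument, and %0 receives the value to
 * store back, e.g. FUTEX_OP_ADD passes "add %0, %1, %4".  The
 * strex/teq/bne triple is the usual ARM load/store-exclusive retry
 * loop; once the store-exclusive succeeds, %0 is reused as the return
 * code and cleared to zero.
 */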

static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                              u32 oldval, u32 newval)
{
        int ret;
        u32 val;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

        smp_mb();
        __asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
        "1:     ldrex   %1, [%4]\n"
        "       teq     %1, %2\n"
        "       ite     eq      @ explicit IT needed for the 2b label\n"
        "2:     strexeq %0, %3, [%4]\n"
        "       movne   %0, #0\n"
        "       teq     %0, #0\n"
        "       bne     1b\n"
        __futex_atomic_ex_table("%5")
        : "=&r" (ret), "=&r" (val)
        : "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
        : "cc", "memory");
        smp_mb();

        *uval = val;
        return ret;
}
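
/*
 * Caller sketch (hypothetical, modelled on how the generic futex code
 * in kernel/futex.c uses this helper).  Note that ret is 0 both when
 * the new value was stored and when the comparison failed; only a
 * fault yields -EFAULT, so success is judged from *uval:
 *
 *      u32 curval;
 *
 *      if (futex_atomic_cmpxchg_inatomic(&curval, uaddr, 0, tid))
 *              return -EFAULT;
 *      if (curval != 0)
 *              return -EAGAIN;         (futex word was already taken)
 *
 * The smp_mb() on either side gives the operation full-barrier
 * semantics.
 */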

#else /* !SMP, we can work around lack of atomic ops by disabling preemption */

#include <linux/preempt.h>
#include <asm/domain.h>

#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg) \
        __asm__ __volatile__(                                   \
        "1:     " TUSER(ldr) "  %1, [%3]\n"                     \
        "       " insn "\n"                                     \
        "2:     " TUSER(str) "  %0, [%3]\n"                     \
        "       mov     %0, #0\n"                               \
        __futex_atomic_ex_table("%5")                           \
        : "=&r" (ret), "=&r" (oldval), "=&r" (tmp)              \
        : "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)              \
        : "cc", "memory")
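
/*
 * TUSER() comes from asm/domain.h (included above): with
 * CONFIG_CPU_USE_DOMAINS it should append the "t" suffix, turning
 * ldr/str into their unprivileged ldrt/strt forms, and otherwise
 * expand to the plain instruction.  No exclusive pair is needed here:
 * the caller runs under pagefault_disable(), which implies
 * preempt_disable(), so on UP the load/modify/store sequence cannot be
 * interleaved with another task.
 */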

static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                              u32 oldval, u32 newval)
{
        int ret = 0;
        u32 val;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

        __asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
        "1:     " TUSER(ldr) "  %1, [%4]\n"
        "       teq     %1, %2\n"
        "       it      eq      @ explicit IT needed for the 2b label\n"
        "2:     " TUSER(streq) "        %3, [%4]\n"
        __futex_atomic_ex_table("%5")
        : "+r" (ret), "=&r" (val)
        : "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
        : "cc", "memory");

        *uval = val;
        return ret;
}
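
/*
 * Unlike the SMP variant there is no retry loop and no barriers: with
 * preemption disabled on a uniprocessor, nothing can modify the futex
 * word between the TUSER(ldr) and the conditional TUSER(streq), which
 * is atomicity enough for the futex core.
 */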

#endif /* !SMP */

static inline int
futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
{
        int op = (encoded_op >> 28) & 7;
        int cmp = (encoded_op >> 24) & 15;
        int oparg = (encoded_op << 8) >> 20;
        int cmparg = (encoded_op << 20) >> 20;
        int oldval = 0, ret, tmp;

        if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
                oparg = 1 << oparg;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

        pagefault_disable();    /* implies preempt_disable() */

        switch (op) {
        case FUTEX_OP_SET:
                __futex_atomic_op("mov  %0, %4", ret, oldval, tmp, uaddr, oparg);
                break;
        case FUTEX_OP_ADD:
                __futex_atomic_op("add  %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
                break;
        case FUTEX_OP_OR:
                __futex_atomic_op("orr  %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
                break;
        case FUTEX_OP_ANDN:
                __futex_atomic_op("and  %0, %1, %4", ret, oldval, tmp, uaddr, ~oparg);
                break;
        case FUTEX_OP_XOR:
                __futex_atomic_op("eor  %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
                break;
        default:
                ret = -ENOSYS;
        }

        pagefault_enable();     /* subsumes preempt_enable() */

        if (!ret) {
                switch (cmp) {
                case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
                case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
                case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
                case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
                case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
                case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
                default: ret = -ENOSYS;
                }
        }
        return ret;
}
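
/*
 * Worked decode example (illustrative): FUTEX_OP(FUTEX_OP_ADD, 1,
 * FUTEX_OP_CMP_GT, 0) from linux/futex.h packs op into bits 31-28, cmp
 * into bits 27-24, oparg into bits 23-12 and cmparg into bits 11-0.
 * Decoded above, it atomically adds 1 to *uaddr and returns
 * (oldval > 0), i.e. "add one and report whether the old value was
 * positive", the shape of operation FUTEX_WAKE_OP is built around.
 */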

#endif /* !(CPU_USE_DOMAINS && SMP) */
#endif /* __KERNEL__ */
#endif /* _ASM_ARM_FUTEX_H */