ARM: 7954/1: mm: remove remaining domain support from ARMv6
[linux.git] / arch / arm / include / asm / futex.h
1 #ifndef _ASM_ARM_FUTEX_H
2 #define _ASM_ARM_FUTEX_H
3
4 #ifdef __KERNEL__
5
6 #include <linux/futex.h>
7 #include <linux/uaccess.h>
8 #include <asm/errno.h>
9
/*
 * Exception-table boilerplate shared by the futex asm bodies below.
 *
 * The caller's asm labels its user-space load as "1:" and its store as
 * "2:".  This macro records both in __ex_table (pointing at the local
 * fixup "4:"), so a fault at either instruction jumps to the fixup,
 * which writes -EFAULT (passed in as err_reg) into output operand %0
 * and branches back to "3:", i.e. just past the asm body.
 */
#define __futex_atomic_ex_table(err_reg)			\
	"3:\n"							\
	"	.pushsection __ex_table,\"a\"\n"		\
	"	.align	3\n"					\
	"	.long	1b, 4f, 2b, 4f\n"			\
	"	.popsection\n"					\
	"	.pushsection .fixup,\"ax\"\n"			\
	"	.align	2\n"					\
	"4:	mov	%0, " err_reg "\n"			\
	"	b	3b\n"					\
	"	.popsection"
21
22 #ifdef CONFIG_SMP
23
/*
 * SMP variant: atomically apply "insn" (which must compute the new
 * value in %0 from the old value in %1 and the argument in %4) to the
 * user word at uaddr, using an ldrex/strex retry loop.
 *
 * On success ret is 0 and oldval holds the value read before the
 * update; if the user access faults, ret becomes -EFAULT via
 * __futex_atomic_ex_table and the loop is abandoned.
 */
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
	smp_mb();						\
	__asm__ __volatile__(					\
	"1:	ldrex	%1, [%3]\n"				\
	"	" insn "\n"					\
	"2:	strex	%2, %0, [%3]\n"				\
	"	teq	%2, #0\n"				\
	"	bne	1b\n"					\
	"	mov	%0, #0\n"				\
	__futex_atomic_ex_table("%5")				\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp)		\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
	: "cc", "memory")
37
/*
 * SMP compare-and-exchange on a user-space futex word.
 *
 * If *uaddr == oldval, atomically store newval; always return the
 * value that was read in *uval.  Returns 0 on success (whether or not
 * the exchange happened) and -EFAULT if uaddr is not writable user
 * memory or the access faults.
 *
 * The strexeq/movne pair needs an explicit "ite eq" so that the "2:"
 * label attaches to a valid IT-block instruction in Thumb-2 builds.
 * A failed strex (contention) sets %0 non-zero, so teq/bne retries.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	/* Full barriers either side: futexes require ordering here. */
	smp_mb();
	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1:	ldrex	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	ite	eq	@ explicit IT needed for the 2b label\n"
	"2:	strexeq	%0, %3, [%4]\n"
	"	movne	%0, #0\n"
	"	teq	%0, #0\n"
	"	bne	1b\n"
	__futex_atomic_ex_table("%5")
	: "=&r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");
	smp_mb();

	*uval = val;
	return ret;
}
66
67 #else /* !SMP, we can work around lack of atomic ops by disabling preemption */
68
69 #include <linux/preempt.h>
70 #include <asm/domain.h>
71
/*
 * UP variant: no exclusive monitors needed — the caller runs with
 * preemption disabled (pagefault_disable() implies it), so a plain
 * unprivileged load/modify/store sequence is atomic enough.  TUSER()
 * selects the ldrt/strt forms where required so the access is done
 * with user privileges.  A fault sets ret to -EFAULT via the
 * exception table; otherwise ret is cleared to 0.
 */
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
	__asm__ __volatile__(					\
	"1:	" TUSER(ldr) "	%1, [%3]\n"			\
	"	" insn "\n"					\
	"2:	" TUSER(str) "	%0, [%3]\n"			\
	"	mov	%0, #0\n"				\
	__futex_atomic_ex_table("%5")				\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp)		\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)		\
	: "cc", "memory")
82
/*
 * UP compare-and-exchange on a user-space futex word.
 *
 * If *uaddr == oldval, store newval; the previously read value is
 * always returned in *uval.  Returns 0 on success and -EFAULT if
 * uaddr is not writable user memory or the access faults.  Atomicity
 * relies on the caller having preemption disabled (pagefault_disable).
 *
 * ret starts at 0 and is bound "+r": only the fault fixup path writes
 * it (to -EFAULT).  The explicit "it eq" keeps the "2:" label on a
 * valid instruction in Thumb-2 builds.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val;

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1:	" TUSER(ldr) "	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	it	eq	@ explicit IT needed for the 2b label\n"
	"2:	" TUSER(streq) "	%3, [%4]\n"
	__futex_atomic_ex_table("%5")
	: "+r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");

	*uval = val;
	return ret;
}
106
107 #endif /* !SMP */
108
109 static inline int
110 futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr)
111 {
112         int op = (encoded_op >> 28) & 7;
113         int cmp = (encoded_op >> 24) & 15;
114         int oparg = (encoded_op << 8) >> 20;
115         int cmparg = (encoded_op << 20) >> 20;
116         int oldval = 0, ret, tmp;
117
118         if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
119                 oparg = 1 << oparg;
120
121         if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
122                 return -EFAULT;
123
124         pagefault_disable();    /* implies preempt_disable() */
125
126         switch (op) {
127         case FUTEX_OP_SET:
128                 __futex_atomic_op("mov  %0, %4", ret, oldval, tmp, uaddr, oparg);
129                 break;
130         case FUTEX_OP_ADD:
131                 __futex_atomic_op("add  %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
132                 break;
133         case FUTEX_OP_OR:
134                 __futex_atomic_op("orr  %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
135                 break;
136         case FUTEX_OP_ANDN:
137                 __futex_atomic_op("and  %0, %1, %4", ret, oldval, tmp, uaddr, ~oparg);
138                 break;
139         case FUTEX_OP_XOR:
140                 __futex_atomic_op("eor  %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
141                 break;
142         default:
143                 ret = -ENOSYS;
144         }
145
146         pagefault_enable();     /* subsumes preempt_enable() */
147
148         if (!ret) {
149                 switch (cmp) {
150                 case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
151                 case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
152                 case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
153                 case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
154                 case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
155                 case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
156                 default: ret = -ENOSYS;
157                 }
158         }
159         return ret;
160 }
161
162 #endif /* __KERNEL__ */
163 #endif /* _ASM_ARM_FUTEX_H */