/* arch/arm/include/asm/futex.h */
#ifndef _ASM_ARM_FUTEX_H
#define _ASM_ARM_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

#define __futex_atomic_ex_table(err_reg)                        \
        "3:\n"                                                  \
        "       .pushsection __ex_table,\"a\"\n"                \
        "       .align  3\n"                                    \
        "       .long   1b, 4f, 2b, 4f\n"                       \
        "       .popsection\n"                                  \
        "       .pushsection .text.fixup,\"ax\"\n"              \
        "       .align  2\n"                                    \
        "4:     mov     %0, " err_reg "\n"                      \
        "       b       3b\n"                                   \
        "       .popsection"
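
/*
 * Fault handling sketch: the table above pairs each user access (the
 * instructions at labels 1 and 2 in the users of this macro) with the
 * fixup at label 4.  If either access faults, the fixup loads -EFAULT
 * into the register named by err_reg and branches back to label 3, just
 * past the access sequence, so the C caller simply sees a -EFAULT result.
 */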

#ifdef CONFIG_SMP

#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg) \
        smp_mb();                                               \
        prefetchw(uaddr);                                       \
        __asm__ __volatile__(                                   \
        "1:     ldrex   %1, [%3]\n"                             \
        "       " insn "\n"                                     \
        "2:     strex   %2, %0, [%3]\n"                         \
        "       teq     %2, #0\n"                               \
        "       bne     1b\n"                                   \
        "       mov     %0, #0\n"                               \
        __futex_atomic_ex_table("%5")                           \
        : "=&r" (ret), "=&r" (oldval), "=&r" (tmp)              \
        : "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)              \
        : "cc", "memory")
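
/*
 * Illustrative sketch only: for the FUTEX_OP_ADD case the macro above
 * expands to roughly the following loop (operand names stand in for the
 * numbered asm operands):
 *
 *      1:      ldrex   oldval, [uaddr]         @ %1 = *uaddr
 *              add     ret, oldval, oparg      @ %0 = %1 + %4
 *      2:      strex   tmp, ret, [uaddr]       @ %2 = 0 if the store won
 *              teq     tmp, #0
 *              bne     1b                      @ retry if the reservation was lost
 *              mov     ret, #0                 @ success: report 0
 *
 * so on return ret is 0 (or -EFAULT via the fixup table) and oldval holds
 * the value that was in *uaddr before the update.
 */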

static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                              u32 oldval, u32 newval)
{
        int ret;
        u32 val;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

        smp_mb();
        /* Prefetching cannot fault */
        prefetchw(uaddr);
        __asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
        "1:     ldrex   %1, [%4]\n"
        "       teq     %1, %2\n"
        "       ite     eq      @ explicit IT needed for the 2b label\n"
        "2:     strexeq %0, %3, [%4]\n"
        "       movne   %0, #0\n"
        "       teq     %0, #0\n"
        "       bne     1b\n"
        __futex_atomic_ex_table("%5")
        : "=&r" (ret), "=&r" (val)
        : "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
        : "cc", "memory");
        smp_mb();

        *uval = val;
        return ret;
}
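
/*
 * Illustrative usage sketch (hypothetical caller, not part of this file):
 * a caller such as the generic futex code would use this roughly as
 *
 *      u32 curval;
 *      if (futex_atomic_cmpxchg_inatomic(&curval, uaddr, old, new))
 *              return -EFAULT;         // the user access faulted
 *      if (curval != old)
 *              ;                       // someone else changed the word first
 *
 * i.e. a zero return only means the access succeeded; whether the exchange
 * actually happened is decided by comparing *uval against oldval.
 */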

#else /* !SMP, we can work around lack of atomic ops by disabling preemption */

#include <linux/preempt.h>
#include <asm/domain.h>

#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg) \
        __asm__ __volatile__(                                   \
        "1:     " TUSER(ldr) "  %1, [%3]\n"                     \
        "       " insn "\n"                                     \
        "2:     " TUSER(str) "  %0, [%3]\n"                     \
        "       mov     %0, #0\n"                               \
        __futex_atomic_ex_table("%5")                           \
        : "=&r" (ret), "=&r" (oldval), "=&r" (tmp)              \
        : "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)              \
        : "cc", "memory")
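
/*
 * Note: TUSER() selects the unprivileged "t" load/store variants
 * (ldrt/strt) when CONFIG_CPU_USE_DOMAINS is enabled, and the plain
 * instructions otherwise, so the accesses are made with user-mode
 * permissions.  The plain load/modify/store sequence is only safe because
 * the callers run it with preemption disabled on UP (see the !CONFIG_SMP
 * preempt_disable() in futex_atomic_op_inuser below and in the cmpxchg
 * variant).
 */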

static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                              u32 oldval, u32 newval)
{
        int ret = 0;
        u32 val;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

        preempt_disable();
        __asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
        "1:     " TUSER(ldr) "  %1, [%4]\n"
        "       teq     %1, %2\n"
        "       it      eq      @ explicit IT needed for the 2b label\n"
        "2:     " TUSER(streq) "        %3, [%4]\n"
        __futex_atomic_ex_table("%5")
        : "+r" (ret), "=&r" (val)
        : "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
        : "cc", "memory");

        *uval = val;
        preempt_enable();

        return ret;
}
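
/*
 * Unlike the SMP version, the compare and the conditional store above are
 * two separate user accesses; disabling preemption is what keeps another
 * task on the same CPU from updating *uaddr in between.  A hypothetical
 * interleaving this guards against: task A loads *uaddr and sees oldval,
 * is preempted, task B stores a new value, then task A resumes and
 * performs its store anyway, silently losing B's update.
 */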

#endif /* !SMP */

static inline int
futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr)
{
        int op = (encoded_op >> 28) & 7;
        int cmp = (encoded_op >> 24) & 15;
        int oparg = (encoded_op << 8) >> 20;
        int cmparg = (encoded_op << 20) >> 20;
        int oldval = 0, ret, tmp;

        if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
                oparg = 1 << oparg;

        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

#ifndef CONFIG_SMP
        preempt_disable();
#endif
        pagefault_disable();

        switch (op) {
        case FUTEX_OP_SET:
                __futex_atomic_op("mov  %0, %4", ret, oldval, tmp, uaddr, oparg);
                break;
        case FUTEX_OP_ADD:
                __futex_atomic_op("add  %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
                break;
        case FUTEX_OP_OR:
                __futex_atomic_op("orr  %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
                break;
        case FUTEX_OP_ANDN:
                __futex_atomic_op("and  %0, %1, %4", ret, oldval, tmp, uaddr, ~oparg);
                break;
        case FUTEX_OP_XOR:
                __futex_atomic_op("eor  %0, %1, %4", ret, oldval, tmp, uaddr, oparg);
                break;
        default:
                ret = -ENOSYS;
        }

        pagefault_enable();
#ifndef CONFIG_SMP
        preempt_enable();
#endif

        if (!ret) {
                switch (cmp) {
                case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
                case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
                case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
                case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
                case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
                case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
                default: ret = -ENOSYS;
                }
        }
        return ret;
}
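
/*
 * Worked example of the encoding (values from the uapi futex.h FUTEX_OP()
 * macro, shown here for illustration): FUTEX_OP(FUTEX_OP_ADD, 1,
 * FUTEX_OP_CMP_GT, 0) packs op = 1 into bits 31..28, cmp = 5 into bits
 * 27..24, oparg = 1 into bits 23..12 and cmparg = 0 into bits 11..0,
 * giving encoded_op = 0x15001000.  The shifts above then recover
 * op = FUTEX_OP_ADD, cmp = FUTEX_OP_CMP_GT, oparg = 1 and cmparg = 0, so
 * the call atomically adds 1 to *uaddr and returns 1 if the old value was
 * greater than 0.
 */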

#endif /* __KERNEL__ */
#endif /* _ASM_ARM_FUTEX_H */