Merge branch 'for-linus' of git://git.kernel.org/pub/scm/linux/kernel/git/viro/vfs
[cascardo/linux.git] / arch / x86 / include / asm / futex.h
1 #ifndef _ASM_X86_FUTEX_H
2 #define _ASM_X86_FUTEX_H
3
4 #ifdef __KERNEL__
5
6 #include <linux/futex.h>
7 #include <linux/uaccess.h>
8
9 #include <asm/asm.h>
10 #include <asm/errno.h>
11 #include <asm/processor.h>
12 #include <asm/smap.h>
13
/*
 * Perform a futex op that maps to a single atomic read-modify-write
 * instruction on the user word at *uaddr (XCHG for FUTEX_OP_SET,
 * LOCK XADD for FUTEX_OP_ADD).  'oldval' receives the word's previous
 * value; 'ret' stays 0 on success (pre-loaded via the "1" (0) input
 * constraint).  A fault on the user access is routed through the
 * exception table to the .fixup stub at 3:, which stores -EFAULT into
 * 'ret' and resumes at 2:.  ASM_STAC/ASM_CLAC open and close the SMAP
 * user-access window around the instruction.
 */
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg)     \
	asm volatile("\t" ASM_STAC "\n"                         \
		     "1:\t" insn "\n"                           \
		     "2:\t" ASM_CLAC "\n"                       \
		     "\t.section .fixup,\"ax\"\n"               \
		     "3:\tmov\t%3, %1\n"                        \
		     "\tjmp\t2b\n"                              \
		     "\t.previous\n"                            \
		     _ASM_EXTABLE(1b, 3b)                       \
		     : "=r" (oldval), "=r" (ret), "+m" (*uaddr) \
		     : "i" (-EFAULT), "0" (oparg), "1" (0))
25
/*
 * Perform a futex op (OR/ANDN/XOR) that has no single-instruction
 * atomic form, via a load + LOCK CMPXCHG retry loop:
 *   1: load *uaddr into EAX ('oldval'), copy it into the scratch
 *      register 'tem', apply 'insn' to 'tem';
 *   2: LOCK CMPXCHG 'tem' back into *uaddr; if another task changed
 *      the word meanwhile (ZF clear) retry from 1:.
 * A fault at either user access (1: or 2:) jumps to the .fixup stub
 * at 4:, which stores -EFAULT into 'ret'; otherwise 'ret' remains 0
 * via the "1" (0) constraint.  ASM_STAC/ASM_CLAC bracket the user
 * accesses for SMAP.
 *
 * NOTE: this macro references 'tem' (operand %3) without taking it as
 * a parameter — the caller must declare a local int named 'tem'.
 */
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg)     \
	asm volatile("\t" ASM_STAC "\n"                         \
		     "1:\tmovl  %2, %0\n"                       \
		     "\tmovl\t%0, %3\n"                         \
		     "\t" insn "\n"                             \
		     "2:\t" LOCK_PREFIX "cmpxchgl %3, %2\n"     \
		     "\tjnz\t1b\n"                              \
		     "3:\t" ASM_CLAC "\n"                       \
		     "\t.section .fixup,\"ax\"\n"               \
		     "4:\tmov\t%5, %1\n"                        \
		     "\tjmp\t3b\n"                              \
		     "\t.previous\n"                            \
		     _ASM_EXTABLE(1b, 4b)                       \
		     _ASM_EXTABLE(2b, 4b)                       \
		     : "=&a" (oldval), "=&r" (ret),             \
		       "+m" (*uaddr), "=&r" (tem)               \
		     : "r" (oparg), "i" (-EFAULT), "1" (0))
43
44 static inline int futex_atomic_op_inuser(int encoded_op, u32 __user *uaddr)
45 {
46         int op = (encoded_op >> 28) & 7;
47         int cmp = (encoded_op >> 24) & 15;
48         int oparg = (encoded_op << 8) >> 20;
49         int cmparg = (encoded_op << 20) >> 20;
50         int oldval = 0, ret, tem;
51
52         if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
53                 oparg = 1 << oparg;
54
55         if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
56                 return -EFAULT;
57
58 #if defined(CONFIG_X86_32) && !defined(CONFIG_X86_BSWAP)
59         /* Real i386 machines can only support FUTEX_OP_SET */
60         if (op != FUTEX_OP_SET && boot_cpu_data.x86 == 3)
61                 return -ENOSYS;
62 #endif
63
64         pagefault_disable();
65
66         switch (op) {
67         case FUTEX_OP_SET:
68                 __futex_atomic_op1("xchgl %0, %2", ret, oldval, uaddr, oparg);
69                 break;
70         case FUTEX_OP_ADD:
71                 __futex_atomic_op1(LOCK_PREFIX "xaddl %0, %2", ret, oldval,
72                                    uaddr, oparg);
73                 break;
74         case FUTEX_OP_OR:
75                 __futex_atomic_op2("orl %4, %3", ret, oldval, uaddr, oparg);
76                 break;
77         case FUTEX_OP_ANDN:
78                 __futex_atomic_op2("andl %4, %3", ret, oldval, uaddr, ~oparg);
79                 break;
80         case FUTEX_OP_XOR:
81                 __futex_atomic_op2("xorl %4, %3", ret, oldval, uaddr, oparg);
82                 break;
83         default:
84                 ret = -ENOSYS;
85         }
86
87         pagefault_enable();
88
89         if (!ret) {
90                 switch (cmp) {
91                 case FUTEX_OP_CMP_EQ:
92                         ret = (oldval == cmparg);
93                         break;
94                 case FUTEX_OP_CMP_NE:
95                         ret = (oldval != cmparg);
96                         break;
97                 case FUTEX_OP_CMP_LT:
98                         ret = (oldval < cmparg);
99                         break;
100                 case FUTEX_OP_CMP_GE:
101                         ret = (oldval >= cmparg);
102                         break;
103                 case FUTEX_OP_CMP_LE:
104                         ret = (oldval <= cmparg);
105                         break;
106                 case FUTEX_OP_CMP_GT:
107                         ret = (oldval > cmparg);
108                         break;
109                 default:
110                         ret = -ENOSYS;
111                 }
112         }
113         return ret;
114 }
115
/*
 * Atomically compare-and-exchange the user word at @uaddr: if it still
 * holds @oldval, replace it with @newval.  The value actually found at
 * @uaddr (whether or not the exchange happened) is stored through @uval
 * — CMPXCHG leaves it in EAX, captured by the "=a" (oldval) output.
 *
 * Returns 0 on success, -EFAULT if the user access faulted (the .fixup
 * stub at 3: overwrites 'ret' and resumes at 2:), or -ENOSYS on a real
 * 386, which has no CMPXCHG instruction.  STAC/CLAC bracket the user
 * access for SMAP; the "memory" clobber orders the asm against
 * surrounding memory accesses.
 */
static inline int futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
						u32 oldval, u32 newval)
{
	int ret = 0;

#if defined(CONFIG_X86_32) && !defined(CONFIG_X86_BSWAP)
	/* Real i386 machines have no cmpxchg instruction */
	if (boot_cpu_data.x86 == 3)
		return -ENOSYS;
#endif

	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
		return -EFAULT;

	asm volatile("\t" ASM_STAC "\n"
		     "1:\t" LOCK_PREFIX "cmpxchgl %4, %2\n"
		     "2:\t" ASM_CLAC "\n"
		     "\t.section .fixup, \"ax\"\n"
		     "3:\tmov     %3, %0\n"
		     "\tjmp     2b\n"
		     "\t.previous\n"
		     _ASM_EXTABLE(1b, 3b)
		     : "+r" (ret), "=a" (oldval), "+m" (*uaddr)
		     : "i" (-EFAULT), "r" (newval), "1" (oldval)
		     : "memory"
	);

	*uval = oldval;
	return ret;
}
146
147 #endif
148 #endif /* _ASM_X86_FUTEX_H */