Add the rt linux 4.1.3-rt3 as base
[kvmfornfv.git] / kernel / arch / ia64 / include / asm / futex.h
1 #ifndef _ASM_FUTEX_H
2 #define _ASM_FUTEX_H
3
4 #include <linux/futex.h>
5 #include <linux/uaccess.h>
6 #include <asm/errno.h>
7
/*
 * __futex_atomic_op1 - perform a futex op that maps onto a SINGLE ia64
 * atomic instruction (e.g. "xchg4 %1=[%2],%3" for FUTEX_OP_SET).
 *
 * Operand map for @insn: %1 = @oldval (value previously at *@uaddr),
 * %2 = @uaddr, %3 = @oparg.  The leading "mf" is a memory fence
 * ordering the access against prior memory operations.
 *
 * The ".xdata4 \"__ex_table\"" directive emits an exception-table
 * entry: if the instruction at local label 1 faults on the user
 * address, execution resumes at label 2 and the fixup handler leaves a
 * nonzero error code (presumably -EFAULT -- per the ia64 uaccess fixup
 * convention; confirm against arch/ia64 extable code) in hard register
 * r8, which is preset to 0 here.  @ret therefore ends up 0 on success
 * or the fault code on failure.
 */
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg) \
do {                                                                    \
        register unsigned long r8 __asm ("r8") = 0;                     \
        __asm__ __volatile__(                                           \
                "       mf;;                                    \n"     \
                "[1:] " insn ";;                                \n"     \
                "       .xdata4 \"__ex_table\", 1b-., 2f-.      \n"     \
                "[2:]"                                                  \
                : "+r" (r8), "=r" (oldval)                              \
                : "r" (uaddr), "r" (oparg)                              \
                : "memory");                                            \
        ret = r8;                                                       \
} while (0)
21
/*
 * __futex_atomic_op2 - perform a futex op that needs a load / modify /
 * compare-and-exchange sequence (ADD, OR, ANDN, XOR).
 *
 * Operand map: %0 = r8 (fault code), %1 = val (value cmpxchg observed
 * at *@uaddr), %2 = @oldval, %3 = newval, %4 = @uaddr, %5 = @oparg.
 *
 * Each iteration: ld4 reads the current word into newval (%3), a copy
 * is kept in oldval (%2), @insn computes the new value in place in %3,
 * ar.ccv is loaded with the expected old value, and cmpxchg4.acq
 * stores %3 only if *@uaddr still equals ar.ccv, returning the
 * observed value in val (%1).  The C loop retries until the cmpxchg
 * saw exactly the value we loaded (val == oldval), i.e. no other CPU
 * raced in between -- a classic CAS loop.
 *
 * Both the load (label 1) and the cmpxchg (label 2) get __ex_table
 * entries redirecting a user-address fault to label 3; the fixup
 * leaves a nonzero code in r8 (preset to 0), which aborts the retry
 * loop and becomes @ret.
 */
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg) \
do {                                                                    \
        register unsigned long r8 __asm ("r8") = 0;                     \
        int val, newval;                                                \
        do {                                                            \
                __asm__ __volatile__(                                   \
                        "       mf;;                              \n"   \
                        "[1:]   ld4 %3=[%4];;                     \n"   \
                        "       mov %2=%3                         \n"   \
                                insn    ";;                       \n"   \
                        "       mov ar.ccv=%2;;                   \n"   \
                        "[2:]   cmpxchg4.acq %1=[%4],%3,ar.ccv;;  \n"   \
                        "       .xdata4 \"__ex_table\", 1b-., 3f-.\n"   \
                        "       .xdata4 \"__ex_table\", 2b-., 3f-.\n"   \
                        "[3:]"                                          \
                        : "+r" (r8), "=r" (val), "=&r" (oldval),        \
                           "=&r" (newval)                               \
                        : "r" (uaddr), "r" (oparg)                      \
                        : "memory");                                    \
                if (unlikely (r8))                                      \
                        break;                                          \
        } while (unlikely (val != oldval));                             \
        ret = r8;                                                       \
} while (0)
46
47 static inline int
48 futex_atomic_op_inuser (int encoded_op, u32 __user *uaddr)
49 {
50         int op = (encoded_op >> 28) & 7;
51         int cmp = (encoded_op >> 24) & 15;
52         int oparg = (encoded_op << 8) >> 20;
53         int cmparg = (encoded_op << 20) >> 20;
54         int oldval = 0, ret;
55         if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
56                 oparg = 1 << oparg;
57
58         if (! access_ok (VERIFY_WRITE, uaddr, sizeof(u32)))
59                 return -EFAULT;
60
61         pagefault_disable();
62
63         switch (op) {
64         case FUTEX_OP_SET:
65                 __futex_atomic_op1("xchg4 %1=[%2],%3", ret, oldval, uaddr,
66                                    oparg);
67                 break;
68         case FUTEX_OP_ADD:
69                 __futex_atomic_op2("add %3=%3,%5", ret, oldval, uaddr, oparg);
70                 break;
71         case FUTEX_OP_OR:
72                 __futex_atomic_op2("or %3=%3,%5", ret, oldval, uaddr, oparg);
73                 break;
74         case FUTEX_OP_ANDN:
75                 __futex_atomic_op2("and %3=%3,%5", ret, oldval, uaddr,
76                                    ~oparg);
77                 break;
78         case FUTEX_OP_XOR:
79                 __futex_atomic_op2("xor %3=%3,%5", ret, oldval, uaddr, oparg);
80                 break;
81         default:
82                 ret = -ENOSYS;
83         }
84
85         pagefault_enable();
86
87         if (!ret) {
88                 switch (cmp) {
89                 case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
90                 case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
91                 case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
92                 case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
93                 case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
94                 case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
95                 default: ret = -ENOSYS;
96                 }
97         }
98         return ret;
99 }
100
/*
 * futex_atomic_cmpxchg_inatomic - atomically replace *@uaddr with
 * @newval if it currently equals @oldval.
 *
 * The previously observed word is always stored through @uval so the
 * caller can distinguish "compare failed" from "succeeded".
 *
 * Returns 0 on success (whether or not the compare matched) or the
 * fault code from r8 (nonzero, presumably -EFAULT via the ia64
 * __ex_table fixup convention -- confirm against arch extable code)
 * if the user access faulted.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
                              u32 oldval, u32 newval)
{
        if (!access_ok(VERIFY_WRITE, uaddr, sizeof(u32)))
                return -EFAULT;

        /*
         * Inner block keeps the r8 hard-register variable's lifetime
         * tight to the asm; code executed between its initialization
         * and the asm could otherwise clobber r8.
         */
        {
                register unsigned long r8 __asm ("r8") = 0;
                unsigned long prev;
                /*
                 * mf orders prior accesses; ar.ccv holds the expected
                 * value; cmpxchg4.acq stores newval only on match and
                 * yields the observed word in prev.  The __ex_table
                 * entry redirects a fault at label 1 to label 2,
                 * leaving the error code in r8 (preset to 0).  "rO"
                 * lets the compiler pass oldval in a register or as
                 * the constant 0 operand.
                 */
                __asm__ __volatile__(
                        "       mf;;                                    \n"
                        "       mov ar.ccv=%4;;                         \n"
                        "[1:]   cmpxchg4.acq %1=[%2],%3,ar.ccv          \n"
                        "       .xdata4 \"__ex_table\", 1b-., 2f-.      \n"
                        "[2:]"
                        : "+r" (r8), "=&r" (prev)
                        : "r" (uaddr), "r" (newval),
                          "rO" ((long) (unsigned) oldval)
                        : "memory");
                *uval = prev;
                return r8;
        }
}
125
126 #endif /* _ASM_FUTEX_H */