include/asm-ia64/futex.h
#ifndef _ASM_FUTEX_H
#define _ASM_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>
#include <asm/system.h>
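/*
 * __futex_atomic_op1: single-instruction atomic op on a user futex word.
 * "insn" is one ia64 instruction (e.g. xchg4) that returns the old value
 * in %1 while updating *uaddr.  The .xdata4 "__ex_table" entry registers
 * an exception fixup: if the user access at label 1 faults, the fixup
 * leaves -EFAULT in r8 and resumes at label 2, so "ret" ends up 0 on
 * success or -EFAULT on a bad user pointer.
 */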
#define __futex_atomic_op1(insn, ret, oldval, uaddr, oparg)		\
do {									\
	register unsigned long r8 __asm ("r8") = 0;			\
	__asm__ __volatile__(						\
		"	mf;;					\n"	\
		"[1:] "	insn ";;				\n"	\
		"	.xdata4 \"__ex_table\", 1b-., 2f-.	\n"	\
		"[2:]"							\
		: "+r" (r8), "=r" (oldval)				\
		: "r" (uaddr), "r" (oparg)				\
		: "memory");						\
	ret = r8;							\
} while (0)
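/*
 * __futex_atomic_op2: read-modify-write on a user futex word for ops
 * with no single-instruction form (add/or/and/xor).  It loads the old
 * value with ld4, applies "insn" to compute the new value, then tries
 * to publish it with cmpxchg4.acq against ar.ccv, retrying if another
 * writer changed the word in between.  A fault on either user access
 * lands in r8 (-EFAULT) via the __ex_table fixups and aborts the loop.
 */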
#define __futex_atomic_op2(insn, ret, oldval, uaddr, oparg)		\
do {									\
	register unsigned long r8 __asm ("r8") = 0;			\
	int val, newval;						\
	do {								\
		__asm__ __volatile__(					\
			"	mf;;				  \n"	\
			"[1:]	ld4 %3=[%4];;			  \n"	\
			"	mov %2=%3			  \n"	\
				insn ";;			  \n"	\
			"	mov ar.ccv=%2;;			  \n"	\
			"[2:]	cmpxchg4.acq %1=[%4],%3,ar.ccv;;  \n"	\
			"	.xdata4 \"__ex_table\", 1b-., 3f-.\n"	\
			"	.xdata4 \"__ex_table\", 2b-., 3f-.\n"	\
			"[3:]"						\
			: "+r" (r8), "=r" (val), "=&r" (oldval),	\
			  "=&r" (newval)				\
			: "r" (uaddr), "r" (oparg)			\
			: "memory");					\
		if (unlikely (r8))					\
			break;						\
	} while (unlikely (val != oldval));				\
	ret = r8;							\
} while (0)
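/*
 * futex_atomic_op_inuser: decode an encoded FUTEX_OP word (operation,
 * comparison, operation argument and comparison argument fields), apply
 * the operation atomically to *uaddr with page faults disabled, then
 * report whether the old value satisfies the requested comparison.
 * Returns a negative errno on failure, otherwise the 0/1 result of the
 * comparison.
 */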
static inline int
futex_atomic_op_inuser (int encoded_op, int __user *uaddr)
{
	int op = (encoded_op >> 28) & 7;
	int cmp = (encoded_op >> 24) & 15;
	int oparg = (encoded_op << 8) >> 20;
	int cmparg = (encoded_op << 20) >> 20;
	int oldval = 0, ret;
	if (encoded_op & (FUTEX_OP_OPARG_SHIFT << 28))
		oparg = 1 << oparg;

	if (! access_ok (VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

	pagefault_disable();

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op1("xchg4 %1=[%2],%3", ret, oldval, uaddr,
				   oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op2("add %3=%3,%5", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op2("or %3=%3,%5", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op2("and %3=%3,%5", ret, oldval, uaddr,
				   ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op2("xor %3=%3,%5", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	pagefault_enable();

	if (!ret) {
		switch (cmp) {
		case FUTEX_OP_CMP_EQ: ret = (oldval == cmparg); break;
		case FUTEX_OP_CMP_NE: ret = (oldval != cmparg); break;
		case FUTEX_OP_CMP_LT: ret = (oldval < cmparg); break;
		case FUTEX_OP_CMP_GE: ret = (oldval >= cmparg); break;
		case FUTEX_OP_CMP_LE: ret = (oldval <= cmparg); break;
		case FUTEX_OP_CMP_GT: ret = (oldval > cmparg); break;
		default: ret = -ENOSYS;
		}
	}
	return ret;
}
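/*
 * futex_atomic_cmpxchg_inatomic: if *uaddr == oldval, atomically replace
 * it with newval.  Returns the value found at *uaddr, or -EFAULT if the
 * user access faults (the __ex_table fixup drops -EFAULT into r8 and
 * skips past the cmpxchg).  The "rO" constraint also accepts the
 * constant zero for oldval.
 */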
static inline int
futex_atomic_cmpxchg_inatomic(int __user *uaddr, int oldval, int newval)
{
	if (!access_ok(VERIFY_WRITE, uaddr, sizeof(int)))
		return -EFAULT;

	{
		register unsigned long r8 __asm ("r8");
		__asm__ __volatile__(
			"	mf;;					\n"
			"	mov ar.ccv=%3;;				\n"
			"[1:]	cmpxchg4.acq %0=[%1],%2,ar.ccv		\n"
			"	.xdata4 \"__ex_table\", 1b-., 2f-.	\n"
			"[2:]"
			: "=r" (r8)
			: "r" (uaddr), "r" (newval),
			  "rO" ((long) (unsigned) oldval)
			: "memory");
		return r8;
	}
}

#endif /* _ASM_FUTEX_H */