/*
 * arch/arm/lib/bitops.h
 * (source tree: linux/fpc-iii.git, blob 7d807cfd8ef57ed2bdde29d98ddb6a7094f725f7;
 *  page was captured under the unrelated commit "mm: hugetlb: fix hugepage
 *  memory leak caused by wrong reserve count")
 */
#include <asm/assembler.h>
#include <asm/unwind.h>

#if __LINUX_ARM_ARCH__ >= 6
/*
 * bitop - generate an atomic bit operation (ARMv6+ exclusive-monitor path).
 * \name:  symbol to emit (e.g. _set_bit)
 * \instr: ALU instruction applied to the word (orr/bic/eor)
 *
 * In:  r0 = bit number, r1 = pointer to word array (must be word-aligned)
 * Out: nothing; clobbers r0-r3, ip, flags.
 */
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
	.arch_extension	mp
	ALT_SMP(W(pldw)	[r1])		@ preload for write on SMP
	ALT_UP(W(nop))
#endif
	mov	r3, r2, lsl r3		@ mask = 1 << bit
1:	ldrex	r2, [r1]		@ load-exclusive, retry on contention
	\instr	r2, r2, r3
	strex	r0, r2, [r1]
	cmp	r0, #0			@ store-exclusive failed?
	bne	1b
	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
/*
 * testop - generate an atomic test-and-modify bit operation
 * (ARMv6+ exclusive-monitor path).
 * \name:  symbol to emit (e.g. _test_and_set_bit)
 * \instr: ALU instruction applied to the word (orr/bic/eor)
 * \store: unused on this path (kept for signature parity with the
 *         pre-v6 variant below)
 *
 * In:  r0 = bit number, r1 = pointer to word array (must be word-aligned)
 * Out: r0 = old bit value (0 or 1); clobbers r2, r3, ip, flags.
 * Barriers before and after the update give full-barrier semantics.
 */
	.macro	testop, name, instr, store
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
	mov	r3, r2, lsl r3		@ create mask
	smp_dmb
#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
	.arch_extension	mp
	ALT_SMP(W(pldw)	[r1])		@ preload for write on SMP
	ALT_UP(W(nop))
#endif
1:	ldrex	r2, [r1]
	ands	r0, r2, r3		@ save old value of bit
	\instr	r2, r2, r3		@ toggle bit
	strex	ip, r2, [r1]
	cmp	ip, #0			@ store-exclusive failed?
	bne	1b
	smp_dmb
	cmp	r0, #0
	movne	r0, #1			@ normalise old value to 0/1
2:	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
#else
/*
 * bitop - generate a bit operation for pre-ARMv6 cores, which lack
 * ldrex/strex: atomicity is achieved by disabling interrupts around a
 * plain load/modify/store (UP-only; these cores have no SMP support).
 * \name:  symbol to emit
 * \instr: ALU instruction applied to the word (orr/bic/eor)
 *
 * In:  r0 = bit number, r1 = pointer to word array (must be word-aligned)
 * Out: nothing; clobbers r0, r2, r3, ip, flags.
 */
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r2, r0, #31		@ bit offset within word
	mov	r0, r0, lsr #5		@ word index
	mov	r3, #1
	mov	r3, r3, lsl r2		@ mask = 1 << bit
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]
	\instr	r2, r2, r3
	str	r2, [r1, r0, lsl #2]
	restore_irqs ip
	ret	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
/**
 * testop - implement a test_and_xxx_bit operation.
 * @instr: operational instruction
 * @store: store instruction
 *
 * Note: we can trivially conditionalise the store instruction
 * to avoid dirtying the data cache.
 */
/*
 * Pre-ARMv6 testop: interrupt-disable critical section instead of
 * ldrex/strex (see comment above for \instr / \store).
 *
 * In:  r0 = bit number, r1 = pointer to word array (must be word-aligned)
 * Out: r0 = old bit value (0 or 1); clobbers r1-r3, ip, flags.
 */
	.macro	testop, name, instr, store
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r3, r0, #31		@ bit offset within word
	mov	r0, r0, lsr #5		@ word index
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]!	@ load word, r1 = &word
	mov	r0, #1
	tst	r2, r0, lsl r3		@ test old bit (sets Z)
	\instr	r2, r2, r0, lsl r3	@ apply operation
	\store	r2, [r1]		@ conditional store per \store
	moveq	r0, #0			@ r0 = old bit value (0/1)
	restore_irqs ip
	ret	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
#endif