arch/arm/lib/bitops.h
#include <asm/assembler.h>
#include <asm/unwind.h>
#include <asm/export.h>

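/*
 * Register usage (per the ARM calling convention used by the C callers):
 * r0 holds the bit number and r1 a word-aligned pointer to the bitmap.
 * Both macros below assert the alignment and split r0 into a word offset
 * and a bit offset.
 */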
#if __LINUX_ARM_ARCH__ >= 6
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
	.arch_extension	mp
	ALT_SMP(W(pldw)	[r1])
	ALT_UP(W(nop))
#endif
	mov	r3, r2, lsl r3
1:	ldrex	r2, [r1]
	\instr	r2, r2, r3
	strex	r0, r2, [r1]
	cmp	r0, #0
	bne	1b
	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
EXPORT_SYMBOL(\name	)
	.endm
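
/*
 * Instantiation sketch (illustration only, not part of this header): the
 * sibling sources in arch/arm/lib/ include this file and expand the macro
 * along these lines:
 *
 *	#include "bitops.h"
 *		.text
 *	bitop	_set_bit, orr		@ setbit.S
 *	bitop	_clear_bit, bic		@ clearbit.S
 *	bitop	_change_bit, eor	@ changebit.S
 */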

	.macro	testop, name, instr, store
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
	mov	r3, r2, lsl r3		@ create mask
	smp_dmb
#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
	.arch_extension	mp
	ALT_SMP(W(pldw)	[r1])
	ALT_UP(W(nop))
#endif
1:	ldrex	r2, [r1]
	ands	r0, r2, r3		@ save old value of bit
	\instr	r2, r2, r3		@ toggle bit
	strex	ip, r2, [r1]
	cmp	ip, #0
	bne	1b
	smp_dmb
	cmp	r0, #0
	movne	r0, #1
2:	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
EXPORT_SYMBOL(\name	)
	.endm
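
/*
 * Instantiation sketch for the test-and-modify entry points (illustration
 * only; the operands follow the pattern used by the sibling sources):
 *
 *	testop	_test_and_set_bit, orreq, streq		@ testsetbit.S
 *	testop	_test_and_change_bit, eor, str		@ testchangebit.S
 *
 * On return r0 is 0 if the bit was previously clear, 1 if it was set.
 */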
#else
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r2, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5		@ Get word offset
	mov	r3, #1
	mov	r3, r3, lsl r2		@ create mask
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]
	\instr	r2, r2, r3
	str	r2, [r1, r0, lsl #2]
	restore_irqs ip
	ret	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
EXPORT_SYMBOL(\name	)
	.endm

/**
 * testop - implement a test_and_xxx_bit operation.
 * @name: name of the entry point to generate
 * @instr: operational instruction
 * @store: store instruction
 *
 * Note: we can trivially conditionalise the store instruction
 * to avoid dirtying the data cache.
 */
	.macro	testop, name, instr, store
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5		@ Get word offset
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]!	@ load word, leave r1 pointing at it
	mov	r0, #1
	tst	r2, r0, lsl r3		@ test old value of bit
	\instr	r2, r2, r0, lsl r3	@ toggle bit
	\store	r2, [r1]
	moveq	r0, #0			@ return 0 if bit was clear, else 1
	restore_irqs ip
	ret	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
EXPORT_SYMBOL(\name	)
	.endm
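
/*
 * As the note above says, the store can be made conditional on the tst
 * result so an unchanged word is never written back, e.g. (sketch,
 * following the pattern of testclearbit.S):
 *
 *	testop	_test_and_clear_bit, bicne, strne
 *
 * The bic and the store then only execute when the bit was set, avoiding
 * a dirty cache line when clearing a bit that is already clear.
 */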
#endif