#include <asm/unwind.h>

#if __LINUX_ARM_ARCH__ >= 6
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
	.arch_extension	mp
	ALT_SMP(W(pldw)	[r1])
	ALT_UP(W(nop))
#endif
	mov	r3, r2, lsl r3
1:	ldrex	r2, [r1]
	\instr	r2, r2, r3
	strex	r0, r2, [r1]
	cmp	r0, #0
	bne	1b
	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm

	.macro	testop, name, instr, store
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
	mov	r3, r2, lsl r3		@ create mask
	smp_dmb
#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
	.arch_extension	mp
	ALT_SMP(W(pldw)	[r1])
	ALT_UP(W(nop))
#endif
1:	ldrex	r2, [r1]
	ands	r0, r2, r3		@ save old value of bit
	\instr	r2, r2, r3		@ toggle bit
	strex	ip, r2, [r1]
	cmp	ip, #0
	bne	1b
	smp_dmb
	cmp	r0, #0
	movne	r0, #1
2:	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
#else
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r2, r0, #31
	mov	r0, r0, lsr #5
	mov	r3, #1
	mov	r3, r3, lsl r2
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]
	\instr	r2, r2, r3
	str	r2, [r1, r0, lsl #2]
	restore_irqs ip
	mov	pc, lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm

/**
 * testop - implement a test_and_xxx_bit operation.
 * @name: name of the entry point to emit
 * @instr: operational instruction
 * @store: store instruction
 *
 * Note: we can trivially conditionalise the store instruction
 * to avoid dirtying the data cache.
 */
	.macro	testop, name, instr, store
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r3, r0, #31
	mov	r0, r0, lsr #5
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]!
	mov	r0, #1
	tst	r2, r0, lsl r3
	\instr	r2, r2, r0, lsl r3
	\store	r2, [r1]
	moveq	r0, #0
	restore_irqs ip
	mov	pc, lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
#endif
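
@ Usage sketch (an illustrative assumption, modelled on how mainline wrapper
@ files such as arch/arm/lib/setbit.S and testsetbit.S expand these macros):
@ each exported bit operation lives in a small file that includes this header
@ and expands one macro, passing the symbol name plus the ALU (and store)
@ instruction that implements the operation.
@
@	#include <linux/linkage.h>
@	#include <asm/assembler.h>
@	#include "bitops.h"
@		.text
@
@	bitop	_set_bit, orr				@ set_bit(): OR the mask in
@
@	testop	_test_and_set_bit, orreq, streq		@ set only if the bit was clear;
@							@ old value is returned in r0
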
#if __LINUX_ARM_ARCH__ >= 6
	.macro	bitop, instr
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
	mov	r3, r2, lsl r3
1:	ldrex	r2, [r1]
	\instr	r2, r2, r3
	strex	r0, r2, [r1]
	cmp	r0, #0
	bne	1b
	bx	lr
	.endm

	.macro	testop, instr, store
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
	mov	r3, r2, lsl r3		@ create mask
	smp_dmb
1:	ldrex	r2, [r1]
	ands	r0, r2, r3		@ save old value of bit
	\instr	r2, r2, r3		@ toggle bit
	strex	ip, r2, [r1]
	cmp	ip, #0
	bne	1b
	smp_dmb
	cmp	r0, #0
	movne	r0, #1
2:	bx	lr
	.endm
#else
	.macro	bitop, instr
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r2, r0, #31
	mov	r0, r0, lsr #5
	mov	r3, #1
	mov	r3, r3, lsl r2
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]
	\instr	r2, r2, r3
	str	r2, [r1, r0, lsl #2]
	restore_irqs ip
	mov	pc, lr
	.endm

/**
 * testop - implement a test_and_xxx_bit operation.
 * @instr: operational instruction
 * @store: store instruction
 *
 * Note: we can trivially conditionalise the store instruction
 * to avoid dirtying the data cache.
 */
	.macro	testop, instr, store
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r3, r0, #31
	mov	r0, r0, lsr #5
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]!
	mov	r0, #1
	tst	r2, r0, lsl r3
	\instr	r2, r2, r0, lsl r3
	\store	r2, [r1]
	moveq	r0, #0
	restore_irqs ip
	mov	pc, lr
	.endm
#endif
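
@ Usage sketch for this older macro form (an illustrative assumption): these
@ macros take no \name argument, so the per-operation wrapper file supplies
@ the ENTRY/ENDPROC bracketing itself and expands the macro between them,
@ e.g. for set_bit() and test_and_set_bit():
@
@	#include <linux/linkage.h>
@	#include <asm/assembler.h>
@	#include "bitops.h"
@		.text
@
@	ENTRY(_set_bit)
@		bitop	orr
@	ENDPROC(_set_bit)
@
@	ENTRY(_test_and_set_bit)
@		testop	orreq, streq	@ write back only when the bit was clear
@	ENDPROC(_test_and_set_bit)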