@ ARM bit-operation helper macros (page-loader artifact "Loading..." removed)
#if __LINUX_ARM_ARCH__ >= 6
/*
 * bitop - implement a set/clear/change_bit style operation (ARMv6+).
 * @instr: ALU instruction applied to the word (e.g. orr, bic, eor)
 *
 * In:   r0 = bit number, r1 = base address of the bitmap
 * The strneb to a low (invalid) address deliberately faults if r1 is
 * not word-aligned.  The ldrex/strex pair retries until the exclusive
 * store succeeds, making the read-modify-write atomic.
 * Clobbers r0-r3, ip, flags.
 */
	.macro	bitop, instr
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
	mov	r3, r2, lsl r3		@ mask = 1 << bit
1:	ldrex	r2, [r1]
	\instr	r2, r2, r3
	strex	r0, r2, [r1]		@ r0 = 0 on success
	cmp	r0, #0
	bne	1b			@ lost exclusivity - retry
	bx	lr
	.endm

/*
 * testop - implement a test_and_xxx_bit operation (ARMv6+).
 * @instr: operational instruction (applied to the word with the mask)
 * @store: store instruction (unused in this variant; kept for
 *         interface parity with the pre-v6 version below)
 *
 * In:   r0 = bit number, r1 = base address of the bitmap
 * Out:  r0 = old value of the bit, normalised to 0 or 1
 * smp_dmb before and after the ldrex/strex loop provides the memory
 * barrier semantics of atomic test-and-modify operations.
 * Clobbers r0-r3, ip, flags.
 */
	.macro	testop, instr, store
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
	mov	r3, r2, lsl r3		@ create mask
	smp_dmb
1:	ldrex	r2, [r1]
	ands	r0, r2, r3		@ save old value of bit
	\instr	r2, r2, r3		@ toggle bit
	strex	ip, r2, [r1]
	cmp	ip, #0
	bne	1b			@ lost exclusivity - retry
	smp_dmb
	cmp	r0, #0
	movne	r0, #1			@ normalise old bit value to 0/1
2:	bx	lr			@ NOTE(review): label 2 appears unused here
	.endm
#else
/*
 * bitop - pre-ARMv6 (non-exclusive) variant.
 * @instr: ALU instruction applied to the word
 *
 * In:   r0 = bit number, r1 = base address of the bitmap
 * Atomicity is obtained by disabling interrupts around the plain
 * load/modify/store sequence.  Clobbers r0, r2, r3, ip, flags.
 */
	.macro	bitop, instr
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r2, r0, #31
	mov	r0, r0, lsr #5
	mov	r3, #1
	mov	r3, r3, lsl r2		@ mask = 1 << bit
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]
	\instr	r2, r2, r3
	str	r2, [r1, r0, lsl #2]
	restore_irqs ip
	mov	pc, lr
	.endm

/**
 * testop - implement a test_and_xxx_bit operation.
 * @instr: operational instruction
 * @store: store instruction
 *
 * In:   r0 = bit number, r1 = base address of the bitmap
 * Out:  r0 = old value of the bit (0 or 1)
 *
 * Note: we can trivially conditionalise the store instruction
 * to avoid dirtying the data cache.
 */
	.macro	testop, instr, store
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r3, r0, #31
	mov	r0, r0, lsr #5
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]!	@ writeback: r1 -> word address
	mov	r0, #1
	tst	r2, r0, lsl r3		@ Z set if old bit was clear
	\instr	r2, r2, r0, lsl r3
	\store	r2, [r1]
	moveq	r0, #0			@ return old bit (r0 stays 1 if set)
	restore_irqs ip
	mov	pc, lr
	.endm
#endif
#include <asm/unwind.h>

#if __LINUX_ARM_ARCH__ >= 6
/*
 * bitop - emit a complete set/clear/change_bit function (ARMv6+).
 * @name:  symbol to define (wrapped in ENTRY/ENDPROC with unwind
 *         annotations)
 * @instr: ALU instruction applied to the word (e.g. orr, bic, eor)
 *
 * In:   r0 = bit number, r1 = base address of the bitmap
 * The strneb to a low (invalid) address deliberately faults if r1 is
 * not word-aligned.  The ldrex/strex pair retries until the exclusive
 * store succeeds, making the read-modify-write atomic.
 * Clobbers r0-r3, ip, flags.
 */
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
	mov	r3, r2, lsl r3		@ mask = 1 << bit
1:	ldrex	r2, [r1]
	\instr	r2, r2, r3
	strex	r0, r2, [r1]		@ r0 = 0 on success
	cmp	r0, #0
	bne	1b			@ lost exclusivity - retry
	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm

/*
 * testop - emit a complete test_and_xxx_bit function (ARMv6+).
 * @name:  symbol to define
 * @instr: operational instruction (applied to the word with the mask)
 * @store: store instruction (unused in this variant; kept for
 *         interface parity with the pre-v6 version below)
 *
 * In:   r0 = bit number, r1 = base address of the bitmap
 * Out:  r0 = old value of the bit, normalised to 0 or 1
 * smp_dmb before and after the ldrex/strex loop provides the memory
 * barrier semantics of atomic test-and-modify operations.
 * Clobbers r0-r3, ip, flags.
 */
	.macro	testop, name, instr, store
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
	mov	r3, r2, lsl r3		@ create mask
	smp_dmb
1:	ldrex	r2, [r1]
	ands	r0, r2, r3		@ save old value of bit
	\instr	r2, r2, r3		@ toggle bit
	strex	ip, r2, [r1]
	cmp	ip, #0
	bne	1b			@ lost exclusivity - retry
	smp_dmb
	cmp	r0, #0
	movne	r0, #1			@ normalise old bit value to 0/1
2:	bx	lr			@ NOTE(review): label 2 appears unused here
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
#else
/*
 * bitop - pre-ARMv6 (non-exclusive) variant.
 * @name:  symbol to define
 * @instr: ALU instruction applied to the word
 *
 * In:   r0 = bit number, r1 = base address of the bitmap
 * Atomicity is obtained by disabling interrupts around the plain
 * load/modify/store sequence.  Clobbers r0, r2, r3, ip, flags.
 */
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r2, r0, #31
	mov	r0, r0, lsr #5
	mov	r3, #1
	mov	r3, r3, lsl r2		@ mask = 1 << bit
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]
	\instr	r2, r2, r3
	str	r2, [r1, r0, lsl #2]
	restore_irqs ip
	mov	pc, lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm

/**
 * testop - implement a test_and_xxx_bit operation.
 * @name:  symbol to define
 * @instr: operational instruction
 * @store: store instruction
 *
 * In:   r0 = bit number, r1 = base address of the bitmap
 * Out:  r0 = old value of the bit (0 or 1)
 *
 * Note: we can trivially conditionalise the store instruction
 * to avoid dirtying the data cache.
 */
	.macro	testop, name, instr, store
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r3, r0, #31
	mov	r0, r0, lsr #5
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]!	@ writeback: r1 -> word address
	mov	r0, #1
	tst	r2, r0, lsl r3		@ Z set if old bit was clear
	\instr	r2, r2, r0, lsl r3
	\store	r2, [r1]
	moveq	r0, #0			@ return old bit (r0 stays 1 if set)
	restore_irqs ip
	mov	pc, lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
#endif