#include <asm/unwind.h>

#if __LINUX_ARM_ARCH__ >= 6
/*
 * bitop - emit an atomic bit operation (set/clear/change bit).
 * \name:  symbol to define
 * \instr: ALU instruction applied to the loaded word (orr/bic/eor)
 *
 * In:  r0 = bit number, r1 = base address of the bitmap.
 * ARMv6+: uses an ldrex/strex retry loop, so no IRQ masking is needed.
 * Clobbers r0-r3, ip; returns via lr.
 */
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned: a misaligned
					@ base faults here (store to 1..3)
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
	.arch_extension	mp
	ALT_SMP(W(pldw)	[r1])		@ prefetch-for-write on SMP
	ALT_UP(W(nop))
#endif
	mov	r3, r2, lsl r3		@ r3 = mask = 1 << bit offset
1:	ldrex	r2, [r1]		@ load-exclusive / store-exclusive
	\instr	r2, r2, r3		@ apply set/clear/change to the word
	strex	r0, r2, [r1]
	cmp	r0, #0			@ retry if the exclusive store failed
	bne	1b
	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
/*
 * testop - emit an atomic test_and_xxx_bit operation (ARMv6+ LL/SC form).
 * \name:  symbol to define
 * \instr: ALU instruction applied to the loaded word
 * \store: unused in the LL/SC variant (kept for a uniform macro signature)
 *
 * In:  r0 = bit number, r1 = base address of the bitmap.
 * Out: r0 = 0 or 1, the previous value of the bit.
 * Barriers before and after: test_and_xxx ops are full ordering points.
 * Clobbers r0-r3, ip; returns via lr.
 */
	.macro	testop, name, instr, store
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
	mov	r3, r2, lsl r3		@ create mask
	smp_dmb				@ order prior accesses before the RMW
#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
	.arch_extension	mp
	ALT_SMP(W(pldw)	[r1])
	ALT_UP(W(nop))
#endif
1:	ldrex	r2, [r1]
	ands	r0, r2, r3		@ save old value of bit
	\instr	r2, r2, r3		@ toggle bit
	strex	ip, r2, [r1]
	cmp	ip, #0			@ retry if the exclusive store failed
	bne	1b
	smp_dmb				@ order the RMW before later accesses
	cmp	r0, #0
	movne	r0, #1			@ normalise result to 0/1
2:	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
#else
/*
 * bitop - emit a bit operation for pre-ARMv6 cores (no ldrex/strex).
 * \name:  symbol to define
 * \instr: ALU instruction applied to the loaded word (orr/bic/eor)
 *
 * In:  r0 = bit number, r1 = base address of the bitmap.
 * Atomicity is obtained by disabling interrupts around the
 * load-modify-store (these cores are UP-only, so that is sufficient).
 * Clobbers r0, r2, r3, ip; returns via lr.
 */
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r2, r0, #31		@ r2 = bit offset within the word
	mov	r0, r0, lsr #5		@ r0 = word index
	mov	r3, #1
	mov	r3, r3, lsl r2		@ r3 = mask = 1 << bit offset
	save_and_disable_irqs ip	@ make the RMW atomic w.r.t. IRQs
	ldr	r2, [r1, r0, lsl #2]
	\instr	r2, r2, r3
	str	r2, [r1, r0, lsl #2]
	restore_irqs ip
	mov	pc, lr			@ pre-UAL return (no interworking)
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm

/**
 * testop - implement a test_and_xxx_bit operation.
 * @instr: operational instruction
 * @store: store instruction
 *
 * Note: we can trivially conditionalise the store instruction
 * to avoid dirtying the data cache.
 */
/*
 * testop - emit a test_and_xxx_bit operation for pre-ARMv6 cores.
 * \name:  symbol to define
 * \instr: ALU instruction applied to the loaded word
 * \store: store instruction (may be conditional to avoid dirtying
 *         the data cache when the word is unchanged)
 *
 * In:  r0 = bit number, r1 = base address of the bitmap.
 * Out: r0 = 0 or 1, the previous value of the bit.
 * Atomicity via IRQ disable (UP-only cores). Clobbers r0-r3, ip.
 */
	.macro	testop, name, instr, store
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strneb	r1, [ip]		@ assert word-aligned
	and	r3, r0, #31		@ r3 = bit offset within the word
	mov	r0, r0, lsr #5		@ r0 = word index
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]!	@ writeback: r1 now -> target word
	mov	r0, #1
	tst	r2, r0, lsl r3		@ Z set iff the bit was clear
	\instr	r2, r2, r3, lsl r3 is not used here; operate via r0 mask
	\store	r2, [r1]
	moveq	r0, #0			@ return old bit value (0/1)
	restore_irqs ip
	mov	pc, lr			@ pre-UAL return
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
#endif
/* SPDX-License-Identifier: GPL-2.0 */
#include <asm/assembler.h>
#include <asm/unwind.h>

#if __LINUX_ARM_ARCH__ >= 6
/*
 * bitop - emit an atomic bit operation (set/clear/change bit).
 * \name:  symbol to define
 * \instr: ALU instruction applied to the loaded word (orr/bic/eor)
 *
 * In:  r0 = bit number, r1 = base address of the bitmap.
 * ARMv6+: uses an ldrex/strex retry loop, so no IRQ masking is needed.
 * Clobbers r0-r3, ip; returns via lr.
 */
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strbne	r1, [ip]		@ assert word-aligned: a misaligned
					@ base faults here (store to 1..3)
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
	.arch_extension	mp
	ALT_SMP(W(pldw)	[r1])		@ prefetch-for-write on SMP
	ALT_UP(W(nop))
#endif
	mov	r3, r2, lsl r3		@ r3 = mask = 1 << bit offset
1:	ldrex	r2, [r1]		@ load-exclusive / store-exclusive
	\instr	r2, r2, r3		@ apply set/clear/change to the word
	strex	r0, r2, [r1]
	cmp	r0, #0			@ retry if the exclusive store failed
	bne	1b
	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
/*
 * testop - emit an atomic test_and_xxx_bit operation (ARMv6+ LL/SC form).
 * \name:  symbol to define
 * \instr: ALU instruction applied to the loaded word
 * \store: unused in the LL/SC variant (kept for a uniform macro signature)
 *
 * In:  r0 = bit number, r1 = base address of the bitmap.
 * Out: r0 = 0 or 1, the previous value of the bit.
 * Barriers before and after: test_and_xxx ops are full ordering points.
 * Clobbers r0-r3, ip; returns via lr.
 */
	.macro	testop, name, instr, store
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strbne	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
	mov	r3, r2, lsl r3		@ create mask
	smp_dmb				@ order prior accesses before the RMW
#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
	.arch_extension	mp
	ALT_SMP(W(pldw)	[r1])
	ALT_UP(W(nop))
#endif
1:	ldrex	r2, [r1]
	ands	r0, r2, r3		@ save old value of bit
	\instr	r2, r2, r3		@ toggle bit
	strex	ip, r2, [r1]
	cmp	ip, #0			@ retry if the exclusive store failed
	bne	1b
	smp_dmb				@ order the RMW before later accesses
	cmp	r0, #0
	movne	r0, #1			@ normalise result to 0/1
2:	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
#else
/*
 * bitop - emit a bit operation for pre-ARMv6 cores (no ldrex/strex).
 * \name:  symbol to define
 * \instr: ALU instruction applied to the loaded word (orr/bic/eor)
 *
 * In:  r0 = bit number, r1 = base address of the bitmap.
 * Atomicity is obtained by disabling interrupts around the
 * load-modify-store (these cores are UP-only, so that is sufficient).
 * Clobbers r0, r2, r3, ip; returns via lr.
 */
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strbne	r1, [ip]		@ assert word-aligned
	and	r2, r0, #31		@ r2 = bit offset within the word
	mov	r0, r0, lsr #5		@ r0 = word index
	mov	r3, #1
	mov	r3, r3, lsl r2		@ r3 = mask = 1 << bit offset
	save_and_disable_irqs ip	@ make the RMW atomic w.r.t. IRQs
	ldr	r2, [r1, r0, lsl #2]
	\instr	r2, r2, r3
	str	r2, [r1, r0, lsl #2]
	restore_irqs ip
	ret	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm

/**
 * testop - implement a test_and_xxx_bit operation.
 * @instr: operational instruction
 * @store: store instruction
 *
 * Note: we can trivially conditionalise the store instruction
 * to avoid dirtying the data cache.
 */
/*
 * testop - emit a test_and_xxx_bit operation for pre-ARMv6 cores.
 * \name:  symbol to define
 * \instr: ALU instruction applied to the loaded word
 * \store: store instruction (may be conditional to avoid dirtying
 *         the data cache when the word is unchanged)
 *
 * In:  r0 = bit number, r1 = base address of the bitmap.
 * Out: r0 = 0 or 1, the previous value of the bit.
 * Atomicity via IRQ disable (UP-only cores). Clobbers r0-r3, ip.
 */
	.macro	testop, name, instr, store
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strbne	r1, [ip]		@ assert word-aligned
	and	r3, r0, #31		@ r3 = bit offset within the word
	mov	r0, r0, lsr #5		@ r0 = word index
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]!	@ writeback: r1 now -> target word
	mov	r0, #1
	tst	r2, r0, lsl r3		@ Z set iff the bit was clear
	\instr	r2, r2, r0, lsl r3	@ apply the operation to the word
	\store	r2, [r1]
	moveq	r0, #0			@ return old bit value (0/1)
	restore_irqs ip
	ret	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
#endif