/* SPDX-License-Identifier: GPL-2.0 */
#include <asm/assembler.h>
#include <asm/unwind.h>

#if __LINUX_ARM_ARCH__ >= 6
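/* ARMv6+: atomicity comes from ldrex/strex retry loops rather than masking IRQs. */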
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strbne	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
	.arch_extension	mp
	ALT_SMP(W(pldw)	[r1])
	ALT_UP(W(nop))
#endif
	mov	r3, r2, lsl r3
1:	ldrex	r2, [r1]
	\instr	r2, r2, r3
	strex	r0, r2, [r1]
	cmp	r0, #0
	bne	1b
	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
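
/*
 * Illustrative instantiations (a sketch, not part of this file; the
 * real wrappers live in setbit.S, clearbit.S and changebit.S):
 *
 *	bitop	_set_bit, orr
 *	bitop	_clear_bit, bic
 *	bitop	_change_bit, eor
 */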

	.macro	__testop, name, instr, store, barrier
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strbne	r1, [ip]		@ assert word-aligned
	mov	r2, #1
	and	r3, r0, #31		@ Get bit offset
	mov	r0, r0, lsr #5
	add	r1, r1, r0, lsl #2	@ Get word offset
	mov	r3, r2, lsl r3		@ create mask
	\barrier
#if __LINUX_ARM_ARCH__ >= 7 && defined(CONFIG_SMP)
	.arch_extension	mp
	ALT_SMP(W(pldw)	[r1])
	ALT_UP(W(nop))
#endif
1:	ldrex	r2, [r1]
	ands	r0, r2, r3		@ save old value of bit
	\instr	r2, r2, r3		@ toggle bit
	strex	ip, r2, [r1]
	cmp	ip, #0
	bne	1b
	\barrier
	cmp	r0, #0
	movne	r0, #1
2:	bx	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm

	.macro	testop, name, instr, store
	__testop \name, \instr, \store, smp_dmb
	.endm

	.macro	sync_testop, name, instr, store
	__testop \name, \instr, \store, __smp_dmb
	.endm
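
/*
 * Illustrative instantiation (a sketch; cf. testsetbit.S, which is
 * where wrappers like these actually live):
 *
 *	testop		_test_and_set_bit, orreq, streq
 *	sync_testop	_sync_test_and_set_bit, orreq, streq
 */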
#else
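/* Pre-ARMv6: no ldrex/strex, so atomicity is provided by disabling interrupts. */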
	.macro	bitop, name, instr
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strbne	r1, [ip]		@ assert word-aligned
	and	r2, r0, #31
	mov	r0, r0, lsr #5
	mov	r3, #1
	mov	r3, r3, lsl r2
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]
	\instr	r2, r2, r3
	str	r2, [r1, r0, lsl #2]
	restore_irqs ip
	ret	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm

/**
 * testop - implement a test_and_xxx_bit operation.
 * @name: name of the entry point to generate
 * @instr: operational instruction
 * @store: store instruction
 *
 * Note: we can trivially conditionalise the store instruction
 * to avoid dirtying the data cache.
 */
	.macro	testop, name, instr, store
ENTRY(	\name		)
UNWIND(	.fnstart	)
	ands	ip, r1, #3
	strbne	r1, [ip]		@ assert word-aligned
	and	r3, r0, #31
	mov	r0, r0, lsr #5
	save_and_disable_irqs ip
	ldr	r2, [r1, r0, lsl #2]!
	mov	r0, #1
	tst	r2, r0, lsl r3
	\instr	r2, r2, r0, lsl r3
	\store	r2, [r1]
	moveq	r0, #0
	restore_irqs ip
	ret	lr
UNWIND(	.fnend		)
ENDPROC(\name		)
	.endm
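
/*
 * Example of the conditionalised store mentioned above (a sketch;
 * cf. testsetbit.S): with
 *
 *	testop	_test_and_set_bit, orreq, streq
 *
 * the orreq/streq pair only modifies and writes back the word when the
 * bit was previously clear, avoiding a needless cache-line dirty.
 */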
#endif