/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_BITOPS_LLSC_H
#define __ASM_SH_BITOPS_LLSC_H

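/*
 * Atomic bitops built on the SH-4A LL/SC instruction pair: movli.l loads
 * the word and opens a linked sequence, movco.l stores the updated word
 * only if the sequence was not broken, and "bf 1b" retries the whole
 * read-modify-write otherwise.  Both instructions operate on r0, hence
 * the "z" register constraint on the temporary.
 */

/* Atomically set bit @nr in the bitmap at @addr. */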
static inline void set_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%1, %0 ! set_bit \n\t"
		"or %2, %0 \n\t"
		"movco.l %0, @%1 \n\t"
		"bf 1b \n\t"
		: "=&z" (tmp)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);
}

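/* Atomically clear bit @nr in the bitmap at @addr (ANDs in the inverted mask). */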
static inline void clear_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%1, %0 ! clear_bit \n\t"
		"and %2, %0 \n\t"
		"movco.l %0, @%1 \n\t"
		"bf 1b \n\t"
		: "=&z" (tmp)
		: "r" (a), "r" (~mask)
		: "t", "memory"
	);
}

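/* Atomically toggle (XOR) bit @nr in the bitmap at @addr. */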
static inline void change_bit(int nr, volatile void *addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%1, %0 ! change_bit \n\t"
		"xor %2, %0 \n\t"
		"movco.l %0, @%1 \n\t"
		"bf 1b \n\t"
		: "=&z" (tmp)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);
}

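/*
 * Atomically set bit @nr in the bitmap at @addr and return non-zero if it
 * was already set: the old word is copied to %1 before the OR and masked
 * down once the store has succeeded.
 */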
static inline int test_and_set_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%2, %0 ! test_and_set_bit \n\t"
		"mov %0, %1 \n\t"
		"or %3, %0 \n\t"
		"movco.l %0, @%2 \n\t"
		"bf 1b \n\t"
		"and %3, %1 \n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);

	return retval != 0;
}

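/*
 * Atomically clear bit @nr in the bitmap at @addr and return non-zero if
 * it was previously set.  The trailing synco is the SH-4A memory barrier,
 * ordering the update against subsequent accesses.
 */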
static inline int test_and_clear_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%2, %0 ! test_and_clear_bit \n\t"
		"mov %0, %1 \n\t"
		"and %4, %0 \n\t"
		"movco.l %0, @%2 \n\t"
		"bf 1b \n\t"
		"and %3, %1 \n\t"
		"synco \n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (a), "r" (mask), "r" (~mask)
		: "t", "memory"
	);

	return retval != 0;
}

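/*
 * Atomically toggle bit @nr in the bitmap at @addr and return non-zero if
 * it was previously set, again followed by a synco barrier.
 */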
static inline int test_and_change_bit(int nr, volatile void *addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;
	mask = 1 << (nr & 0x1f);

	__asm__ __volatile__ (
		"1: \n\t"
		"movli.l @%2, %0 ! test_and_change_bit \n\t"
		"mov %0, %1 \n\t"
		"xor %3, %0 \n\t"
		"movco.l %0, @%2 \n\t"
		"bf 1b \n\t"
		"and %3, %1 \n\t"
		"synco \n\t"
		: "=&z" (tmp), "=&r" (retval)
		: "r" (a), "r" (mask)
		: "t", "memory"
	);

	return retval != 0;
}

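/* The non-atomic __set_bit() and friends come from the generic header. */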
#include <asm-generic/bitops/non-atomic.h>

#endif /* __ASM_SH_BITOPS_LLSC_H */