/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_BITOPS_GRB_H
#define __ASM_SH_BITOPS_GRB_H

/*
 * set_bit - atomically set bit @nr in the bitmap at @addr.
 *
 * gRB (gUSA-style) atomic sequence: r0 holds the rollback end address,
 * r15 temporarily holds the negative sequence size as an "in atomic
 * region" flag; an interrupt taken inside the region restarts the
 * whole sequence, making the load/modify/store atomic on UP.
 */
static inline void set_bit(int nr, volatile void * addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* word containing the bit */
	mask = 1 << (nr & 0x1f);	/* bit within that word */

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   or      %2,   %0      \n\t" /* or */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r"  (a)
		: "r"   (mask)
		: "memory" , "r0", "r1");
}

/*
 * clear_bit - atomically clear bit @nr in the bitmap at @addr.
 *
 * Same gRB rollback sequence as set_bit(), but ANDs with the inverted
 * mask instead of ORing.
 */
static inline void clear_bit(int nr, volatile void * addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* word containing the bit */
	mask = ~(1 << (nr & 0x1f));	/* all bits set except target */
	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   and     %2,   %0      \n\t" /* and */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r"  (a)
		: "r"   (mask)
		: "memory" , "r0", "r1");
}

/*
 * change_bit - atomically toggle bit @nr in the bitmap at @addr.
 *
 * Same gRB rollback sequence as set_bit(), using XOR to flip the bit.
 */
static inline void change_bit(int nr, volatile void * addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* word containing the bit */
	mask = 1 << (nr & 0x1f);	/* bit within that word */
	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   xor     %2,   %0      \n\t" /* xor */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r"  (a)
		: "r"   (mask)
		: "memory" , "r0", "r1");
}

/*
 * test_and_set_bit - atomically set bit @nr at @addr.
 *
 * Returns nonzero (1) if the bit was already set, 0 otherwise.
 * The tst/mov #-1/negc triple converts the T flag into 0/1:
 * T = (old & mask) == 0, then negc(-1) yields !T.
 */
static inline int test_and_set_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* word containing the bit */
	mask = 1 << (nr & 0x1f);	/* bit within that word */

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova     1f,  r0      \n\t" /* r0 = end point */
		"   mov     r15,  r1      \n\t" /* r1 = saved sp */
		"   mov    #-14,  r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l   @%2,  %0      \n\t" /* load old value */
		"   mov      %0,  %1      \n\t"
		"   tst      %1,  %3      \n\t" /* T = ((*a & mask) == 0) */
		"   mov     #-1,  %1      \n\t" /* retval = -1 */
		"   negc     %1,  %1      \n\t" /* retval = (mask & *a) != 0 */
		"   or       %3,  %0      \n\t"
		"   mov.l    %0,  @%2     \n\t" /* store new value */
		"1: mov      r1,  r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r"  (a)
		: "r"   (mask)
		: "memory" , "r0", "r1" ,"t");

	return retval;
}

/*
 * test_and_clear_bit - atomically clear bit @nr at @addr.
 *
 * Returns nonzero (1) if the bit was set before clearing, 0 otherwise.
 * not_mask is precomputed outside the atomic region to keep the
 * instruction count matching the LOGIN size.
 */
static inline int test_and_clear_bit(int nr, volatile void * addr)
{
	int mask, retval, not_mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* word containing the bit */
	mask = 1 << (nr & 0x1f);	/* bit within that word */

	not_mask = ~mask;

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova     1f,  r0      \n\t" /* r0 = end point */
		"   mov     r15,  r1      \n\t" /* r1 = saved sp */
		"   mov    #-14,  r15     \n\t" /* LOGIN */
		"   mov.l   @%2,  %0      \n\t" /* load old value */
		"   mov      %0,  %1      \n\t" /* %1 = *a */
		"   tst      %1,  %3      \n\t" /* T = ((*a & mask) == 0) */
		"   mov     #-1,  %1      \n\t" /* retval = -1 */
		"   negc     %1,  %1      \n\t" /* retval = (mask & *a) != 0 */
		"   and      %4,  %0      \n\t"
		"   mov.l    %0,  @%2     \n\t" /* store new value */
		"1: mov      r1,  r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r"  (a)
		: "r"   (mask),
		  "r"   (not_mask)
		: "memory" , "r0", "r1", "t");

	return retval;
}

/*
 * test_and_change_bit - atomically toggle bit @nr at @addr.
 *
 * Returns nonzero (1) if the bit was set before toggling, 0 otherwise.
 */
static inline int test_and_change_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* word containing the bit */
	mask = 1 << (nr & 0x1f);	/* bit within that word */

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova     1f,  r0      \n\t" /* r0 = end point */
		"   mov     r15,  r1      \n\t" /* r1 = saved sp */
		"   mov    #-14,  r15     \n\t" /* LOGIN */
		"   mov.l   @%2,  %0      \n\t" /* load old value */
		"   mov      %0,  %1      \n\t" /* %1 = *a */
		"   tst      %1,  %3      \n\t" /* T = ((*a & mask) == 0) */
		"   mov     #-1,  %1      \n\t" /* retval = -1 */
		"   negc     %1,  %1      \n\t" /* retval = (mask & *a) != 0 */
		"   xor      %3,  %0      \n\t"
		"   mov.l    %0,  @%2     \n\t" /* store new value */
		"1: mov      r1,  r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r"  (a)
		: "r"   (mask)
		: "memory" , "r0", "r1", "t");

	return retval;
}

#include <asm-generic/bitops/non-atomic.h>

#endif /* __ASM_SH_BITOPS_GRB_H */
/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_BITOPS_GRB_H
#define __ASM_SH_BITOPS_GRB_H

/*
 * set_bit - atomically set bit @nr in the bitmap at @addr.
 *
 * gRB (gUSA-style) atomic sequence: r0 holds the rollback end address,
 * r15 temporarily holds the negative sequence size as an "in atomic
 * region" flag; an interrupt taken inside the region restarts the
 * whole sequence, making the load/modify/store atomic on UP.
 */
static inline void set_bit(int nr, volatile void * addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* word containing the bit */
	mask = 1 << (nr & 0x1f);	/* bit within that word */

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   or      %2,   %0      \n\t" /* or */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r"  (a)
		: "r"   (mask)
		: "memory" , "r0", "r1");
}

/*
 * clear_bit - atomically clear bit @nr in the bitmap at @addr.
 *
 * Same gRB rollback sequence as set_bit(), but ANDs with the inverted
 * mask instead of ORing.
 */
static inline void clear_bit(int nr, volatile void * addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* word containing the bit */
	mask = ~(1 << (nr & 0x1f));	/* all bits set except target */
	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   and     %2,   %0      \n\t" /* and */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r"  (a)
		: "r"   (mask)
		: "memory" , "r0", "r1");
}

/*
 * change_bit - atomically toggle bit @nr in the bitmap at @addr.
 *
 * Same gRB rollback sequence as set_bit(), using XOR to flip the bit.
 */
static inline void change_bit(int nr, volatile void * addr)
{
	int mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* word containing the bit */
	mask = 1 << (nr & 0x1f);	/* bit within that word */
	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   xor     %2,   %0      \n\t" /* xor */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r"  (a)
		: "r"   (mask)
		: "memory" , "r0", "r1");
}

/*
 * test_and_set_bit - atomically set bit @nr at @addr.
 *
 * Returns nonzero (1) if the bit was already set, 0 otherwise.
 * The tst/mov #-1/negc triple converts the T flag into 0/1:
 * T = (old & mask) == 0, then negc(-1) yields !T.
 */
static inline int test_and_set_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* word containing the bit */
	mask = 1 << (nr & 0x1f);	/* bit within that word */

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova     1f,  r0      \n\t" /* r0 = end point */
		"   mov     r15,  r1      \n\t" /* r1 = saved sp */
		"   mov    #-14,  r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l   @%2,  %0      \n\t" /* load old value */
		"   mov      %0,  %1      \n\t"
		"   tst      %1,  %3      \n\t" /* T = ((*a & mask) == 0) */
		"   mov     #-1,  %1      \n\t" /* retval = -1 */
		"   negc     %1,  %1      \n\t" /* retval = (mask & *a) != 0 */
		"   or       %3,  %0      \n\t"
		"   mov.l    %0,  @%2     \n\t" /* store new value */
		"1: mov      r1,  r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r"  (a)
		: "r"   (mask)
		: "memory" , "r0", "r1" ,"t");

	return retval;
}

/*
 * test_and_clear_bit - atomically clear bit @nr at @addr.
 *
 * Returns nonzero (1) if the bit was set before clearing, 0 otherwise.
 * not_mask is precomputed outside the atomic region to keep the
 * instruction count matching the LOGIN size.
 */
static inline int test_and_clear_bit(int nr, volatile void * addr)
{
	int mask, retval, not_mask;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* word containing the bit */
	mask = 1 << (nr & 0x1f);	/* bit within that word */

	not_mask = ~mask;

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova     1f,  r0      \n\t" /* r0 = end point */
		"   mov     r15,  r1      \n\t" /* r1 = saved sp */
		"   mov    #-14,  r15     \n\t" /* LOGIN */
		"   mov.l   @%2,  %0      \n\t" /* load old value */
		"   mov      %0,  %1      \n\t" /* %1 = *a */
		"   tst      %1,  %3      \n\t" /* T = ((*a & mask) == 0) */
		"   mov     #-1,  %1      \n\t" /* retval = -1 */
		"   negc     %1,  %1      \n\t" /* retval = (mask & *a) != 0 */
		"   and      %4,  %0      \n\t"
		"   mov.l    %0,  @%2     \n\t" /* store new value */
		"1: mov      r1,  r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r"  (a)
		: "r"   (mask),
		  "r"   (not_mask)
		: "memory" , "r0", "r1", "t");

	return retval;
}

/*
 * test_and_change_bit - atomically toggle bit @nr at @addr.
 *
 * Returns nonzero (1) if the bit was set before toggling, 0 otherwise.
 */
static inline int test_and_change_bit(int nr, volatile void * addr)
{
	int mask, retval;
	volatile unsigned int *a = addr;
	unsigned long tmp;

	a += nr >> 5;			/* word containing the bit */
	mask = 1 << (nr & 0x1f);	/* bit within that word */

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova     1f,  r0      \n\t" /* r0 = end point */
		"   mov     r15,  r1      \n\t" /* r1 = saved sp */
		"   mov    #-14,  r15     \n\t" /* LOGIN */
		"   mov.l   @%2,  %0      \n\t" /* load old value */
		"   mov      %0,  %1      \n\t" /* %1 = *a */
		"   tst      %1,  %3      \n\t" /* T = ((*a & mask) == 0) */
		"   mov     #-1,  %1      \n\t" /* retval = -1 */
		"   negc     %1,  %1      \n\t" /* retval = (mask & *a) != 0 */
		"   xor      %3,  %0      \n\t"
		"   mov.l    %0,  @%2     \n\t" /* store new value */
		"1: mov      r1,  r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "=&r" (retval),
		  "+r"  (a)
		: "r"   (mask)
		: "memory" , "r0", "r1", "t");

	return retval;
}

#include <asm-generic/bitops/non-atomic.h>

#endif /* __ASM_SH_BITOPS_GRB_H */