/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_SH_ATOMIC_GRB_H
#define __ASM_SH_ATOMIC_GRB_H

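/*
 * These helpers rely on the SH "gUSA" software atomicity scheme rather
 * than on atomic instructions. Roughly: r0 is pointed at the end of the
 * critical sequence and r15 is loaded with its negative byte size
 * (LOGIN; -6 covers the three 16-bit load/op/store instructions below).
 * If an exception or interrupt arrives while r15 holds that negative
 * value, the kernel's return path is expected to restart the sequence
 * at r0 + r15, so the read-modify-write appears atomic. Restoring the
 * saved stack pointer (LOGOUT) closes the region.
 */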
#define ATOMIC_OP(op) \
static inline void atomic_##op(int i, atomic_t *v) \
{ \
	int tmp; \
	\
	__asm__ __volatile__ ( \
		"   .align 2              \n\t" \
		"   mova    1f,   r0      \n\t" /* r0 = end point */ \
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */ \
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */ \
		"   mov.l  @%1,   %0      \n\t" /* load old value */ \
		" " #op "   %2,   %0      \n\t" /* $op */ \
		"   mov.l   %0,   @%1     \n\t" /* store new value */ \
		"1: mov     r1,   r15     \n\t" /* LOGOUT */ \
		: "=&r" (tmp), \
		  "+r"  (v) \
		: "r"   (i) \
		: "memory", "r0", "r1"); \
}

#define ATOMIC_OP_RETURN(op) \
static inline int atomic_##op##_return(int i, atomic_t *v) \
{ \
	int tmp; \
	\
	__asm__ __volatile__ ( \
		"   .align 2              \n\t" \
		"   mova    1f,   r0      \n\t" /* r0 = end point */ \
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */ \
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */ \
		"   mov.l  @%1,   %0      \n\t" /* load old value */ \
		" " #op "   %2,   %0      \n\t" /* $op */ \
		"   mov.l   %0,   @%1     \n\t" /* store new value */ \
		"1: mov     r1,   r15     \n\t" /* LOGOUT */ \
		: "=&r" (tmp), \
		  "+r"  (v) \
		: "r"   (i) \
		: "memory", "r0", "r1"); \
	\
	return tmp; \
}

#define ATOMIC_FETCH_OP(op) \
static inline int atomic_fetch_##op(int i, atomic_t *v) \
{ \
	int res, tmp; \
	\
	__asm__ __volatile__ ( \
		"   .align 2              \n\t" \
		"   mova    1f,   r0      \n\t" /* r0 = end point */ \
		"   nop                   \n\t" /* keep 1: 4-byte aligned for mova */ \
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */ \
		"   mov    #-8,   r15     \n\t" /* LOGIN: r15 = size (4 insns) */ \
		"   mov.l  @%2,   %0      \n\t" /* load old value */ \
		"   mov     %0,   %1      \n\t" /* save old value */ \
		" " #op "   %3,   %0      \n\t" /* $op */ \
		"   mov.l   %0,   @%2     \n\t" /* store new value */ \
		"1: mov     r1,   r15     \n\t" /* LOGOUT */ \
		: "=&r" (tmp), "=&r" (res), "+r" (v) \
		: "r"   (i) \
		: "memory", "r0", "r1"); \
	\
	return res; \
}

#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op) ATOMIC_FETCH_OP(op)

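/*
 * Generates atomic_add()/atomic_sub(), atomic_add_return()/
 * atomic_sub_return() and atomic_fetch_add()/atomic_fetch_sub().
 */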
ATOMIC_OPS(add)
ATOMIC_OPS(sub)

#undef ATOMIC_OPS
#define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_FETCH_OP(op)

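/*
 * The bitwise ops only need the void and fetch variants;
 * atomic_{and,or,xor}_return() are not part of the atomic API.
 */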
ATOMIC_OPS(and)
ATOMIC_OPS(or)
ATOMIC_OPS(xor)

#undef ATOMIC_OPS
#undef ATOMIC_FETCH_OP
#undef ATOMIC_OP_RETURN
#undef ATOMIC_OP

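/*
 * Mask helpers: clearing a mask is an atomic AND with the complement,
 * setting a mask is an atomic OR. Both reuse the same three-instruction
 * gUSA sequence as the ops above.
 */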
static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
{
	int tmp;
	unsigned int _mask = ~mask;

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   and     %2,   %0      \n\t" /* and */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r"  (v)
		: "r"   (_mask)
		: "memory", "r0", "r1");
}

static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
{
	int tmp;

	__asm__ __volatile__ (
		"   .align 2              \n\t"
		"   mova    1f,   r0      \n\t" /* r0 = end point */
		"   mov    r15,   r1      \n\t" /* r1 = saved sp */
		"   mov    #-6,   r15     \n\t" /* LOGIN: r15 = size */
		"   mov.l  @%1,   %0      \n\t" /* load old value */
		"   or      %2,   %0      \n\t" /* or */
		"   mov.l   %0,   @%1     \n\t" /* store new value */
		"1: mov     r1,   r15     \n\t" /* LOGOUT */
		: "=&r" (tmp),
		  "+r"  (v)
		: "r"   (mask)
		: "memory", "r0", "r1");
}

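/*
 * Usage sketch (illustrative only, not part of this header): callers
 * reach these through the generic atomic API, e.g.
 *
 *	atomic_t cnt = ATOMIC_INIT(0);
 *
 *	atomic_add(5, &cnt);
 *	if (atomic_sub_return(5, &cnt) == 0)
 *		// every addition has been balanced by a subtraction
 */
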
#endif /* __ASM_SH_ATOMIC_GRB_H */