Linux Audio

Check our new training course

Loading...
v5.4
 1/* SPDX-License-Identifier: GPL-2.0 */
 2#ifndef _ASM_GENERIC_BITOPS_LOCK_H_
 3#define _ASM_GENERIC_BITOPS_LOCK_H_
 4
 5#include <linux/atomic.h>
 6#include <linux/compiler.h>
 7#include <asm/barrier.h>
 8
 9/**
10 * test_and_set_bit_lock - Set a bit and return its old value, for lock
11 * @nr: Bit to set
12 * @addr: Address to count from
13 *
14 * This operation is atomic and provides acquire barrier semantics if
15 * the returned value is 0.
16 * It can be used to implement bit locks.
17 */
18static inline int test_and_set_bit_lock(unsigned int nr,
19					volatile unsigned long *p)
20{
21	long old;
22	unsigned long mask = BIT_MASK(nr);
23
24	p += BIT_WORD(nr);
25	if (READ_ONCE(*p) & mask)
26		return 1;
27
28	old = atomic_long_fetch_or_acquire(mask, (atomic_long_t *)p);
29	return !!(old & mask);
30}
31
32
33/**
34 * clear_bit_unlock - Clear a bit in memory, for unlock
35 * @nr: the bit to set
36 * @addr: the address to start counting from
37 *
38 * This operation is atomic and provides release barrier semantics.
39 */
40static inline void clear_bit_unlock(unsigned int nr, volatile unsigned long *p)
41{
42	p += BIT_WORD(nr);
43	atomic_long_fetch_andnot_release(BIT_MASK(nr), (atomic_long_t *)p);
44}
45
46/**
47 * __clear_bit_unlock - Clear a bit in memory, for unlock
48 * @nr: the bit to set
49 * @addr: the address to start counting from
50 *
51 * A weaker form of clear_bit_unlock() as used by __bit_lock_unlock(). If all
52 * the bits in the word are protected by this lock some archs can use weaker
53 * ops to safely unlock.
54 *
55 * See for example x86's implementation.
56 */
57static inline void __clear_bit_unlock(unsigned int nr,
58				      volatile unsigned long *p)
59{
60	unsigned long old;
61
62	p += BIT_WORD(nr);
63	old = READ_ONCE(*p);
64	old &= ~BIT_MASK(nr);
65	atomic_long_set_release((atomic_long_t *)p, old);
66}
67
68/**
69 * clear_bit_unlock_is_negative_byte - Clear a bit in memory and test if bottom
70 *                                     byte is negative, for unlock.
71 * @nr: the bit to clear
72 * @addr: the address to start counting from
73 *
74 * This is a bit of a one-trick-pony for the filemap code, which clears
75 * PG_locked and tests PG_waiters,
76 */
77#ifndef clear_bit_unlock_is_negative_byte
78static inline bool clear_bit_unlock_is_negative_byte(unsigned int nr,
79						     volatile unsigned long *p)
80{
81	long old;
82	unsigned long mask = BIT_MASK(nr);
83
84	p += BIT_WORD(nr);
85	old = atomic_long_fetch_andnot_release(mask, (atomic_long_t *)p);
86	return !!(old & BIT(7));
87}
88#define clear_bit_unlock_is_negative_byte clear_bit_unlock_is_negative_byte
89#endif
90
91#endif /* _ASM_GENERIC_BITOPS_LOCK_H_ */
v4.10.11
 
 1#ifndef _ASM_GENERIC_BITOPS_LOCK_H_
 2#define _ASM_GENERIC_BITOPS_LOCK_H_
 3
 
 
 
 
/**
 * test_and_set_bit_lock - Set a bit and return its old value, for lock
 * @nr: Bit to set
 * @addr: Address to count from
 *
 * This operation is atomic and provides acquire barrier semantics.
 * It can be used to implement bit locks.
 */
/*
 * Defers to the plain test_and_set_bit(); presumably the ordering that
 * atomic provides is at least as strong as acquire on all archs using
 * this generic fallback — NOTE(review): confirm against memory-barriers.txt.
 */
#define test_and_set_bit_lock(nr, addr)	test_and_set_bit(nr, addr)
 
 
 
 
 
 
 
 
 
 
 
 
 
13
/**
 * clear_bit_unlock - Clear a bit in memory, for unlock
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * This operation is atomic and provides release barrier semantics.
 */
#define clear_bit_unlock(nr, addr)	\
do {					\
	smp_mb__before_atomic();	\
	clear_bit(nr, addr);		\
} while (0)
26
/**
 * __clear_bit_unlock - Clear a bit in memory, for unlock
 * @nr: the bit to clear
 * @addr: the address to start counting from
 *
 * A weaker form of clear_bit_unlock() as used by __bit_lock_unlock(). If all
 * the bits in the word are protected by this lock some archs can use weaker
 * ops to safely unlock.
 *
 * See for example x86's implementation.
 */
/* In this generic version the "weaker" form is identical to clear_bit_unlock():
 * a full barrier before an atomic clear_bit(). Archs override it when they can
 * do better.
 */
#define __clear_bit_unlock(nr, addr)	\
do {					\
	smp_mb__before_atomic();	\
	clear_bit(nr, addr);		\
} while (0)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
43
44#endif /* _ASM_GENERIC_BITOPS_LOCK_H_ */
45