/*
 * Copyright IBM Corp. 1999, 2009
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 */

#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

/*
 * Force strict CPU ordering.
 * And yes, this is required on UP too when we're talking
 * to devices.
 */

static inline void mb(void)
{
#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
	/* Fast-BCR without checkpoint synchronization */
	asm volatile("bcr 14,0" : : : "memory");
#else
	asm volatile("bcr 15,0" : : : "memory");
#endif
}

#define rmb() mb()
#define wmb() mb()
#define read_barrier_depends() do { } while (0)
#define smp_mb() mb()
#define smp_rmb() rmb()
#define smp_wmb() wmb()
#define smp_read_barrier_depends() read_barrier_depends()
#define smp_mb__before_clear_bit() smp_mb()
#define smp_mb__after_clear_bit() smp_mb()

#define set_mb(var, value) do { var = value; mb(); } while (0)

#endif /* __ASM_BARRIER_H */
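
/*
 * Usage sketch (illustration only, not part of the header): a hypothetical
 * driver builds a descriptor in normal memory and then rings a device
 * doorbell.  The mb() between the two steps is what the comment above is
 * about -- even on a uniprocessor kernel the stores may be reordered as
 * seen by the device, so the barrier is still required.  struct example_dev,
 * DESC_READY, DOORBELL_GO and DOORBELL_REG are made-up names.
 */
#if 0	/* example only, never compiled */
static void example_ring_doorbell(struct example_dev *dev, u64 addr)
{
	dev->desc->addr = addr;		/* fill in the descriptor ... */
	dev->desc->flags = DESC_READY;
	mb();				/* ... make it visible before the kick */
	writel(DOORBELL_GO, dev->mmio + DOORBELL_REG);
}
#endif
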
/*
 * Copyright IBM Corp. 1999, 2009
 *
 * Author(s): Martin Schwidefsky <schwidefsky@de.ibm.com>
 */

#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

/*
 * Force strict CPU ordering.
 * And yes, this is required on UP too when we're talking
 * to devices.
 */

#ifdef CONFIG_HAVE_MARCH_Z196_FEATURES
/* Fast-BCR without checkpoint synchronization */
#define mb() do { asm volatile("bcr 14,0" : : : "memory"); } while (0)
#else
#define mb() do { asm volatile("bcr 15,0" : : : "memory"); } while (0)
#endif

#define rmb() mb()
#define wmb() mb()
#define read_barrier_depends() do { } while (0)
#define smp_mb() mb()
#define smp_rmb() rmb()
#define smp_wmb() wmb()
#define smp_read_barrier_depends() read_barrier_depends()
#define smp_mb__before_clear_bit() smp_mb()
#define smp_mb__after_clear_bit() smp_mb()

#define set_mb(var, value) do { var = value; mb(); } while (0)

#define smp_store_release(p, v) \
do { \
	compiletime_assert_atomic_type(*p); \
	barrier(); \
	ACCESS_ONCE(*p) = (v); \
} while (0)

#define smp_load_acquire(p) \
({ \
	typeof(*p) ___p1 = ACCESS_ONCE(*p); \
	compiletime_assert_atomic_type(*p); \
	barrier(); \
	___p1; \
})

#endif /* __ASM_BARRIER_H */
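
/*
 * Usage sketch (illustration only, not part of the header): a minimal
 * message-passing pattern built on the two new primitives.  The producer
 * writes the payload and then publishes a flag with smp_store_release();
 * a consumer that reads the flag with smp_load_acquire() and sees it set
 * is guaranteed to also see the payload written before the release.
 * The example_* names are made up for the sketch.
 */
#if 0	/* example only, never compiled */
static int example_payload;
static int example_ready;

static void example_producer(int v)
{
	example_payload = v;			/* plain store of the data ... */
	smp_store_release(&example_ready, 1);	/* ... then publish the flag */
}

static int example_consumer(void)
{
	if (smp_load_acquire(&example_ready))	/* pairs with the release above */
		return example_payload;		/* guaranteed to see v */
	return -1;				/* not published yet */
}
#endif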