Linux kernel header arch/x86/um/asm/barrier.h, shown in two historical versions below.

Version v4.6:
 1#ifndef _ASM_UM_BARRIER_H_
 2#define _ASM_UM_BARRIER_H_
 3
 4#include <asm/asm.h>
 5#include <asm/segment.h>
 6#include <asm/cpufeatures.h>
 7#include <asm/cmpxchg.h>
 8#include <asm/nops.h>
 9
10#include <linux/kernel.h>
11#include <linux/irqflags.h>
12
13/*
14 * Force strict CPU ordering.
15 * And yes, this is required on UP too when we're talking
16 * to devices.
17 */
18#ifdef CONFIG_X86_32
19
20#define mb()	alternative("lock; addl $0,0(%%esp)", "mfence", X86_FEATURE_XMM2)
21#define rmb()	alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2)
22#define wmb()	alternative("lock; addl $0,0(%%esp)", "sfence", X86_FEATURE_XMM)
23
24#else /* CONFIG_X86_32 */
25
26#define mb()	asm volatile("mfence" : : : "memory")
27#define rmb()	asm volatile("lfence" : : : "memory")
28#define wmb()	asm volatile("sfence" : : : "memory")
29
30#endif /* CONFIG_X86_32 */
31
 
 
 
 
 
32#ifdef CONFIG_X86_PPRO_FENCE
33#define dma_rmb()	rmb()
34#else /* CONFIG_X86_PPRO_FENCE */
35#define dma_rmb()	barrier()
36#endif /* CONFIG_X86_PPRO_FENCE */
37#define dma_wmb()	barrier()
38
39#include <asm-generic/barrier.h>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
40
41#endif
Version v3.15:
 1#ifndef _ASM_UM_BARRIER_H_
 2#define _ASM_UM_BARRIER_H_
 3
 4#include <asm/asm.h>
 5#include <asm/segment.h>
 6#include <asm/cpufeature.h>
 7#include <asm/cmpxchg.h>
 8#include <asm/nops.h>
 9
10#include <linux/kernel.h>
11#include <linux/irqflags.h>
12
13/*
14 * Force strict CPU ordering.
15 * And yes, this is required on UP too when we're talking
16 * to devices.
17 */
18#ifdef CONFIG_X86_32
19
20#define mb()	alternative("lock; addl $0,0(%%esp)", "mfence", X86_FEATURE_XMM2)
21#define rmb()	alternative("lock; addl $0,0(%%esp)", "lfence", X86_FEATURE_XMM2)
22#define wmb()	alternative("lock; addl $0,0(%%esp)", "sfence", X86_FEATURE_XMM)
23
24#else /* CONFIG_X86_32 */
25
26#define mb()	asm volatile("mfence" : : : "memory")
27#define rmb()	asm volatile("lfence" : : : "memory")
28#define wmb()	asm volatile("sfence" : : : "memory")
29
30#endif /* CONFIG_X86_32 */
31
32#define read_barrier_depends()	do { } while (0)
33
34#ifdef CONFIG_SMP
35
36#define smp_mb()	mb()
37#ifdef CONFIG_X86_PPRO_FENCE
38#define smp_rmb()	rmb()
39#else /* CONFIG_X86_PPRO_FENCE */
40#define smp_rmb()	barrier()
41#endif /* CONFIG_X86_PPRO_FENCE */
 
42
43#define smp_wmb()	barrier()
44
45#define smp_read_barrier_depends()	read_barrier_depends()
46#define set_mb(var, value) do { (void)xchg(&var, value); } while (0)
47
48#else /* CONFIG_SMP */
49
50#define smp_mb()	barrier()
51#define smp_rmb()	barrier()
52#define smp_wmb()	barrier()
53#define smp_read_barrier_depends()	do { } while (0)
54#define set_mb(var, value) do { var = value; barrier(); } while (0)
55
56#endif /* CONFIG_SMP */
57
58/*
59 * Stop RDTSC speculation. This is needed when you need to use RDTSC
60 * (or get_cycles or vread that possibly accesses the TSC) in a defined
61 * code region.
62 *
63 * (Could use an alternative three way for this if there was one.)
64 */
65static inline void rdtsc_barrier(void)
66{
67	alternative(ASM_NOP3, "mfence", X86_FEATURE_MFENCE_RDTSC);
68	alternative(ASM_NOP3, "lfence", X86_FEATURE_LFENCE_RDTSC);
69}
70
71#endif