/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_X86_EDAC_H
#define _ASM_X86_EDAC_H

/* ECC atomic, DMA, SMP and interrupt safe scrub function */

static inline void edac_atomic_scrub(void *va, u32 size)
{
	u32 i, *virt_addr = va;

	/*
	 * Very carefully read and write to memory atomically so we
	 * are interrupt, DMA and SMP safe.
	 */
	for (i = 0; i < size / 4; i++, virt_addr++)
		asm volatile("lock; addl $0, %0"::"m" (*virt_addr));
}

#endif /* _ASM_X86_EDAC_H */
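
/*
 * Illustrative only: a minimal sketch of how a caller might use
 * edac_atomic_scrub() to rewrite a faulting region in place so the
 * memory controller regenerates its ECC bits. The function name
 * edac_scrub_page_sketch, its signature, and the kmap_local_page()
 * mapping are assumptions made for this example; they are not part
 * of this header and would normally live in an EDAC driver .c file.
 */
#include <linux/highmem.h>
#include <linux/mm.h>
#include <asm/edac.h>

static void edac_scrub_page_sketch(unsigned long pfn, unsigned long offset,
				   u32 size)
{
	struct page *pg;
	void *virt_addr;

	/* Skip errors reported against memory we cannot address. */
	if (!pfn_valid(pfn))
		return;

	pg = pfn_to_page(pfn);

	/* Map the page, then atomically read and write back each word. */
	virt_addr = kmap_local_page(pg);
	edac_atomic_scrub(virt_addr + offset, size);
	kunmap_local(virt_addr);
}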