/* SPDX-License-Identifier: GPL-2.0 */
#ifndef ASM_EDAC_H
#define ASM_EDAC_H

#include <asm/compiler.h>

/* ECC atomic, DMA, SMP and interrupt safe scrub function */

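/*
 * Note on the calling convention (as used by the EDAC core): on a
 * correctable ECC error the core rewrites the affected memory so the
 * corrected value is stored back.  'va' is a kernel virtual address
 * and 'size' a byte count; see edac_mc_scrub_block() in
 * drivers/edac/edac_mc.c, which passes an offset into a mapped page.
 */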
static inline void edac_atomic_scrub(void *va, u32 size)
{
	unsigned long *virt_addr = va;
	unsigned long temp;
	u32 i;

	for (i = 0; i < size / sizeof(unsigned long); i++) {
		/*
		 * Very carefully read and write to memory atomically
		 * so we are interrupt, DMA and SMP safe.
		 *
		 * Intel: asm("lock; addl $0, %0"::"m"(*virt_addr));
		 */
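		/*
		 * MIPS ll/sc mechanics (explanatory note): ll loads the
		 * word and marks it linked; addu with $0 recomputes the
		 * same value; sc writes it back only if nothing touched
		 * the word since the ll, leaving 1 (success) or 0
		 * (failure) in %0; beqz branches back to retry until the
		 * store succeeds.  Each word is thus rewritten with an
		 * atomic read-modify-write.
		 */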

		__asm__ __volatile__ (
		"	.set	push					\n"
		"	.set	mips2					\n"
		"1:	ll	%0, %1		# edac_atomic_scrub	\n"
		"	addu	%0, $0					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		"	.set	pop					\n"
		: "=&r" (temp), "=" GCC_OFF_SMALL_ASM() (*virt_addr)
		: GCC_OFF_SMALL_ASM() (*virt_addr));

		virt_addr++;
	}
}
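
/*
 * Usage sketch (illustrative; everything except edac_atomic_scrub()
 * itself is an assumption mirroring the EDAC core): scrub 'size' bytes
 * of a page that reported a correctable error:
 *
 *	void *virt = kmap_atomic(page);
 *	edac_atomic_scrub(virt + offset, size);
 *	kunmap_atomic(virt);
 */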

#endif