/* SPDX-License-Identifier: GPL-2.0 */
/*
 * r2300_switch.S: R2300 specific task switching code.
 *
 * Copyright (C) 1994, 1995, 1996, 1999 by Ralf Baechle
 * Copyright (C) 1994, 1995, 1996 by Andreas Busse
 *
 * Multi-cpu abstraction and macros for easier reading:
 * Copyright (C) 1996 David S. Miller (davem@davemloft.net)
 *
 * Further modifications to make this work:
 * Copyright (c) 1998-2000 Harald Koerfgen
 */
#include <asm/asm.h>
#include <asm/cachectl.h>
#include <asm/export.h>
#include <asm/fpregdef.h>
#include <asm/mipsregs.h>
#include <asm/asm-offsets.h>
#include <asm/regdef.h>
#include <asm/stackframe.h>
#include <asm/thread_info.h>

#include <asm/asmmacro.h>

	.set	mips1
	.align	5

/*
 * task_struct *resume(task_struct *prev, task_struct *next,
 *		       struct thread_info *next_ti)
 */
LEAF(resume)
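	/*
	 * Per the o32 calling convention: a0 = prev task_struct,
	 * a1 = next task_struct, a2 = next task's thread_info.
	 * Start by saving the outgoing task's CP0 Status, its non-scratch
	 * (callee-saved) registers and its return address into its
	 * thread_struct.
	 */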
	mfc0	t1, CP0_STATUS
	sw	t1, THREAD_STATUS(a0)
	cpu_save_nonscratch a0
	sw	ra, THREAD_REG31(a0)

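	/*
	 * With CONFIG_STACKPROTECTOR on a uniprocessor kernel the canary
	 * lives in the single global __stack_chk_guard, so publish the
	 * incoming task's canary there.
	 */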
#if defined(CONFIG_STACKPROTECTOR) && !defined(CONFIG_SMP)
	PTR_LA	t8, __stack_chk_guard
	LONG_L	t9, TASK_STACK_CANARY(a1)
	LONG_S	t9, 0(t8)
#endif

	/*
	 * The order of restoring the registers takes care of the race
	 * updating $28, $29 and kernelsp without disabling ints.
	 */
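	/*
	 * $28 (gp) holds the current thread_info pointer; point it at
	 * the incoming task's thread_info first, then let
	 * cpu_restore_nonscratch bring back the saved $29 (sp) and the
	 * other non-scratch registers.
	 */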
	move	$28, a2
	cpu_restore_nonscratch a1

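	/*
	 * Record the incoming task's kernel stack pointer (top of its
	 * stack area, less 32 bytes) in kernelsp, which the low-level
	 * exception entry code uses to find the kernel stack when an
	 * exception is taken from user mode.
	 */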
	addiu	t1, $28, _THREAD_SIZE - 32
	sw	t1, kernelsp

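	/*
	 * Build the new CP0 Status: keep the current interrupt mask and
	 * interrupt-enable bits (mask 0xff01) and take all other bits
	 * from the incoming task's saved Status.
	 */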
	mfc0	t1, CP0_STATUS		/* Do we really need this? */
	li	a3, 0xff01
	and	t1, a3
	lw	a2, THREAD_STATUS(a1)
	nor	a3, $0, a3
	and	a2, a3
	or	a2, t1
	mtc0	a2, CP0_STATUS
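	/* resume() returns the previous task */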
	move	v0, a0
	jr	ra
	END(resume)