.text
#include <linux/linkage.h>
#include <asm/segment.h>
#include <asm/pgtable_types.h>
#include <asm/page_types.h>
#include <asm/msr.h>
#include <asm/asm-offsets.h>
#include <asm/frame.h>

# Copyright 2003 Pavel Machek <pavel@suse.cz>, distribute under GPLv2

.code64
	/*
	 * Hooray, we are in Long 64-bit mode (but still running in low memory)
	 */
ENTRY(wakeup_long64)
	/*
	 * Verify the magic cookie left in saved_magic by the suspend side
	 * (written elsewhere, not in this file).  On a mismatch the saved
	 * kernel state cannot be trusted, so hang in bogus_64_magic rather
	 * than jump through a bogus saved_rip.
	 */
	movq	saved_magic, %rax
	movq	$0x123456789abcdef0, %rdx
	cmpq	%rdx, %rax
	jne	bogus_64_magic

	/* Load kernel data segment selectors before touching kernel data */
	movw	$__KERNEL_DS, %ax
	movw	%ax, %ss
	movw	%ax, %ds
	movw	%ax, %es
	movw	%ax, %fs
	movw	%ax, %gs
	movq	saved_rsp, %rsp		# back onto the pre-suspend stack

	/* Restore the registers do_suspend_lowlevel stashed before sleeping */
	movq	saved_rbx, %rbx
	movq	saved_rdi, %rdi
	movq	saved_rsi, %rsi
	movq	saved_rbp, %rbp

	/* saved_rip was set to .Lresume_point by do_suspend_lowlevel */
	movq	saved_rip, %rax
	jmp	*%rax
ENDPROC(wakeup_long64)
38
bogus_64_magic:
	/*
	 * saved_magic did not match: the saved state is untrustworthy, so
	 * park the CPU in a tight spin forever instead of resuming.
	 */
	jmp	bogus_64_magic
41
ENTRY(do_suspend_lowlevel)
	/*
	 * Save the CPU register state, ask ACPI to enter sleep state 3 (S3),
	 * and on wakeup -- reached when wakeup_long64 jumps through saved_rip
	 * to .Lresume_point -- restore that state and tail-jump into
	 * restore_processor_state.
	 */
	FRAME_BEGIN
	subq	$8, %rsp		# NOTE(review): presumably stack-alignment
					# padding for the calls below -- confirm
	xorl	%eax, %eax		# clear %rax (see "must be 0" note below)
	call	save_processor_state

	/* Snapshot every general-purpose register into saved_context */
	movq	$saved_context, %rax
	movq	%rsp, pt_regs_sp(%rax)
	movq	%rbp, pt_regs_bp(%rax)
	movq	%rsi, pt_regs_si(%rax)
	movq	%rdi, pt_regs_di(%rax)
	movq	%rbx, pt_regs_bx(%rax)
	movq	%rcx, pt_regs_cx(%rax)
	movq	%rdx, pt_regs_dx(%rax)
	movq	%r8, pt_regs_r8(%rax)
	movq	%r9, pt_regs_r9(%rax)
	movq	%r10, pt_regs_r10(%rax)
	movq	%r11, pt_regs_r11(%rax)
	movq	%r12, pt_regs_r12(%rax)
	movq	%r13, pt_regs_r13(%rax)
	movq	%r14, pt_regs_r14(%rax)
	movq	%r15, pt_regs_r15(%rax)
	pushfq
	popq	pt_regs_flags(%rax)	# ... and RFLAGS

	/* Tell wakeup_long64 where to land after resume */
	movq	$.Lresume_point, saved_rip(%rip)

	/* Registers wakeup_long64 restores itself, before .Lresume_point runs */
	movq	%rsp, saved_rsp
	movq	%rbp, saved_rbp
	movq	%rbx, saved_rbx
	movq	%rdi, saved_rdi
	movq	%rsi, saved_rsi

	addq	$8, %rsp		# undo the subq $8 from entry
	movl	$3, %edi		# arg 1: ACPI sleep state 3 (S3)
	xorl	%eax, %eax
	call	x86_acpi_enter_sleep_state
	/* in case something went wrong, restore the machine status and go on */
	jmp	.Lresume_point

	.align 4
.Lresume_point:
	/* We don't restore %rax, it must be 0 anyway */
	movq	$saved_context, %rax
	/* Control registers first, using %rbx (restored later) as scratch */
	movq	saved_context_cr4(%rax), %rbx
	movq	%rbx, %cr4
	movq	saved_context_cr3(%rax), %rbx
	movq	%rbx, %cr3
	movq	saved_context_cr2(%rax), %rbx
	movq	%rbx, %cr2
	movq	saved_context_cr0(%rax), %rbx
	movq	%rbx, %cr0
	/* RFLAGS, then the full general-purpose register set */
	pushq	pt_regs_flags(%rax)
	popfq
	movq	pt_regs_sp(%rax), %rsp
	movq	pt_regs_bp(%rax), %rbp
	movq	pt_regs_si(%rax), %rsi
	movq	pt_regs_di(%rax), %rdi
	movq	pt_regs_bx(%rax), %rbx
	movq	pt_regs_cx(%rax), %rcx
	movq	pt_regs_dx(%rax), %rdx
	movq	pt_regs_r8(%rax), %r8
	movq	pt_regs_r9(%rax), %r9
	movq	pt_regs_r10(%rax), %r10
	movq	pt_regs_r11(%rax), %r11
	movq	pt_regs_r12(%rax), %r12
	movq	pt_regs_r13(%rax), %r13
	movq	pt_regs_r14(%rax), %r14
	movq	pt_regs_r15(%rax), %r15

#ifdef CONFIG_KASAN
	/*
	 * The suspend path may have poisoned some areas deeper in the stack,
	 * which we now need to unpoison.
	 */
	movq	%rsp, %rdi
	call	kasan_unpoison_task_stack_below
#endif

	xorl	%eax, %eax		# keep the %rax == 0 invariant
	addq	$8, %rsp		# undo the subq $8 captured in the saved %rsp
	FRAME_END
	jmp	restore_processor_state	# tail call; its return goes to our caller
ENDPROC(do_suspend_lowlevel)
126
.data
/*
 * Scratch slots written by do_suspend_lowlevel before sleeping and read
 * back by wakeup_long64 very early on resume (before the saved stack is
 * usable).  Exported via ENTRY so other wakeup code can reference them.
 */
ENTRY(saved_rbp)	.quad	0
ENTRY(saved_rsi)	.quad	0
ENTRY(saved_rdi)	.quad	0
ENTRY(saved_rbx)	.quad	0

ENTRY(saved_rip)	.quad	0
ENTRY(saved_rsp)	.quad	0

/* Magic cookie checked by wakeup_long64; written elsewhere, not in this file */
ENTRY(saved_magic)	.quad	0
/* SPDX-License-Identifier: GPL-2.0-only */
.text
#include <linux/linkage.h>
#include <linux/objtool.h>
#include <asm/segment.h>
#include <asm/pgtable_types.h>
#include <asm/page_types.h>
#include <asm/msr.h>
#include <asm/asm-offsets.h>
#include <asm/frame.h>
#include <asm/nospec-branch.h>

# Copyright 2003 Pavel Machek <pavel@suse.cz>

.code64
	/*
	 * Hooray, we are in Long 64-bit mode (but still running in low memory)
	 */
SYM_FUNC_START(wakeup_long64)
	/*
	 * Verify the magic cookie left in saved_magic by the suspend side
	 * (written elsewhere, not in this file).  On a mismatch the saved
	 * kernel state cannot be trusted, so spin forever below rather than
	 * jump through a bogus saved_rip.
	 */
	movq	saved_magic, %rax
	movq	$0x123456789abcdef0, %rdx
	cmpq	%rdx, %rax
	je	2f

	/* stop here on a saved_magic mismatch */
	movq	$0xbad6d61676963, %rcx	# 0xbad + "magic" in ASCII, presumably
					# to make the hang recognizable -- confirm
1:
	jmp	1b
2:
	/* Load kernel data segment selectors before touching kernel data */
	movw	$__KERNEL_DS, %ax
	movw	%ax, %ss
	movw	%ax, %ds
	movw	%ax, %es
	movw	%ax, %fs
	movw	%ax, %gs
	movq	saved_rsp, %rsp		# back onto the pre-suspend stack

	/* Restore the registers do_suspend_lowlevel stashed before sleeping */
	movq	saved_rbx, %rbx
	movq	saved_rdi, %rdi
	movq	saved_rsi, %rsi
	movq	saved_rbp, %rbp

	/* saved_rip was set to .Lresume_point by do_suspend_lowlevel */
	movq	saved_rip, %rax
	ANNOTATE_RETPOLINE_SAFE		# deliberate indirect jump; keeps objtool quiet
	jmp	*%rax
SYM_FUNC_END(wakeup_long64)
47
SYM_FUNC_START(do_suspend_lowlevel)
	/*
	 * Save the CPU register state, ask ACPI to enter sleep state 3 (S3),
	 * and on wakeup -- reached when wakeup_long64 jumps through saved_rip
	 * to .Lresume_point -- restore that state and tail-jump into
	 * restore_processor_state.
	 */
	FRAME_BEGIN
	subq	$8, %rsp		# NOTE(review): presumably stack-alignment
					# padding for the calls below -- confirm
	xorl	%eax, %eax		# clear %rax (see "must be 0" note below)
	call	save_processor_state

	/* Snapshot every general-purpose register into saved_context */
	movq	$saved_context, %rax
	movq	%rsp, pt_regs_sp(%rax)
	movq	%rbp, pt_regs_bp(%rax)
	movq	%rsi, pt_regs_si(%rax)
	movq	%rdi, pt_regs_di(%rax)
	movq	%rbx, pt_regs_bx(%rax)
	movq	%rcx, pt_regs_cx(%rax)
	movq	%rdx, pt_regs_dx(%rax)
	movq	%r8, pt_regs_r8(%rax)
	movq	%r9, pt_regs_r9(%rax)
	movq	%r10, pt_regs_r10(%rax)
	movq	%r11, pt_regs_r11(%rax)
	movq	%r12, pt_regs_r12(%rax)
	movq	%r13, pt_regs_r13(%rax)
	movq	%r14, pt_regs_r14(%rax)
	movq	%r15, pt_regs_r15(%rax)
	pushfq
	popq	pt_regs_flags(%rax)	# ... and RFLAGS

	/* Tell wakeup_long64 where to land after resume */
	movq	$.Lresume_point, saved_rip(%rip)

	/* Registers wakeup_long64 restores itself, before .Lresume_point runs */
	movq	%rsp, saved_rsp
	movq	%rbp, saved_rbp
	movq	%rbx, saved_rbx
	movq	%rdi, saved_rdi
	movq	%rsi, saved_rsi

	addq	$8, %rsp		# undo the subq $8 from entry
	movl	$3, %edi		# arg 1: ACPI sleep state 3 (S3)
	xorl	%eax, %eax
	call	x86_acpi_enter_sleep_state
	/* in case something went wrong, restore the machine status and go on */
	jmp	.Lresume_point

	.align 4
.Lresume_point:
	/* We don't restore %rax, it must be 0 anyway */
	movq	$saved_context, %rax
	/* Control registers first, using %rbx (restored later) as scratch */
	movq	saved_context_cr4(%rax), %rbx
	movq	%rbx, %cr4
	movq	saved_context_cr3(%rax), %rbx
	movq	%rbx, %cr3
	movq	saved_context_cr2(%rax), %rbx
	movq	%rbx, %cr2
	movq	saved_context_cr0(%rax), %rbx
	movq	%rbx, %cr0
	/* RFLAGS, then the full general-purpose register set */
	pushq	pt_regs_flags(%rax)
	popfq
	movq	pt_regs_sp(%rax), %rsp
	movq	pt_regs_bp(%rax), %rbp
	movq	pt_regs_si(%rax), %rsi
	movq	pt_regs_di(%rax), %rdi
	movq	pt_regs_bx(%rax), %rbx
	movq	pt_regs_cx(%rax), %rcx
	movq	pt_regs_dx(%rax), %rdx
	movq	pt_regs_r8(%rax), %r8
	movq	pt_regs_r9(%rax), %r9
	movq	pt_regs_r10(%rax), %r10
	movq	pt_regs_r11(%rax), %r11
	movq	pt_regs_r12(%rax), %r12
	movq	pt_regs_r13(%rax), %r13
	movq	pt_regs_r14(%rax), %r14
	movq	pt_regs_r15(%rax), %r15

#if defined(CONFIG_KASAN) && defined(CONFIG_KASAN_STACK)
	/*
	 * The suspend path may have poisoned some areas deeper in the stack,
	 * which we now need to unpoison.
	 */
	movq	%rsp, %rdi
	call	kasan_unpoison_task_stack_below
#endif

	xorl	%eax, %eax		# keep the %rax == 0 invariant
	addq	$8, %rsp		# undo the subq $8 captured in the saved %rsp
	FRAME_END
	jmp	restore_processor_state	# tail call; its return goes to our caller
SYM_FUNC_END(do_suspend_lowlevel)
/* Non-standard stack usage above; tell objtool not to validate this frame */
STACK_FRAME_NON_STANDARD do_suspend_lowlevel
133
.data
/*
 * Scratch slots written by do_suspend_lowlevel before sleeping and read
 * back by wakeup_long64 very early on resume (before the saved stack is
 * usable).  Only saved_magic is exported; the rest are file-local.
 */
saved_rbp:		.quad	0
saved_rsi:		.quad	0
saved_rdi:		.quad	0
saved_rbx:		.quad	0

saved_rip:		.quad	0
saved_rsp:		.quad	0

/* Magic cookie checked by wakeup_long64; written elsewhere, not in this file */
SYM_DATA(saved_magic, .quad 0)