/* SPDX-License-Identifier: GPL-2.0-only */
.text
#include <linux/linkage.h>
#include <asm/segment.h>
#include <asm/pgtable_types.h>
#include <asm/page_types.h>
#include <asm/msr.h>
#include <asm/asm-offsets.h>
#include <asm/frame.h>

# Copyright 2003 Pavel Machek <pavel@suse.cz>, distribute under GPLv2

.code64
	/*
	 * Hooray, we are in Long 64-bit mode (but still running in low memory)
	 */
/*
 * wakeup_long64 - 64-bit entry point on resume from ACPI sleep.
 *
 * Jumped to from the low-memory trampoline once long mode is active.
 * Validates saved_magic, reloads the kernel data segments and the
 * registers stashed by do_suspend_lowlevel, then jumps to saved_rip
 * (which points at .Lresume_point).  Never returns.
 */
ENTRY(wakeup_long64)
	movq	saved_magic, %rax
	movq	$0x123456789abcdef0, %rdx
	cmpq	%rdx, %rax
	je	2f			/* magic matches: proceed with resume */

	/* stop here on a saved_magic mismatch */
	movq	$0xbad6d61676963, %rcx	/* "bad magic" marker, visible in a debugger */
1:
	jmp	1b			/* deliberate hang: state is untrustworthy */
2:
	/* Reload all data segment selectors with the kernel data segment */
	movw	$__KERNEL_DS, %ax
	movw	%ax, %ss
	movw	%ax, %ds
	movw	%ax, %es
	movw	%ax, %fs
	movw	%ax, %gs
	movq	saved_rsp, %rsp

	/* Restore the callee context saved in do_suspend_lowlevel */
	movq	saved_rbx, %rbx
	movq	saved_rdi, %rdi
	movq	saved_rsi, %rsi
	movq	saved_rbp, %rbp

	movq	saved_rip, %rax
	jmp	*%rax			/* tail-jump into .Lresume_point */
ENDPROC(wakeup_long64)

/*
 * do_suspend_lowlevel - save CPU context and enter ACPI S3.
 *
 * Saves the full register file into saved_context (via asm-offsets
 * pt_regs_* offsets) plus the resume bootstrap values (saved_rip,
 * saved_rsp, ...), then calls x86_acpi_enter_sleep_state(3).  On a
 * successful suspend, execution resumes at .Lresume_point via
 * wakeup_long64; if entering S3 fails, we fall through to
 * .Lresume_point directly and undo everything.
 */
ENTRY(do_suspend_lowlevel)
	FRAME_BEGIN
	subq	$8, %rsp		/* keep %rsp 16-byte aligned at calls */
	xorl	%eax, %eax
	call	save_processor_state

	/* Stash the whole GP register file into saved_context */
	movq	$saved_context, %rax
	movq	%rsp, pt_regs_sp(%rax)
	movq	%rbp, pt_regs_bp(%rax)
	movq	%rsi, pt_regs_si(%rax)
	movq	%rdi, pt_regs_di(%rax)
	movq	%rbx, pt_regs_bx(%rax)
	movq	%rcx, pt_regs_cx(%rax)
	movq	%rdx, pt_regs_dx(%rax)
	movq	%r8, pt_regs_r8(%rax)
	movq	%r9, pt_regs_r9(%rax)
	movq	%r10, pt_regs_r10(%rax)
	movq	%r11, pt_regs_r11(%rax)
	movq	%r12, pt_regs_r12(%rax)
	movq	%r13, pt_regs_r13(%rax)
	movq	%r14, pt_regs_r14(%rax)
	movq	%r15, pt_regs_r15(%rax)
	pushfq
	popq	pt_regs_flags(%rax)

	/* Where wakeup_long64 jumps after the trampoline */
	movq	$.Lresume_point, saved_rip(%rip)

	movq	%rsp, saved_rsp
	movq	%rbp, saved_rbp
	movq	%rbx, saved_rbx
	movq	%rdi, saved_rdi
	movq	%rsi, saved_rsi

	addq	$8, %rsp
	movl	$3, %edi		/* ACPI sleep state S3 */
	xorl	%eax, %eax
	call	x86_acpi_enter_sleep_state
	/* in case something went wrong, restore the machine status and go on */
	jmp	.Lresume_point

	.align 4
.Lresume_point:
	/* We don't restore %rax, it must be 0 anyway */
	movq	$saved_context, %rax
	/* Control registers first, then flags, then GP registers */
	movq	saved_context_cr4(%rax), %rbx
	movq	%rbx, %cr4
	movq	saved_context_cr3(%rax), %rbx
	movq	%rbx, %cr3
	movq	saved_context_cr2(%rax), %rbx
	movq	%rbx, %cr2
	movq	saved_context_cr0(%rax), %rbx
	movq	%rbx, %cr0
	pushq	pt_regs_flags(%rax)
	popfq
	movq	pt_regs_sp(%rax), %rsp
	movq	pt_regs_bp(%rax), %rbp
	movq	pt_regs_si(%rax), %rsi
	movq	pt_regs_di(%rax), %rdi
	movq	pt_regs_bx(%rax), %rbx
	movq	pt_regs_cx(%rax), %rcx
	movq	pt_regs_dx(%rax), %rdx
	movq	pt_regs_r8(%rax), %r8
	movq	pt_regs_r9(%rax), %r9
	movq	pt_regs_r10(%rax), %r10
	movq	pt_regs_r11(%rax), %r11
	movq	pt_regs_r12(%rax), %r12
	movq	pt_regs_r13(%rax), %r13
	movq	pt_regs_r14(%rax), %r14
	movq	pt_regs_r15(%rax), %r15

#ifdef CONFIG_KASAN
	/*
	 * The suspend path may have poisoned some areas deeper in the stack,
	 * which we now need to unpoison.
	 */
	movq	%rsp, %rdi
	call	kasan_unpoison_task_stack_below
#endif

	xorl	%eax, %eax		/* return 0 to the C caller */
	addq	$8, %rsp
	FRAME_END
	jmp	restore_processor_state	/* tail call; its ret returns to our caller */
ENDPROC(do_suspend_lowlevel)

.data
/* Resume bootstrap slots written by do_suspend_lowlevel, read by wakeup_long64 */
saved_rbp:		.quad	0
saved_rsi:		.quad	0
saved_rdi:		.quad	0
saved_rbx:		.quad	0

saved_rip:		.quad	0
saved_rsp:		.quad	0

/* Sanity cookie; checked against 0x123456789abcdef0 in wakeup_long64 */
ENTRY(saved_magic)	.quad	0
/*
 * NOTE(review): everything from here down appears to be a SECOND, older
 * revision of this same file concatenated by the page scrape (it uses a
 * bogus_64_magic label and ENTRY()-exported save slots instead of the
 * .Lresume_point/local-label style above).  Assembled together with the
 * copy above it would produce duplicate symbol definitions — confirm
 * which revision is wanted and drop the other.
 */
.text
#include <linux/linkage.h>
#include <asm/segment.h>
#include <asm/pgtable_types.h>
#include <asm/page_types.h>
#include <asm/msr.h>
#include <asm/asm-offsets.h>
#include <asm/frame.h>

# Copyright 2003 Pavel Machek <pavel@suse.cz>, distribute under GPLv2

.code64
	/*
	 * Hooray, we are in Long 64-bit mode (but still running in low memory)
	 */
/*
 * wakeup_long64 - 64-bit resume entry (older revision).
 * Verifies saved_magic, reloads kernel data segments and the saved
 * registers, then jumps to saved_rip.  Hangs at bogus_64_magic on a
 * cookie mismatch.
 */
ENTRY(wakeup_long64)
	movq	saved_magic, %rax
	movq	$0x123456789abcdef0, %rdx
	cmpq	%rdx, %rax
	jne	bogus_64_magic		/* saved state not trustworthy: hang */

	movw	$__KERNEL_DS, %ax
	movw	%ax, %ss
	movw	%ax, %ds
	movw	%ax, %es
	movw	%ax, %fs
	movw	%ax, %gs
	movq	saved_rsp, %rsp

	movq	saved_rbx, %rbx
	movq	saved_rdi, %rdi
	movq	saved_rsi, %rsi
	movq	saved_rbp, %rbp

	movq	saved_rip, %rax
	jmp	*%rax			/* tail-jump to the saved resume point */
ENDPROC(wakeup_long64)

/* Deliberate infinite loop: reached only when saved_magic is corrupt */
bogus_64_magic:
	jmp	bogus_64_magic

/*
 * do_suspend_lowlevel - save CPU context and enter ACPI S3 (older
 * revision, without the KASAN unpoison step present in the copy above).
 *
 * Saves the register file into saved_context plus the resume bootstrap
 * slots, then calls x86_acpi_enter_sleep_state(3).  Resumes at
 * .Lresume_point either via wakeup_long64 or by falling through on a
 * failed sleep entry.
 */
ENTRY(do_suspend_lowlevel)
	FRAME_BEGIN
	subq	$8, %rsp		/* keep %rsp 16-byte aligned at calls */
	xorl	%eax, %eax
	call	save_processor_state

	/* Stash the whole GP register file into saved_context */
	movq	$saved_context, %rax
	movq	%rsp, pt_regs_sp(%rax)
	movq	%rbp, pt_regs_bp(%rax)
	movq	%rsi, pt_regs_si(%rax)
	movq	%rdi, pt_regs_di(%rax)
	movq	%rbx, pt_regs_bx(%rax)
	movq	%rcx, pt_regs_cx(%rax)
	movq	%rdx, pt_regs_dx(%rax)
	movq	%r8, pt_regs_r8(%rax)
	movq	%r9, pt_regs_r9(%rax)
	movq	%r10, pt_regs_r10(%rax)
	movq	%r11, pt_regs_r11(%rax)
	movq	%r12, pt_regs_r12(%rax)
	movq	%r13, pt_regs_r13(%rax)
	movq	%r14, pt_regs_r14(%rax)
	movq	%r15, pt_regs_r15(%rax)
	pushfq
	popq	pt_regs_flags(%rax)

	/* Where wakeup_long64 jumps after the trampoline */
	movq	$.Lresume_point, saved_rip(%rip)

	movq	%rsp, saved_rsp
	movq	%rbp, saved_rbp
	movq	%rbx, saved_rbx
	movq	%rdi, saved_rdi
	movq	%rsi, saved_rsi

	addq	$8, %rsp
	movl	$3, %edi		/* ACPI sleep state S3 */
	xorl	%eax, %eax
	call	x86_acpi_enter_sleep_state
	/* in case something went wrong, restore the machine status and go on */
	jmp	.Lresume_point

	.align 4
.Lresume_point:
	/* We don't restore %rax, it must be 0 anyway */
	movq	$saved_context, %rax
	/* Control registers first, then flags, then GP registers */
	movq	saved_context_cr4(%rax), %rbx
	movq	%rbx, %cr4
	movq	saved_context_cr3(%rax), %rbx
	movq	%rbx, %cr3
	movq	saved_context_cr2(%rax), %rbx
	movq	%rbx, %cr2
	movq	saved_context_cr0(%rax), %rbx
	movq	%rbx, %cr0
	pushq	pt_regs_flags(%rax)
	popfq
	movq	pt_regs_sp(%rax), %rsp
	movq	pt_regs_bp(%rax), %rbp
	movq	pt_regs_si(%rax), %rsi
	movq	pt_regs_di(%rax), %rdi
	movq	pt_regs_bx(%rax), %rbx
	movq	pt_regs_cx(%rax), %rcx
	movq	pt_regs_dx(%rax), %rdx
	movq	pt_regs_r8(%rax), %r8
	movq	pt_regs_r9(%rax), %r9
	movq	pt_regs_r10(%rax), %r10
	movq	pt_regs_r11(%rax), %r11
	movq	pt_regs_r12(%rax), %r12
	movq	pt_regs_r13(%rax), %r13
	movq	pt_regs_r14(%rax), %r14
	movq	pt_regs_r15(%rax), %r15

	xorl	%eax, %eax		/* return 0 to the C caller */
	addq	$8, %rsp
	FRAME_END
	jmp	restore_processor_state	/* tail call; its ret returns to our caller */
ENDPROC(do_suspend_lowlevel)

.data
/*
 * Resume bootstrap slots (older revision: all ENTRY()-exported).
 * Written by do_suspend_lowlevel, read by wakeup_long64.
 */
ENTRY(saved_rbp)	.quad	0
ENTRY(saved_rsi)	.quad	0
ENTRY(saved_rdi)	.quad	0
ENTRY(saved_rbx)	.quad	0

ENTRY(saved_rip)	.quad	0
ENTRY(saved_rsp)	.quad	0

/* Sanity cookie; checked against 0x123456789abcdef0 in wakeup_long64 */
ENTRY(saved_magic)	.quad	0