v5.4
/* SPDX-License-Identifier: GPL-2.0-only */
.text
#include <linux/linkage.h>
#include <asm/segment.h>
#include <asm/pgtable_types.h>
#include <asm/page_types.h>
#include <asm/msr.h>
#include <asm/asm-offsets.h>
#include <asm/frame.h>

# Copyright 2003 Pavel Machek <pavel@suse.cz>

.code64
	/*
	 * Hooray, we are in Long 64-bit mode (but still running in low memory)
	 */
ENTRY(wakeup_long64)
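	/*
	 * 64-bit resume entry: reached from the wakeup trampoline once the
	 * firmware hands control back after S3.  Verify the magic cookie
	 * written by the suspend path, then restore the registers saved by
	 * do_suspend_lowlevel and jump to saved_rip.
	 */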
	movq	saved_magic, %rax
	movq	$0x123456789abcdef0, %rdx
	cmpq	%rdx, %rax
	je	2f

	/* stop here on a saved_magic mismatch */
	movq $0xbad6d61676963, %rcx
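	/* (the value parked in %rcx reads as "bad" + ASCII "magic" in a register dump) */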
1:
	jmp 1b
2:
	movw	$__KERNEL_DS, %ax
	movw	%ax, %ss
	movw	%ax, %ds
	movw	%ax, %es
	movw	%ax, %fs
	movw	%ax, %gs
	movq	saved_rsp, %rsp

	movq	saved_rbx, %rbx
	movq	saved_rdi, %rdi
	movq	saved_rsi, %rsi
	movq	saved_rbp, %rbp

	movq	saved_rip, %rax
	jmp	*%rax
ENDPROC(wakeup_long64)

ENTRY(do_suspend_lowlevel)
	FRAME_BEGIN
	subq	$8, %rsp
	xorl	%eax, %eax
	call	save_processor_state

	movq	$saved_context, %rax
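	/* stash the GP registers in the pt_regs embedded in the global saved_context (pt_regs_* offsets come from asm-offsets) */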
	movq	%rsp, pt_regs_sp(%rax)
	movq	%rbp, pt_regs_bp(%rax)
	movq	%rsi, pt_regs_si(%rax)
	movq	%rdi, pt_regs_di(%rax)
	movq	%rbx, pt_regs_bx(%rax)
	movq	%rcx, pt_regs_cx(%rax)
	movq	%rdx, pt_regs_dx(%rax)
	movq	%r8, pt_regs_r8(%rax)
	movq	%r9, pt_regs_r9(%rax)
	movq	%r10, pt_regs_r10(%rax)
	movq	%r11, pt_regs_r11(%rax)
	movq	%r12, pt_regs_r12(%rax)
	movq	%r13, pt_regs_r13(%rax)
	movq	%r14, pt_regs_r14(%rax)
	movq	%r15, pt_regs_r15(%rax)
	pushfq
	popq	pt_regs_flags(%rax)

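	/* record the resume address and register context consumed by wakeup_long64 above */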
	movq	$.Lresume_point, saved_rip(%rip)

	movq	%rsp, saved_rsp
	movq	%rbp, saved_rbp
	movq	%rbx, saved_rbx
	movq	%rdi, saved_rdi
	movq	%rsi, saved_rsi

	addq	$8, %rsp
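	/* request ACPI S3: x86_acpi_enter_sleep_state(3) does not return on success; execution resumes at .Lresume_point via wakeup_long64 */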
	movl	$3, %edi
	xorl	%eax, %eax
	call	x86_acpi_enter_sleep_state
	/* in case something went wrong, restore the machine status and go on */
	jmp	.Lresume_point

	.align 4
.Lresume_point:
	/* We don't restore %rax, it must be 0 anyway */
	movq	$saved_context, %rax
	movq	saved_context_cr4(%rax), %rbx
	movq	%rbx, %cr4
	movq	saved_context_cr3(%rax), %rbx
	movq	%rbx, %cr3
	movq	saved_context_cr2(%rax), %rbx
	movq	%rbx, %cr2
	movq	saved_context_cr0(%rax), %rbx
	movq	%rbx, %cr0
	pushq	pt_regs_flags(%rax)
	popfq
	movq	pt_regs_sp(%rax), %rsp
	movq	pt_regs_bp(%rax), %rbp
	movq	pt_regs_si(%rax), %rsi
	movq	pt_regs_di(%rax), %rdi
	movq	pt_regs_bx(%rax), %rbx
	movq	pt_regs_cx(%rax), %rcx
	movq	pt_regs_dx(%rax), %rdx
	movq	pt_regs_r8(%rax), %r8
	movq	pt_regs_r9(%rax), %r9
	movq	pt_regs_r10(%rax), %r10
	movq	pt_regs_r11(%rax), %r11
	movq	pt_regs_r12(%rax), %r12
	movq	pt_regs_r13(%rax), %r13
	movq	pt_regs_r14(%rax), %r14
	movq	pt_regs_r15(%rax), %r15

#ifdef CONFIG_KASAN
	/*
	 * The suspend path may have poisoned some areas deeper in the stack,
	 * which we now need to unpoison.
	 */
	movq	%rsp, %rdi
	call	kasan_unpoison_task_stack_below
#endif

	xorl	%eax, %eax
	addq	$8, %rsp
	FRAME_END
	jmp	restore_processor_state
ENDPROC(do_suspend_lowlevel)

.data
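/* resume context that must survive S3; saved_magic is presumably written by the C suspend path (acpi_suspend_lowlevel) before sleeping */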
saved_rbp:		.quad	0
saved_rsi:		.quad	0
saved_rdi:		.quad	0
saved_rbx:		.quad	0

saved_rip:		.quad	0
saved_rsp:		.quad	0

ENTRY(saved_magic)	.quad	0
v6.13.7
/* SPDX-License-Identifier: GPL-2.0-only */
.text
#include <linux/linkage.h>
#include <linux/objtool.h>
#include <asm/segment.h>
#include <asm/pgtable_types.h>
#include <asm/page_types.h>
#include <asm/msr.h>
#include <asm/asm-offsets.h>
#include <asm/frame.h>
#include <asm/nospec-branch.h>

# Copyright 2003 Pavel Machek <pavel@suse.cz>

.code64
	/*
	 * Hooray, we are in Long 64-bit mode (but still running in low memory)
	 */
SYM_FUNC_START(wakeup_long64)
	movq	saved_magic(%rip), %rax
	movq	$0x123456789abcdef0, %rdx
	cmpq	%rdx, %rax
	je	2f

	/* stop here on a saved_magic mismatch */
	movq $0xbad6d61676963, %rcx
1:
	jmp 1b
2:
	movw	$__KERNEL_DS, %ax
	movw	%ax, %ss
	movw	%ax, %ds
	movw	%ax, %es
	movw	%ax, %fs
	movw	%ax, %gs
	movq	saved_rsp(%rip), %rsp

	movq	saved_rbx(%rip), %rbx
	movq	saved_rdi(%rip), %rdi
	movq	saved_rsi(%rip), %rsi
	movq	saved_rbp(%rip), %rbp

	movq	saved_rip(%rip), %rax
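	/* objtool: the indirect jump below is intentional and exempt from retpoline checks */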
	ANNOTATE_RETPOLINE_SAFE
	jmp	*%rax
SYM_FUNC_END(wakeup_long64)

SYM_FUNC_START(do_suspend_lowlevel)
	FRAME_BEGIN
	subq	$8, %rsp
	xorl	%eax, %eax
	call	save_processor_state

	movq	$saved_context, %rax
	movq	%rsp, pt_regs_sp(%rax)
	movq	%rbp, pt_regs_bp(%rax)
	movq	%rsi, pt_regs_si(%rax)
	movq	%rdi, pt_regs_di(%rax)
	movq	%rbx, pt_regs_bx(%rax)
	movq	%rcx, pt_regs_cx(%rax)
	movq	%rdx, pt_regs_dx(%rax)
	movq	%r8, pt_regs_r8(%rax)
	movq	%r9, pt_regs_r9(%rax)
	movq	%r10, pt_regs_r10(%rax)
	movq	%r11, pt_regs_r11(%rax)
	movq	%r12, pt_regs_r12(%rax)
	movq	%r13, pt_regs_r13(%rax)
	movq	%r14, pt_regs_r14(%rax)
	movq	%r15, pt_regs_r15(%rax)
	pushfq
	popq	pt_regs_flags(%rax)

	movq	$.Lresume_point, saved_rip(%rip)

	movq	%rsp, saved_rsp(%rip)
	movq	%rbp, saved_rbp(%rip)
	movq	%rbx, saved_rbx(%rip)
	movq	%rdi, saved_rdi(%rip)
	movq	%rsi, saved_rsi(%rip)

	addq	$8, %rsp
	movl	$3, %edi
	xorl	%eax, %eax
	call	x86_acpi_enter_sleep_state
	/* in case something went wrong, restore the machine status and go on */
	jmp	.Lresume_point

	.align 4
.Lresume_point:
	ANNOTATE_NOENDBR
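	/* address-taken label with no ENDBR; the annotation keeps objtool's IBT checking from flagging it */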
	/* We don't restore %rax, it must be 0 anyway */
	movq	$saved_context, %rax
	movq	saved_context_cr4(%rax), %rbx
	movq	%rbx, %cr4
	movq	saved_context_cr3(%rax), %rbx
	movq	%rbx, %cr3
	movq	saved_context_cr2(%rax), %rbx
	movq	%rbx, %cr2
	movq	saved_context_cr0(%rax), %rbx
	movq	%rbx, %cr0
	pushq	pt_regs_flags(%rax)
	popfq
	movq	pt_regs_sp(%rax), %rsp
	movq	pt_regs_bp(%rax), %rbp
	movq	pt_regs_si(%rax), %rsi
	movq	pt_regs_di(%rax), %rdi
	movq	pt_regs_bx(%rax), %rbx
	movq	pt_regs_cx(%rax), %rcx
	movq	pt_regs_dx(%rax), %rdx
	movq	pt_regs_r8(%rax), %r8
	movq	pt_regs_r9(%rax), %r9
	movq	pt_regs_r10(%rax), %r10
	movq	pt_regs_r11(%rax), %r11
	movq	pt_regs_r12(%rax), %r12
	movq	pt_regs_r13(%rax), %r13
	movq	pt_regs_r14(%rax), %r14
	movq	pt_regs_r15(%rax), %r15

#if defined(CONFIG_KASAN) && defined(CONFIG_KASAN_STACK)
	/*
	 * The suspend path may have poisoned some areas deeper in the stack,
	 * which we now need to unpoison.
	 */
	movq	%rsp, %rdi
	call	kasan_unpoison_task_stack_below
#endif

	xorl	%eax, %eax
	addq	$8, %rsp
	FRAME_END
	jmp	restore_processor_state
SYM_FUNC_END(do_suspend_lowlevel)
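/* exempt do_suspend_lowlevel from objtool stack validation: execution re-enters at .Lresume_point via wakeup_long64 rather than returning normally */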
STACK_FRAME_NON_STANDARD do_suspend_lowlevel

.data
saved_rbp:		.quad	0
saved_rsi:		.quad	0
saved_rdi:		.quad	0
saved_rbx:		.quad	0

saved_rip:		.quad	0
saved_rsp:		.quad	0

SYM_DATA(saved_magic,	.quad	0)