/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Hibernation support for x86-64
 *
 * Copyright 2007 Rafael J. Wysocki <rjw@sisk.pl>
 * Copyright 2005 Andi Kleen <ak@suse.de>
 * Copyright 2004 Pavel Machek <pavel@suse.cz>
 *
 * swsusp_arch_resume must not use any stack or any nonlocal variables while
 * copying pages:
 *
 * Its rewriting one kernel image with another. What is stack in "old"
 * image could very well be data page in "new" image, and overwriting
 * your own stack under you is bad idea.
 */

	.text
#include <linux/linkage.h>
#include <asm/segment.h>
#include <asm/page_types.h>
#include <asm/asm-offsets.h>
#include <asm/processor-flags.h>
#include <asm/frame.h>

/*
 * swsusp_arch_suspend - save the CPU register state and create the image
 *
 * Stores all general-purpose registers and the flags into saved_context
 * (offsets come from asm-offsets), saves %cr3 for the resume path, then
 * calls swsusp_save() to snapshot memory.  Returns swsusp_save()'s value
 * in %rax.
 */
ENTRY(swsusp_arch_suspend)
	movq	$saved_context, %rax
	movq	%rsp, pt_regs_sp(%rax)
	movq	%rbp, pt_regs_bp(%rax)
	movq	%rsi, pt_regs_si(%rax)
	movq	%rdi, pt_regs_di(%rax)
	movq	%rbx, pt_regs_bx(%rax)
	movq	%rcx, pt_regs_cx(%rax)
	movq	%rdx, pt_regs_dx(%rax)
	movq	%r8, pt_regs_r8(%rax)
	movq	%r9, pt_regs_r9(%rax)
	movq	%r10, pt_regs_r10(%rax)
	movq	%r11, pt_regs_r11(%rax)
	movq	%r12, pt_regs_r12(%rax)
	movq	%r13, pt_regs_r13(%rax)
	movq	%r14, pt_regs_r14(%rax)
	movq	%r15, pt_regs_r15(%rax)
	pushfq
	popq	pt_regs_flags(%rax)

	/* save cr3 so restore_registers can switch back to these page tables */
	movq	%cr3, %rax
	movq	%rax, restore_cr3(%rip)

	FRAME_BEGIN
	call	swsusp_save
	FRAME_END
	ret
ENDPROC(swsusp_arch_suspend)
/*
 * restore_image - switch to temporary page tables and copy the image back
 *
 * Register contract (no stack may be used while pages are being copied —
 * the stack itself may be overwritten):
 *   %r8  = restore_jump_address (entry point in the image kernel)
 *   %r9  = restore_cr3 (image kernel's page tables)
 *   %rax = temp_pgt (temporary page tables, safe during the copy)
 *   %rbx = mmu_cr4_features (cr4 value, used for the PGE TLB-flush dance)
 *   %rdx = restore_pblist (linked list of pages to restore)
 */
ENTRY(restore_image)
	/* prepare to jump to the image kernel */
	movq	restore_jump_address(%rip), %r8
	movq	restore_cr3(%rip), %r9

	/* prepare to switch to temporary page tables */
	movq	temp_pgt(%rip), %rax
	movq	mmu_cr4_features(%rip), %rbx

	/* prepare to copy image data to their original locations */
	movq	restore_pblist(%rip), %rdx

	/* jump to relocated restore code */
	movq	relocated_restore_code(%rip), %rcx
	jmpq	*%rcx

	/* code below has been relocated to a safe page */
ENTRY(core_restore_code)
	/* switch to temporary page tables */
	movq	%rax, %cr3
	/* flush TLB: toggling CR4.PGE and reloading %cr3 drops global entries */
	movq	%rbx, %rcx
	andq	$~(X86_CR4_PGE), %rcx
	movq	%rcx, %cr4;  # turn off PGE
	movq	%cr3, %rcx;  # flush TLB
	movq	%rcx, %cr3;
	movq	%rbx, %cr4;  # turn PGE back on
.Lloop:
	testq	%rdx, %rdx
	jz	.Ldone

	/* get addresses from the pbe and copy the page */
	movq	pbe_address(%rdx), %rsi
	movq	pbe_orig_address(%rdx), %rdi
	movq	$(PAGE_SIZE >> 3), %rcx
	rep
	movsq

	/* progress to the next pbe */
	movq	pbe_next(%rdx), %rdx
	jmp	.Lloop

.Ldone:
	/* jump to the restore_registers address from the image header */
	jmpq	*%r8
d158cbdf | 100 | |
65c0554b RW |
101 | /* code below belongs to the image kernel */ |
102 | .align PAGE_SIZE | |
d158cbdf | 103 | ENTRY(restore_registers) |
3dd08325 | 104 | /* go back to the original page tables */ |
65c0554b | 105 | movq %r9, %cr3 |
1ab60e0f | 106 | |
1da177e4 LT |
107 | /* Flush TLB, including "global" things (vmalloc) */ |
108 | movq mmu_cr4_features(%rip), %rax | |
109 | movq %rax, %rdx | |
bbb1e57a | 110 | andq $~(X86_CR4_PGE), %rdx |
1da177e4 LT |
111 | movq %rdx, %cr4; # turn off PGE |
112 | movq %cr3, %rcx; # flush TLB | |
113 | movq %rcx, %cr3 | |
114 | movq %rax, %cr4; # turn PGE back on | |
115 | ||
0de80bcc RW |
116 | /* We don't restore %rax, it must be 0 anyway */ |
117 | movq $saved_context, %rax | |
65ea5b03 PA |
118 | movq pt_regs_sp(%rax), %rsp |
119 | movq pt_regs_bp(%rax), %rbp | |
120 | movq pt_regs_si(%rax), %rsi | |
121 | movq pt_regs_di(%rax), %rdi | |
122 | movq pt_regs_bx(%rax), %rbx | |
123 | movq pt_regs_cx(%rax), %rcx | |
124 | movq pt_regs_dx(%rax), %rdx | |
0de80bcc RW |
125 | movq pt_regs_r8(%rax), %r8 |
126 | movq pt_regs_r9(%rax), %r9 | |
127 | movq pt_regs_r10(%rax), %r10 | |
128 | movq pt_regs_r11(%rax), %r11 | |
129 | movq pt_regs_r12(%rax), %r12 | |
130 | movq pt_regs_r13(%rax), %r13 | |
131 | movq pt_regs_r14(%rax), %r14 | |
132 | movq pt_regs_r15(%rax), %r15 | |
65ea5b03 | 133 | pushq pt_regs_flags(%rax) |
0de80bcc | 134 | popfq |
1da177e4 | 135 | |
cc456c4e KRW |
136 | /* Saved in save_processor_state. */ |
137 | lgdt saved_context_gdt_desc(%rax) | |
138 | ||
a7bea830 | 139 | xorl %eax, %eax |
1da177e4 | 140 | |
d158cbdf RW |
141 | /* tell the hibernation core that we've just restored the memory */ |
142 | movq %rax, in_suspend(%rip) | |
143 | ||
1da177e4 | 144 | ret |
ef0f3ed5 | 145 | ENDPROC(restore_registers) |