/*
 * Hibernation support for x86-64
 *
 * Distribute under GPLv2.
 *
 * Copyright 2007 Rafael J. Wysocki <rjw@sisk.pl>
 * Copyright 2005 Andi Kleen <ak@suse.de>
 * Copyright 2004 Pavel Machek <pavel@suse.cz>
 *
 * swsusp_arch_resume must not use any stack or any nonlocal variables while
 * copying pages:
 *
 * It's rewriting one kernel image with another. What is stack in "old"
 * image could very well be data page in "new" image, and overwriting
 * your own stack under you is bad idea.
 */
cf7700fe | 17 | |
1da177e4 LT |
18 | .text |
19 | #include <linux/linkage.h> | |
20 | #include <asm/segment.h> | |
0341c14d | 21 | #include <asm/page_types.h> |
e2d5df93 | 22 | #include <asm/asm-offsets.h> |
bbb1e57a | 23 | #include <asm/processor-flags.h> |
ef0f3ed5 | 24 | #include <asm/frame.h> |
1da177e4 LT |
25 | |
/*
 * swsusp_arch_suspend - save the CPU register state and trigger image creation
 *
 * Stores the general-purpose registers, RFLAGS and CR3 into saved_context /
 * restore_cr3 so that restore_registers can rebuild this exact CPU state
 * after the hibernation image has been copied back, then calls
 * swsusp_save to snapshot memory.  Returns swsusp_save's value in %rax.
 */
ENTRY(swsusp_arch_suspend)
	FRAME_BEGIN
	/* %rax = &saved_context; every GPR is stored at its pt_regs offset */
	movq	$saved_context, %rax
	movq	%rsp, pt_regs_sp(%rax)
	movq	%rbp, pt_regs_bp(%rax)
	movq	%rsi, pt_regs_si(%rax)
	movq	%rdi, pt_regs_di(%rax)
	movq	%rbx, pt_regs_bx(%rax)
	movq	%rcx, pt_regs_cx(%rax)
	movq	%rdx, pt_regs_dx(%rax)
	movq	%r8, pt_regs_r8(%rax)
	movq	%r9, pt_regs_r9(%rax)
	movq	%r10, pt_regs_r10(%rax)
	movq	%r11, pt_regs_r11(%rax)
	movq	%r12, pt_regs_r12(%rax)
	movq	%r13, pt_regs_r13(%rax)
	movq	%r14, pt_regs_r14(%rax)
	movq	%r15, pt_regs_r15(%rax)
	/* RFLAGS can only be read via the stack: push, then pop to memory */
	pushfq
	popq	pt_regs_flags(%rax)

	/* save cr3 (page-table root); restore_image hands it to the
	 * image kernel in %r9 so restore_registers can switch back to it */
	movq	%cr3, %rax
	movq	%rax, restore_cr3(%rip)

	call	swsusp_save
	FRAME_END
	ret
ENDPROC(swsusp_arch_suspend)
1da177e4 | 55 | |
/*
 * restore_image - set up registers and jump to the relocated restore code
 *
 * Loads everything the page-copy loop will need into registers BEFORE
 * jumping, because core_restore_code may not touch the stack or any
 * nonlocal memory while it overwrites the running kernel (see the file
 * header comment).  Register contract passed to core_restore_code:
 *   %r8  = restore_jump_address (entry point in the image kernel)
 *   %r9  = restore_cr3 (image kernel's page-table root)
 *   %rax = temp_level4_pgt (virtual address of temporary page tables)
 *   %rbx = mmu_cr4_features (CR4 value, used for the TLB flush dance)
 *   %rdx = restore_pblist (head of the page-backup-entry list)
 */
ENTRY(restore_image)
	/* prepare to jump to the image kernel */
	movq	restore_jump_address(%rip), %r8
	movq	restore_cr3(%rip), %r9

	/* prepare to switch to temporary page tables */
	movq	temp_level4_pgt(%rip), %rax
	movq	mmu_cr4_features(%rip), %rbx

	/* prepare to copy image data to their original locations */
	movq	restore_pblist(%rip), %rdx

	/* jump to relocated restore code (a copy living in a safe page
	 * that the restore loop is guaranteed not to overwrite) */
	movq	relocated_restore_code(%rip), %rcx
	jmpq	*%rcx

/*
 * core_restore_code - copy the hibernation image back over the kernel
 *
 * Code below has been relocated to a safe page, because it overwrites the
 * kernel it was loaded with; it must use only registers (no stack, no
 * nonlocal variables).  Inputs (set up by restore_image):
 *   %rax = temp_level4_pgt (virtual), %rbx = CR4 value,
 *   %rdx = pbe list head, %r8 = image-kernel entry, %r9 = image CR3.
 * Walks the pbe list copying each saved page to its original location,
 * then jumps to the image kernel's restore_registers via %r8.
 */
ENTRY(core_restore_code)
	/* switch to temporary page tables */
	movq	$__PAGE_OFFSET, %rcx
	subq	%rcx, %rax		/* CR3 needs a physical address */
	movq	%rax, %cr3
	/* flush TLB: toggling CR4.PGE invalidates global entries,
	 * and the CR3 reload flushes the non-global ones */
	movq	%rbx, %rcx
	andq	$~(X86_CR4_PGE), %rcx
	movq	%rcx, %cr4;  # turn off PGE
	movq	%cr3, %rcx;  # flush TLB
	movq	%rcx, %cr3;
	movq	%rbx, %cr4;  # turn PGE back on
.Lloop:
	testq	%rdx, %rdx		/* NULL pbe pointer terminates the list */
	jz	.Ldone

	/* get addresses from the pbe and copy the page */
	movq	pbe_address(%rdx), %rsi
	movq	pbe_orig_address(%rdx), %rdi
	movq	$(PAGE_SIZE >> 3), %rcx	/* page size in quadwords for movsq */
	rep
	movsq

	/* progress to the next pbe */
	movq	pbe_next(%rdx), %rdx
	jmp	.Lloop

.Ldone:
	/* jump to the restore_registers address from the image header */
	jmpq	*%r8

/* code below belongs to the image kernel */
	.align PAGE_SIZE
/*
 * restore_registers - rebuild the CPU state saved by swsusp_arch_suspend
 *
 * Entered from core_restore_code (via %r8) after the image pages have been
 * copied back.  %r9 holds the image kernel's CR3 saved in restore_cr3.
 * Restores page tables, flushes the TLB, reloads all GPRs, RFLAGS and the
 * GDT from saved_context, signals completion by clearing in_suspend, and
 * returns 0 — as if swsusp_arch_suspend's swsusp_save call had returned.
 */
ENTRY(restore_registers)
	FRAME_BEGIN
	/* go back to the original page tables */
	movq	%r9, %cr3

	/* Flush TLB, including "global" things (vmalloc) */
	movq	mmu_cr4_features(%rip), %rax
	movq	%rax, %rdx
	andq	$~(X86_CR4_PGE), %rdx
	movq	%rdx, %cr4;  # turn off PGE
	movq	%cr3, %rcx;  # flush TLB
	movq	%rcx, %cr3
	movq	%rax, %cr4;  # turn PGE back on

	/* We don't restore %rax, it must be 0 anyway */
	movq	$saved_context, %rax
	/* reload every GPR from its pt_regs slot; %rax last, since it is
	 * the base pointer for all of these loads */
	movq	pt_regs_sp(%rax), %rsp
	movq	pt_regs_bp(%rax), %rbp
	movq	pt_regs_si(%rax), %rsi
	movq	pt_regs_di(%rax), %rdi
	movq	pt_regs_bx(%rax), %rbx
	movq	pt_regs_cx(%rax), %rcx
	movq	pt_regs_dx(%rax), %rdx
	movq	pt_regs_r8(%rax), %r8
	movq	pt_regs_r9(%rax), %r9
	movq	pt_regs_r10(%rax), %r10
	movq	pt_regs_r11(%rax), %r11
	movq	pt_regs_r12(%rax), %r12
	movq	pt_regs_r13(%rax), %r13
	movq	pt_regs_r14(%rax), %r14
	movq	pt_regs_r15(%rax), %r15
	/* RFLAGS goes back the way it came out: via the stack */
	pushq	pt_regs_flags(%rax)
	popfq

	/* Saved in save_processor_state. */
	lgdt	saved_context_gdt_desc(%rax)

	xorq	%rax, %rax		/* return value: 0 */

	/* tell the hibernation core that we've just restored the memory */
	movq	%rax, in_suspend(%rip)

	FRAME_END
	ret
ENDPROC(restore_registers)