x86: Save registers in saved_context during suspend and hibernation

During hibernation and suspend on x86_64, save CPU registers in the saved_context
structure rather than in a handful of separate variables.

Signed-off-by: Rafael J. Wysocki <rjw@sisk.pl>
Signed-off-by: Ingo Molnar <mingo@elte.hu>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>

authored by Rafael J. Wysocki and committed by Thomas Gleixner 0de80bcc ef685298

+125 -105
+53 -48
arch/x86/kernel/acpi/wakeup_64.S
··· 4 4 #include <asm/pgtable.h> 5 5 #include <asm/page.h> 6 6 #include <asm/msr.h> 7 + #include <asm/asm-offsets.h> 7 8 8 9 # Copyright 2003 Pavel Machek <pavel@suse.cz>, distribute under GPLv2 9 10 # ··· 343 342 xorl %eax, %eax 344 343 call save_processor_state 345 344 346 - movq %rsp, saved_context_esp(%rip) 347 - movq %rax, saved_context_eax(%rip) 348 - movq %rbx, saved_context_ebx(%rip) 349 - movq %rcx, saved_context_ecx(%rip) 350 - movq %rdx, saved_context_edx(%rip) 351 - movq %rbp, saved_context_ebp(%rip) 352 - movq %rsi, saved_context_esi(%rip) 353 - movq %rdi, saved_context_edi(%rip) 354 - movq %r8, saved_context_r08(%rip) 355 - movq %r9, saved_context_r09(%rip) 356 - movq %r10, saved_context_r10(%rip) 357 - movq %r11, saved_context_r11(%rip) 358 - movq %r12, saved_context_r12(%rip) 359 - movq %r13, saved_context_r13(%rip) 360 - movq %r14, saved_context_r14(%rip) 361 - movq %r15, saved_context_r15(%rip) 362 - pushfq ; popq saved_context_eflags(%rip) 345 + movq $saved_context, %rax 346 + movq %rsp, pt_regs_rsp(%rax) 347 + movq %rbp, pt_regs_rbp(%rax) 348 + movq %rsi, pt_regs_rsi(%rax) 349 + movq %rdi, pt_regs_rdi(%rax) 350 + movq %rbx, pt_regs_rbx(%rax) 351 + movq %rcx, pt_regs_rcx(%rax) 352 + movq %rdx, pt_regs_rdx(%rax) 353 + movq %r8, pt_regs_r8(%rax) 354 + movq %r9, pt_regs_r9(%rax) 355 + movq %r10, pt_regs_r10(%rax) 356 + movq %r11, pt_regs_r11(%rax) 357 + movq %r12, pt_regs_r12(%rax) 358 + movq %r13, pt_regs_r13(%rax) 359 + movq %r14, pt_regs_r14(%rax) 360 + movq %r15, pt_regs_r15(%rax) 361 + pushfq 362 + popq pt_regs_eflags(%rax) 363 363 364 364 movq $.L97, saved_rip(%rip) 365 365 366 - movq %rsp,saved_rsp 367 - movq %rbp,saved_rbp 368 - movq %rbx,saved_rbx 369 - movq %rdi,saved_rdi 370 - movq %rsi,saved_rsi 366 + movq %rsp, saved_rsp 367 + movq %rbp, saved_rbp 368 + movq %rbx, saved_rbx 369 + movq %rdi, saved_rdi 370 + movq %rsi, saved_rsi 371 371 372 372 addq $8, %rsp 373 373 movl $3, %edi ··· 379 377 .L99: 380 378 .align 4 381 379 movl $24, %eax 
382 - movw %ax, %ds 383 - movq saved_context+58(%rip), %rax 384 - movq %rax, %cr4 385 - movq saved_context+50(%rip), %rax 386 - movq %rax, %cr3 387 - movq saved_context+42(%rip), %rax 388 - movq %rax, %cr2 389 - movq saved_context+34(%rip), %rax 390 - movq %rax, %cr0 391 - pushq saved_context_eflags(%rip) ; popfq 392 - movq saved_context_esp(%rip), %rsp 393 - movq saved_context_ebp(%rip), %rbp 394 - movq saved_context_eax(%rip), %rax 395 - movq saved_context_ebx(%rip), %rbx 396 - movq saved_context_ecx(%rip), %rcx 397 - movq saved_context_edx(%rip), %rdx 398 - movq saved_context_esi(%rip), %rsi 399 - movq saved_context_edi(%rip), %rdi 400 - movq saved_context_r08(%rip), %r8 401 - movq saved_context_r09(%rip), %r9 402 - movq saved_context_r10(%rip), %r10 403 - movq saved_context_r11(%rip), %r11 404 - movq saved_context_r12(%rip), %r12 405 - movq saved_context_r13(%rip), %r13 406 - movq saved_context_r14(%rip), %r14 407 - movq saved_context_r15(%rip), %r15 380 + movw %ax, %ds 381 + 382 + /* We don't restore %rax, it must be 0 anyway */ 383 + movq $saved_context, %rax 384 + movq saved_context_cr4(%rax), %rbx 385 + movq %rbx, %cr4 386 + movq saved_context_cr3(%rax), %rbx 387 + movq %rbx, %cr3 388 + movq saved_context_cr2(%rax), %rbx 389 + movq %rbx, %cr2 390 + movq saved_context_cr0(%rax), %rbx 391 + movq %rbx, %cr0 392 + pushq pt_regs_eflags(%rax) 393 + popfq 394 + movq pt_regs_rsp(%rax), %rsp 395 + movq pt_regs_rbp(%rax), %rbp 396 + movq pt_regs_rsi(%rax), %rsi 397 + movq pt_regs_rdi(%rax), %rdi 398 + movq pt_regs_rbx(%rax), %rbx 399 + movq pt_regs_rcx(%rax), %rcx 400 + movq pt_regs_rdx(%rax), %rdx 401 + movq pt_regs_r8(%rax), %r8 402 + movq pt_regs_r9(%rax), %r9 403 + movq pt_regs_r10(%rax), %r10 404 + movq pt_regs_r11(%rax), %r11 405 + movq pt_regs_r12(%rax), %r12 406 + movq pt_regs_r13(%rax), %r13 407 + movq pt_regs_r14(%rax), %r14 408 + movq pt_regs_r15(%rax), %r15 408 409 409 410 xorl %eax, %eax 410 411 addq $8, %rsp
+28
arch/x86/kernel/asm-offsets_64.c
··· 76 76 DEFINE(pbe_orig_address, offsetof(struct pbe, orig_address)); 77 77 DEFINE(pbe_next, offsetof(struct pbe, next)); 78 78 BLANK(); 79 + #define ENTRY(entry) DEFINE(pt_regs_ ## entry, offsetof(struct pt_regs, entry)) 80 + ENTRY(rbx); 81 + ENTRY(rax); 82 + ENTRY(rcx); 83 + ENTRY(rdx); 84 + ENTRY(rsp); 85 + ENTRY(rbp); 86 + ENTRY(rsi); 87 + ENTRY(rdi); 88 + ENTRY(r8); 89 + ENTRY(r9); 90 + ENTRY(r10); 91 + ENTRY(r11); 92 + ENTRY(r12); 93 + ENTRY(r13); 94 + ENTRY(r14); 95 + ENTRY(r15); 96 + ENTRY(eflags); 97 + BLANK(); 98 + #undef ENTRY 99 + #define ENTRY(entry) DEFINE(saved_context_ ## entry, offsetof(struct saved_context, entry)) 100 + ENTRY(cr0); 101 + ENTRY(cr2); 102 + ENTRY(cr3); 103 + ENTRY(cr4); 104 + ENTRY(cr8); 105 + BLANK(); 106 + #undef ENTRY 79 107 DEFINE(TSS_ist, offsetof(struct tss_struct, ist)); 80 108 BLANK(); 81 109 DEFINE(crypto_tfm_ctx_offset, offsetof(struct crypto_tfm, __crt_ctx));
-6
arch/x86/kernel/suspend_64.c
··· 19 19 20 20 struct saved_context saved_context; 21 21 22 - unsigned long saved_context_eax, saved_context_ebx, saved_context_ecx, saved_context_edx; 23 - unsigned long saved_context_esp, saved_context_ebp, saved_context_esi, saved_context_edi; 24 - unsigned long saved_context_r08, saved_context_r09, saved_context_r10, saved_context_r11; 25 - unsigned long saved_context_r12, saved_context_r13, saved_context_r14, saved_context_r15; 26 - unsigned long saved_context_eflags; 27 - 28 22 void __save_processor_state(struct saved_context *ctxt) 29 23 { 30 24 kernel_fpu_begin();
+37 -35
arch/x86/kernel/suspend_asm_64.S
··· 17 17 #include <asm/asm-offsets.h> 18 18 19 19 ENTRY(swsusp_arch_suspend) 20 - 21 - movq %rsp, saved_context_esp(%rip) 22 - movq %rax, saved_context_eax(%rip) 23 - movq %rbx, saved_context_ebx(%rip) 24 - movq %rcx, saved_context_ecx(%rip) 25 - movq %rdx, saved_context_edx(%rip) 26 - movq %rbp, saved_context_ebp(%rip) 27 - movq %rsi, saved_context_esi(%rip) 28 - movq %rdi, saved_context_edi(%rip) 29 - movq %r8, saved_context_r08(%rip) 30 - movq %r9, saved_context_r09(%rip) 31 - movq %r10, saved_context_r10(%rip) 32 - movq %r11, saved_context_r11(%rip) 33 - movq %r12, saved_context_r12(%rip) 34 - movq %r13, saved_context_r13(%rip) 35 - movq %r14, saved_context_r14(%rip) 36 - movq %r15, saved_context_r15(%rip) 37 - pushfq ; popq saved_context_eflags(%rip) 20 + movq $saved_context, %rax 21 + movq %rsp, pt_regs_rsp(%rax) 22 + movq %rbp, pt_regs_rbp(%rax) 23 + movq %rsi, pt_regs_rsi(%rax) 24 + movq %rdi, pt_regs_rdi(%rax) 25 + movq %rbx, pt_regs_rbx(%rax) 26 + movq %rcx, pt_regs_rcx(%rax) 27 + movq %rdx, pt_regs_rdx(%rax) 28 + movq %r8, pt_regs_r8(%rax) 29 + movq %r9, pt_regs_r9(%rax) 30 + movq %r10, pt_regs_r10(%rax) 31 + movq %r11, pt_regs_r11(%rax) 32 + movq %r12, pt_regs_r12(%rax) 33 + movq %r13, pt_regs_r13(%rax) 34 + movq %r14, pt_regs_r14(%rax) 35 + movq %r15, pt_regs_r15(%rax) 36 + pushfq 37 + popq pt_regs_eflags(%rax) 38 38 39 39 /* save the address of restore_registers */ 40 40 movq $restore_registers, %rax ··· 113 113 movq %rcx, %cr3 114 114 movq %rax, %cr4; # turn PGE back on 115 115 116 - movq saved_context_esp(%rip), %rsp 117 - movq saved_context_ebp(%rip), %rbp 118 - /* restore GPRs (we don't restore %rax, it must be 0 anyway) */ 119 - movq saved_context_ebx(%rip), %rbx 120 - movq saved_context_ecx(%rip), %rcx 121 - movq saved_context_edx(%rip), %rdx 122 - movq saved_context_esi(%rip), %rsi 123 - movq saved_context_edi(%rip), %rdi 124 - movq saved_context_r08(%rip), %r8 125 - movq saved_context_r09(%rip), %r9 126 - movq saved_context_r10(%rip), %r10 
127 - movq saved_context_r11(%rip), %r11 128 - movq saved_context_r12(%rip), %r12 129 - movq saved_context_r13(%rip), %r13 130 - movq saved_context_r14(%rip), %r14 131 - movq saved_context_r15(%rip), %r15 132 - pushq saved_context_eflags(%rip) ; popfq 116 + /* We don't restore %rax, it must be 0 anyway */ 117 + movq $saved_context, %rax 118 + movq pt_regs_rsp(%rax), %rsp 119 + movq pt_regs_rbp(%rax), %rbp 120 + movq pt_regs_rsi(%rax), %rsi 121 + movq pt_regs_rdi(%rax), %rdi 122 + movq pt_regs_rbx(%rax), %rbx 123 + movq pt_regs_rcx(%rax), %rcx 124 + movq pt_regs_rdx(%rax), %rdx 125 + movq pt_regs_r8(%rax), %r8 126 + movq pt_regs_r9(%rax), %r9 127 + movq pt_regs_r10(%rax), %r10 128 + movq pt_regs_r11(%rax), %r11 129 + movq pt_regs_r12(%rax), %r12 130 + movq pt_regs_r13(%rax), %r13 131 + movq pt_regs_r14(%rax), %r14 132 + movq pt_regs_r15(%rax), %r15 133 + pushq pt_regs_eflags(%rax) 134 + popfq 133 135 134 136 xorq %rax, %rax 135 137
+7 -16
include/asm-x86/suspend_64.h
··· 3 3 * Based on code 4 4 * Copyright 2001 Patrick Mochel <mochel@osdl.org> 5 5 */ 6 + #ifndef __ASM_X86_64_SUSPEND_H 7 + #define __ASM_X86_64_SUSPEND_H 8 + 6 9 #include <asm/desc.h> 7 10 #include <asm/i387.h> 8 11 ··· 15 12 return 0; 16 13 } 17 14 18 - /* Image of the saved processor state. If you touch this, fix acpi_wakeup.S. */ 15 + /* Image of the saved processor state. If you touch this, fix acpi/wakeup.S. */ 19 16 struct saved_context { 17 + struct pt_regs regs; 20 18 u16 ds, es, fs, gs, ss; 21 19 unsigned long gs_base, gs_kernel_base, fs_base; 22 20 unsigned long cr0, cr2, cr3, cr4, cr8; ··· 33 29 unsigned long tr; 34 30 unsigned long safety; 35 31 unsigned long return_address; 36 - unsigned long eflags; 37 32 } __attribute__((packed)); 38 - 39 - /* We'll access these from assembly, so we'd better have them outside struct */ 40 - extern unsigned long saved_context_eax, saved_context_ebx, saved_context_ecx, saved_context_edx; 41 - extern unsigned long saved_context_esp, saved_context_ebp, saved_context_esi, saved_context_edi; 42 - extern unsigned long saved_context_r08, saved_context_r09, saved_context_r10, saved_context_r11; 43 - extern unsigned long saved_context_r12, saved_context_r13, saved_context_r14, saved_context_r15; 44 - extern unsigned long saved_context_eflags; 45 33 46 34 #define loaddebug(thread,register) \ 47 35 set_debugreg((thread)->debugreg##register, register) 48 36 49 37 extern void fix_processor_context(void); 50 38 51 - extern unsigned long saved_rip; 52 - extern unsigned long saved_rsp; 53 - extern unsigned long saved_rbp; 54 - extern unsigned long saved_rbx; 55 - extern unsigned long saved_rsi; 56 - extern unsigned long saved_rdi; 57 - 58 39 /* routines for saving/restoring kernel state */ 59 40 extern int acpi_save_state_mem(void); 60 41 extern char core_restore_code; 61 42 extern char restore_registers; 43 + 44 + #endif /* __ASM_X86_64_SUSPEND_H */