x86/kexec: Clean up register usage in relocate_kernel()

The memory encryption flag is passed in %r8 because that's where the
calling convention puts it. Instead of moving it to %r12 and then using
%r8 for other things, just leave it in %r8 and use other registers
instead.

Signed-off-by: David Woodhouse <dwmw@amazon.co.uk>
Signed-off-by: Ingo Molnar <mingo@kernel.org>
Cc: Baoquan He <bhe@redhat.com>
Cc: Vivek Goyal <vgoyal@redhat.com>
Cc: Dave Young <dyoung@redhat.com>
Cc: Eric Biederman <ebiederm@xmission.com>
Cc: Ard Biesheuvel <ardb@kernel.org>
Cc: "H. Peter Anvin" <hpa@zytor.com>
Link: https://lore.kernel.org/r/20241205153343.3275139-13-dwmw2@infradead.org
This commit is contained in:
David Woodhouse 2024-12-05 15:05:18 +00:00 committed by Ingo Molnar
parent b7155dfd49
commit 93e489ad7a

View File

@@ -79,24 +79,18 @@ SYM_CODE_START_NOALIGN(relocate_kernel)
	movq	%cr4, %r13
	movq	%r13, saved_cr4(%rip)

-	/* Save SME active flag */
-	movq	%r8, %r12
-
	/* save indirection list for jumping back */
	movq	%rdi, pa_backup_pages_map(%rip)

	/* Save the preserve_context to %r11 as swap_pages clobbers %rcx. */
	movq	%rcx, %r11

-	/* Physical address of control page */
-	movq	%rsi, %r8
-
	/* setup a new stack at the end of the physical control page */
-	lea	PAGE_SIZE(%r8), %rsp
+	lea	PAGE_SIZE(%rsi), %rsp

	/* jump to identity mapped page */
-	addq	$(identity_mapped - relocate_kernel), %r8
-	pushq	%r8
+	addq	$(identity_mapped - relocate_kernel), %rsi
+	pushq	%rsi
	ANNOTATE_UNRET_SAFE
	ret
	int3
@@ -107,8 +101,9 @@ SYM_CODE_START_LOCAL_NOALIGN(identity_mapped)
	/*
	 * %rdi	indirection page
	 * %rdx	start address
+	 * %r8	host_mem_enc_active
+	 * %r9	page table page
	 * %r11	preserve_context
-	 * %r12	host_mem_enc_active
	 * %r13	original CR4 when relocate_kernel() was invoked
	 */
@@ -161,7 +156,7 @@ SYM_CODE_START_LOCAL_NOALIGN(identity_mapped)
	 * entries that will conflict with the now unencrypted memory
	 * used by kexec. Flush the caches before copying the kernel.
	 */
-	testq	%r12, %r12
+	testq	%r8, %r8
	jz	.Lsme_off
	wbinvd
.Lsme_off: