Commit ea49cdb2 authored by Kai Huang, committed by Thomas Gleixner

x86/kexec: Add comments around swap_pages() assembly to improve readability

The assembly around swap_pages() in relocate_kernel() takes some time to
follow because it is easy to lose track of which register holds what
across the longer stretches of code.  Add a few comments around
swap_pages() to improve readability.

Signed-off-by: Kai Huang <kai.huang@intel.com>
Signed-off-by: Thomas Gleixner <tglx@linutronix.de>
Acked-by: Kirill A. Shutemov <kirill.shutemov@linux.intel.com>
Link: https://lore.kernel.org/all/8b52b0b8513a34b2a02fb4abb05c6700c2821475.1724573384.git.kai.huang@intel.com
parent 3c41ad39
@@ -170,6 +170,7 @@ SYM_CODE_START_LOCAL_NOALIGN(identity_mapped)
 	wbinvd
 .Lsme_off:
+	/* Save the preserve_context to %r11 as swap_pages clobbers %rcx. */
 	movq	%rcx, %r11
 	call	swap_pages
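For readers less used to the register bookkeeping here: swap_pages uses %rcx internally (for example as the "rep ; movsq" count), so the preserve_context flag handed over in %rcx has to be parked in a register that survives the call. A rough C analogue of that caller-save pattern, with hypothetical names (do_swap_pages, keep_flag_across_swap) that are not kernel symbols, might look like:

	/* Sketch only: mirrors "movq %rcx, %r11; call swap_pages". */
	static void do_swap_pages(void)
	{
		/* ...would clobber the register that held the flag... */
	}

	static int keep_flag_across_swap(unsigned long preserve_context)
	{
		unsigned long saved = preserve_context;	/* movq %rcx, %r11 */

		do_swap_pages();			/* call swap_pages */

		return saved != 0;	/* flag still available after the call */
	}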
@@ -289,18 +290,21 @@ SYM_CODE_START_LOCAL_NOALIGN(swap_pages)
 	movq	%rcx, %rsi	/* For every source page do a copy */
 	andq	$0xfffffffffffff000, %rsi
-	movq	%rdi, %rdx
-	movq	%rsi, %rax
+	movq	%rdi, %rdx	/* Save destination page to %rdx */
+	movq	%rsi, %rax	/* Save source page to %rax */
+	/* copy source page to swap page */
 	movq	%r10, %rdi
 	movl	$512, %ecx
 	rep ; movsq
+	/* copy destination page to source page */
 	movq	%rax, %rdi
 	movq	%rdx, %rsi
 	movl	$512, %ecx
 	rep ; movsq
+	/* copy swap page to destination page */
 	movq	%rdx, %rdi
 	movq	%r10, %rsi
 	movl	$512, %ecx
......
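Each "movl $512, %ecx" followed by "rep ; movsq" in the hunk above moves 512 quadwords, i.e. one 4096-byte page, and the three copies route the data through the scratch page held in %r10 so that the source and destination pages end up exchanged rather than merely copied. A minimal C sketch of that exchange (swap_one_page and its parameters are hypothetical names, not kernel code):

	#include <string.h>

	#define PAGE_SIZE 4096	/* 512 quadwords of 8 bytes, as in "movl $512, %ecx" */

	/* dest plays the role of %rdx, src of %rax, scratch of %r10. */
	static void swap_one_page(void *dest, void *src, void *scratch)
	{
		memcpy(scratch, src, PAGE_SIZE);	/* copy source page to swap page */
		memcpy(src, dest, PAGE_SIZE);		/* copy destination page to source page */
		memcpy(dest, scratch, PAGE_SIZE);	/* copy swap page to destination page */
	}

The net effect is that the two pages' contents are swapped via the scratch page, one page at a time, which matches the three comment blocks the commit adds.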