Commit ab0fedcc authored by Peter Zijlstra

x86/copy_mc_64: Remove .fixup usage

Place the anonymous .fixup code at the tail of the regular functions.
Signed-off-by: Peter Zijlstra (Intel) <peterz@infradead.org>
Reviewed-by: Josh Poimboeuf <jpoimboe@redhat.com>
Reviewed-by: Borislav Petkov <bp@suse.de>
Link: https://lore.kernel.org/r/20211110101325.127055887@infradead.org
parent acba44d2
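
As a rough illustration of what moving the .fixup code entails (a minimal sketch with a hypothetical example_copy function and made-up .L_read/.E_fault labels, not code from this patch): the fault-recovery code that used to be collected in the shared .fixup section, bracketed by .section .fixup, "ax" and .previous, now sits at the tail of the function itself, after the regular return path and before SYM_FUNC_END(), with the exception-table entries pointing at it there.

/*
 * Illustrative sketch only -- example_copy, .L_read and .E_fault are
 * made-up names, and the two variants are shown together purely for
 * comparison.  Assumes a kernel .S file that includes <linux/linkage.h>
 * and <asm/asm.h> for SYM_FUNC_START/SYM_FUNC_END and _ASM_EXTABLE.
 */

/* Before: recovery code collected in the shared .fixup section. */
SYM_FUNC_START(example_copy)
.L_read:
	movq	(%rsi), %rax		/* access that may fault */
	RET
SYM_FUNC_END(example_copy)

	.section .fixup, "ax"
.E_fault:
	movq	$-1, %rax		/* placeholder error return */
	RET
	.previous

	_ASM_EXTABLE(.L_read, .E_fault)

/* After: the same recovery code at the tail of the function; it sits */
/* past the regular RET, so it only runs when a listed access faults. */
SYM_FUNC_START(example_copy)
.L_read:
	movq	(%rsi), %rax		/* access that may fault */
	RET

.E_fault:
	movq	$-1, %rax		/* placeholder error return */
	RET

	_ASM_EXTABLE(.L_read, .E_fault)
SYM_FUNC_END(example_copy)

Since SYM_FUNC_END() closes the function symbol, recovery code placed this way is covered by the function's own symbol instead of living as anonymous text in .fixup.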
arch/x86/lib/copy_mc_64.S

@@ -78,9 +78,7 @@ SYM_FUNC_START(copy_mc_fragile)
 	xorl %eax, %eax
 .L_done:
 	RET
-SYM_FUNC_END(copy_mc_fragile)
 
-	.section .fixup, "ax"
 	/*
 	 * Return number of bytes not copied for any failure. Note that
 	 * there is no "tail" handling since the source buffer is 8-byte
@@ -105,14 +103,14 @@ SYM_FUNC_END(copy_mc_fragile)
 	movl %ecx, %edx
 	jmp copy_mc_fragile_handle_tail
 
-	.previous
 	_ASM_EXTABLE_TYPE(.L_read_leading_bytes, .E_leading_bytes, EX_TYPE_DEFAULT_MCE_SAFE)
 	_ASM_EXTABLE_TYPE(.L_read_words, .E_read_words, EX_TYPE_DEFAULT_MCE_SAFE)
 	_ASM_EXTABLE_TYPE(.L_read_trailing_bytes, .E_trailing_bytes, EX_TYPE_DEFAULT_MCE_SAFE)
 	_ASM_EXTABLE(.L_write_leading_bytes, .E_leading_bytes)
 	_ASM_EXTABLE(.L_write_words, .E_write_words)
 	_ASM_EXTABLE(.L_write_trailing_bytes, .E_trailing_bytes)
+SYM_FUNC_END(copy_mc_fragile)
 
 #endif /* CONFIG_X86_MCE */
 
 /*
@@ -133,9 +131,7 @@ SYM_FUNC_START(copy_mc_enhanced_fast_string)
 	/* Copy successful. Return zero */
 	xorl %eax, %eax
 	RET
-SYM_FUNC_END(copy_mc_enhanced_fast_string)
 
-	.section .fixup, "ax"
 .E_copy:
 	/*
 	 * On fault %rcx is updated such that the copy instruction could
@@ -147,7 +143,7 @@ SYM_FUNC_END(copy_mc_enhanced_fast_string)
 	movq %rcx, %rax
 	RET
 
-	.previous
 	_ASM_EXTABLE_TYPE(.L_copy, .E_copy, EX_TYPE_DEFAULT_MCE_SAFE)
+SYM_FUNC_END(copy_mc_enhanced_fast_string)
 
 #endif /* !CONFIG_UML */