From 16e617d05ef0c521d000c989796412ce713f28c9 Mon Sep 17 00:00:00 2001
From: Peter Zijlstra
Date: Wed, 10 Nov 2021 11:01:07 +0100
Subject: x86/entry_64: Remove .fixup usage

Place the anonymous .fixup code at the tail of the regular functions.

Signed-off-by: Peter Zijlstra (Intel)
Reviewed-by: Josh Poimboeuf
Reviewed-by: Borislav Petkov
Reviewed-by: Lai Jiangshan
Link: https://lore.kernel.org/r/20211110101325.186049322@infradead.org
---
 arch/x86/entry/entry_64.S | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git a/arch/x86/entry/entry_64.S b/arch/x86/entry/entry_64.S
index e23319ad3f42..1ffdbfaad2e2 100644
--- a/arch/x86/entry/entry_64.S
+++ b/arch/x86/entry/entry_64.S
@@ -739,13 +739,9 @@ SYM_FUNC_START(asm_load_gs_index)
 	swapgs
 	FRAME_END
 	RET
-SYM_FUNC_END(asm_load_gs_index)
-EXPORT_SYMBOL(asm_load_gs_index)
 
-	_ASM_EXTABLE(.Lgs_change, .Lbad_gs)
-	.section .fixup, "ax"
 	/* running with kernelgs */
-SYM_CODE_START_LOCAL_NOALIGN(.Lbad_gs)
+.Lbad_gs:
 	swapgs					/* switch back to user gs */
 .macro ZAP_GS
 	/* This can't be a string because the preprocessor needs to see it. */
@@ -756,8 +752,11 @@ SYM_CODE_START_LOCAL_NOALIGN(.Lbad_gs)
 	xorl	%eax, %eax
 	movl	%eax, %gs
 	jmp	2b
-SYM_CODE_END(.Lbad_gs)
-	.previous
+
+	_ASM_EXTABLE(.Lgs_change, .Lbad_gs)
+
+SYM_FUNC_END(asm_load_gs_index)
+EXPORT_SYMBOL(asm_load_gs_index)
 
 #ifdef CONFIG_XEN_PV
 /*
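
The shape this converts to, as the subject says, is fixup code placed at the
tail of the function itself and wired up with _ASM_EXTABLE, rather than in a
separate .fixup section. The sketch below illustrates that pattern only; the
symbol example_try_load is made up and is not part of this patch or of the
kernel, and it assumes a kernel .S file so the linkage and extable macros are
available.

#include <linux/linkage.h>
#include <asm/asm.h>

/* example_try_load is a hypothetical illustration, not a real kernel symbol */
SYM_FUNC_START(example_try_load)
1:	movq	(%rdi), %rax		/* may fault on a bad pointer */
	RET

	/* fixup code at the tail of the function, no .fixup section */
.Lexample_bad_ptr:
	xorl	%eax, %eax		/* report 0 if the load faulted */
	RET

	_ASM_EXTABLE(1b, .Lexample_bad_ptr)
SYM_FUNC_END(example_try_load)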