* [PATCH 0/2] x86: adjustments to .fixup section handling
@ 2023-01-05 11:10 Jan Beulich
  2023-01-05 11:11 ` [PATCH 1/2] x86: macroize switches to/from .fixup section Jan Beulich
                   ` (2 more replies)
  0 siblings, 3 replies; 4+ messages in thread
From: Jan Beulich @ 2023-01-05 11:10 UTC (permalink / raw)
  To: xen-devel; +Cc: Andrew Cooper, Wei Liu, Roger Pau Monné

1: macroize switches to/from .fixup section
2: split .fixup section with new enough gas

Jan



* [PATCH 1/2] x86: macroize switches to/from .fixup section
  2023-01-05 11:10 [PATCH 0/2] x86: adjustments to .fixup section handling Jan Beulich
@ 2023-01-05 11:11 ` Jan Beulich
  2023-01-05 11:12 ` [PATCH 2/2] x86: split .fixup section with new enough gas Jan Beulich
  2023-01-05 14:19 ` [PATCH 0/2] x86: adjustments to .fixup section handling Andrew Cooper
  2 siblings, 0 replies; 4+ messages in thread
From: Jan Beulich @ 2023-01-05 11:11 UTC (permalink / raw)
  To: xen-devel; +Cc: Andrew Cooper, Wei Liu, Roger Pau Monné

This centralizes section name and attribute setting, thus simplifying
future changes to either of these.
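
For illustration (derived from the conversions below, not new code), the
resulting pattern in inline asm looks roughly like this, using wrmsr_safe()
from asm/msr.h as the example:

    asm volatile ( "1: wrmsr\n2:\n"
                   _ASM_FIXUP "\n"
                   "3: movl %5,%0\n; jmp 2b\n"
                   _ASM_FIXUP_END "\n"
                   _ASM_EXTABLE(1b, 3b)
                   : "=&r" (rc)
                   : "c" (msr), "a" (lo), "d" (hi), "0" (0), "i" (-EFAULT) );

i.e. the two macros simply paste in the .pushsection / .popsection pair in
place of the various open-coded .section/.previous forms.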

Signed-off-by: Jan Beulich <jbeulich@suse.com>

--- a/xen/arch/x86/cpu/amd.c
+++ b/xen/arch/x86/cpu/amd.c
@@ -57,10 +57,10 @@ static inline int rdmsr_amd_safe(unsigne
 	int err;
 
 	asm volatile("1: rdmsr\n2:\n"
-		     ".section .fixup,\"ax\"\n"
+		     _ASM_FIXUP "\n"
 		     "3: movl %6,%2\n"
 		     "   jmp 2b\n"
-		     ".previous\n"
+		     _ASM_FIXUP_END "\n"
 		     _ASM_EXTABLE(1b, 3b)
 		     : "=a" (*lo), "=d" (*hi), "=r" (err)
 		     : "c" (msr), "D" (0x9c5a203a), "2" (0), "i" (-EFAULT));
@@ -74,10 +74,10 @@ static inline int wrmsr_amd_safe(unsigne
 	int err;
 
 	asm volatile("1: wrmsr\n2:\n"
-		     ".section .fixup,\"ax\"\n"
+		     _ASM_FIXUP "\n"
 		     "3: movl %6,%0\n"
 		     "   jmp 2b\n"
-		     ".previous\n"
+		     _ASM_FIXUP_END "\n"
 		     _ASM_EXTABLE(1b, 3b)
 		     : "=r" (err)
 		     : "c" (msr), "a" (lo), "d" (hi), "D" (0x9c5a203a),
--- a/xen/arch/x86/domain.c
+++ b/xen/arch/x86/domain.c
@@ -1607,11 +1607,11 @@ static void load_segments(struct vcpu *n
 #define TRY_LOAD_SEG(seg, val)                          \
     asm volatile ( "1: mov %k[_val], %%" #seg "\n\t"    \
                    "2:\n\t"                             \
-                   ".section .fixup, \"ax\"\n\t"        \
+                   _ASM_FIXUP "\n\t"                    \
                    "3: xor %k[ok], %k[ok]\n\t"          \
                    "   mov %k[ok], %%" #seg "\n\t"      \
                    "   jmp 2b\n\t"                      \
-                   ".previous\n\t"                      \
+                   _ASM_FIXUP_END "\n\t"                \
                    _ASM_EXTABLE(1b, 3b)                 \
                    : [ok] "+r" (all_segs_okay)          \
                    : [_val] "rm" (val) )
--- a/xen/arch/x86/extable.c
+++ b/xen/arch/x86/extable.c
@@ -164,11 +164,11 @@ static int __init cf_check stub_selftest
 
         asm volatile ( "INDIRECT_CALL %[stb]\n"
                        ".Lret%=:\n\t"
-                       ".pushsection .fixup,\"ax\"\n"
+                       _ASM_FIXUP "\n"
                        ".Lfix%=:\n\t"
                        "pop %[exn]\n\t"
                        "jmp .Lret%=\n\t"
-                       ".popsection\n\t"
+                       _ASM_FIXUP_END "\n\t"
                        _ASM_EXTABLE(.Lret%=, .Lfix%=)
                        : [exn] "+m" (res) ASM_CALL_CONSTRAINT
                        : [stb] "r" (addr), "a" (tests[i].rax));
--- a/xen/arch/x86/i387.c
+++ b/xen/arch/x86/i387.c
@@ -67,7 +67,7 @@ static inline void fpu_fxrstor(struct vc
         asm volatile (
             /* See below for why the operands/constraints are this way. */
             "1: " REX64_PREFIX "fxrstor (%2)\n"
-            ".section .fixup,\"ax\"   \n"
+            _ASM_FIXUP               "\n"
             "2: push %%"__OP"ax       \n"
             "   push %%"__OP"cx       \n"
             "   push %%"__OP"di       \n"
@@ -79,7 +79,7 @@ static inline void fpu_fxrstor(struct vc
             "   pop  %%"__OP"cx       \n"
             "   pop  %%"__OP"ax       \n"
             "   jmp  1b               \n"
-            ".previous                \n"
+            _ASM_FIXUP_END           "\n"
             _ASM_EXTABLE(1b, 2b)
             :
             : "m" (*fpu_ctxt), "i" (sizeof(*fpu_ctxt) / 4), "R" (fpu_ctxt) );
@@ -87,7 +87,7 @@ static inline void fpu_fxrstor(struct vc
     case 4: case 2:
         asm volatile (
             "1: fxrstor %0         \n"
-            ".section .fixup,\"ax\"\n"
+            _ASM_FIXUP            "\n"
             "2: push %%"__OP"ax    \n"
             "   push %%"__OP"cx    \n"
             "   push %%"__OP"di    \n"
@@ -99,7 +99,7 @@ static inline void fpu_fxrstor(struct vc
             "   pop  %%"__OP"cx    \n"
             "   pop  %%"__OP"ax    \n"
             "   jmp  1b            \n"
-            ".previous             \n"
+            _ASM_FIXUP_END        "\n"
             _ASM_EXTABLE(1b, 2b)
             :
             : "m" (*fpu_ctxt), "i" (sizeof(*fpu_ctxt) / 4) );
--- a/xen/arch/x86/include/asm/asm_defns.h
+++ b/xen/arch/x86/include/asm/asm_defns.h
@@ -79,6 +79,15 @@ register unsigned long current_stack_poi
 #define _ASM_EXTABLE(from, to)     _ASM__EXTABLE(, from, to)
 #define _ASM_PRE_EXTABLE(from, to) _ASM__EXTABLE(.pre, from, to)
 
+/* Exception recovery code section */
+#ifdef __ASSEMBLY__
+# define _ASM_FIXUP     .pushsection .fixup, "ax", @progbits
+# define _ASM_FIXUP_END .popsection
+#else
+# define _ASM_FIXUP     " .pushsection .fixup, \"ax\", @progbits"
+# define _ASM_FIXUP_END " .popsection"
+#endif
+
 #ifdef __ASSEMBLY__
 
 #ifdef HAVE_AS_QUOTED_SYM
--- a/xen/arch/x86/include/asm/hvm/vmx/vmx.h
+++ b/xen/arch/x86/include/asm/hvm/vmx/vmx.h
@@ -579,9 +579,9 @@ static inline int __vmxon(u64 addr)
         "1: " VMXON_OPCODE MODRM_EAX_06 "\n"
         "   setna %b0 ; neg %0\n" /* CF==1 or ZF==1 --> rc = -1 */
         "2:\n"
-        ".section .fixup,\"ax\"\n"
+        _ASM_FIXUP "\n"
         "3: sub $2,%0 ; jmp 2b\n"    /* #UD or #GP --> rc = -2 */
-        ".previous\n"
+        _ASM_FIXUP_END "\n"
         _ASM_EXTABLE(1b, 3b)
         : "=q" (rc)
         : "0" (0), "a" (&addr)
--- a/xen/arch/x86/include/asm/msr.h
+++ b/xen/arch/x86/include/asm/msr.h
@@ -44,10 +44,10 @@ static inline void wrmsrl(unsigned int m
     uint32_t lo_, hi_; \
     __asm__ __volatile__( \
         "1: rdmsr\n2:\n" \
-        ".section .fixup,\"ax\"\n" \
+        _ASM_FIXUP "\n" \
         "3: xorl %0,%0\n; xorl %1,%1\n" \
         "   movl %5,%2\n; jmp 2b\n" \
-        ".previous\n" \
+        _ASM_FIXUP_END "\n" \
         _ASM_EXTABLE(1b, 3b) \
         : "=a" (lo_), "=d" (hi_), "=&r" (rc_) \
         : "c" (msr), "2" (0), "i" (-EFAULT)); \
@@ -64,9 +64,9 @@ static inline int wrmsr_safe(unsigned in
 
     __asm__ __volatile__(
         "1: wrmsr\n2:\n"
-        ".section .fixup,\"ax\"\n"
+        _ASM_FIXUP "\n"
         "3: movl %5,%0\n; jmp 2b\n"
-        ".previous\n"
+        _ASM_FIXUP_END "\n"
         _ASM_EXTABLE(1b, 3b)
         : "=&r" (rc)
         : "c" (msr), "a" (lo), "d" (hi), "0" (0), "i" (-EFAULT));
--- a/xen/arch/x86/include/asm/uaccess.h
+++ b/xen/arch/x86/include/asm/uaccess.h
@@ -160,10 +160,10 @@ struct __large_struct { unsigned long bu
 		)							\
 		"1:	mov"itype" %"rtype"[val], (%[ptr])\n"		\
 		"2:\n"							\
-		".section .fixup,\"ax\"\n"				\
+		"       " _ASM_FIXUP "\n"				\
 		"3:	mov %[errno], %[ret]\n"				\
 		"	jmp 2b\n"					\
-		".previous\n"						\
+		_ASM_FIXUP_END "\n"					\
 		_ASM_EXTABLE(1b, 3b)					\
 		: [ret] "+r" (err), [ptr] "=&r" (dummy_)		\
 		  GUARD(, [scr1] "=&r" (dummy_), [scr2] "=&r" (dummy_))	\
@@ -177,11 +177,11 @@ struct __large_struct { unsigned long bu
 		)							\
 		"1:	mov (%[ptr]), %"rtype"[val]\n"			\
 		"2:\n"							\
-		".section .fixup,\"ax\"\n"				\
+		"       " _ASM_FIXUP "\n"				\
 		"3:	mov %[errno], %[ret]\n"				\
 		"	xor %k[val], %k[val]\n"				\
 		"	jmp 2b\n"					\
-		".previous\n"						\
+		_ASM_FIXUP_END "\n"					\
 		_ASM_EXTABLE(1b, 3b)					\
 		: [ret] "+r" (err), [val] ltype (x),			\
 		  [ptr] "=&r" (dummy_)					\
--- a/xen/arch/x86/pv/misc-hypercalls.c
+++ b/xen/arch/x86/pv/misc-hypercalls.c
@@ -251,11 +251,11 @@ long do_set_segment_base(unsigned int wh
          * re-read %gs and compare against the input.
          */
         asm volatile ( "1: mov %[sel], %%gs\n\t"
-                       ".section .fixup, \"ax\", @progbits\n\t"
+                       _ASM_FIXUP "\n\t"
                        "2: mov %k[flat], %%gs\n\t"
                        "   xor %[sel], %[sel]\n\t"
                        "   jmp 1b\n\t"
-                       ".previous\n\t"
+                       _ASM_FIXUP_END "\n\t"
                        _ASM_EXTABLE(1b, 2b)
                        : [sel] "+r" (sel)
                        : [flat] "r" (FLAT_USER_DS32) );
--- a/xen/arch/x86/traps.c
+++ b/xen/arch/x86/traps.c
@@ -550,9 +550,9 @@ static void show_trace(const struct cpu_
 
     /* Guarded read of the stack top. */
     asm ( "1: mov %[data], %[tos]; 2:\n"
-          ".pushsection .fixup,\"ax\"\n"
+          _ASM_FIXUP "\n"
           "3: movb $1, %[fault]; jmp 2b\n"
-          ".popsection\n"
+          _ASM_FIXUP_END "\n"
           _ASM_EXTABLE(1b, 3b)
           : [tos] "+r" (tos), [fault] "+qm" (fault) : [data] "m" (*sp) );
 
--- a/xen/arch/x86/usercopy.c
+++ b/xen/arch/x86/usercopy.c
@@ -38,12 +38,12 @@ unsigned int copy_to_guest_ll(void __use
         "    mov  %[aux],%[cnt]\n"
         "1:  rep movsb\n" /* ...remainder copied as bytes */
         "2:\n"
-        ".section .fixup,\"ax\"\n"
+        "    " _ASM_FIXUP "\n"
         "5:  add %[aux], %[cnt]\n"
         "    jmp 2b\n"
         "3:  lea (%q[aux], %q[cnt], "STR(BYTES_PER_LONG)"), %[cnt]\n"
         "    jmp 2b\n"
-        ".previous\n"
+        "    " _ASM_FIXUP_END "\n"
         _ASM_EXTABLE(4b, 5b)
         _ASM_EXTABLE(0b, 3b)
         _ASM_EXTABLE(1b, 2b)
@@ -81,7 +81,7 @@ unsigned int copy_from_guest_ll(void *to
         "    mov  %[aux], %[cnt]\n"
         "1:  rep movsb\n" /* ...remainder copied as bytes */
         "2:\n"
-        ".section .fixup,\"ax\"\n"
+        "    " _ASM_FIXUP "\n"
         "5:  add  %[aux], %[cnt]\n"
         "    jmp 6f\n"
         "3:  lea  (%q[aux], %q[cnt], "STR(BYTES_PER_LONG)"), %[cnt]\n"
@@ -92,7 +92,7 @@ unsigned int copy_from_guest_ll(void *to
         "    xchg %[aux], %%eax\n"
         "    mov  %k[from], %[cnt]\n"
         "    jmp 2b\n"
-        ".previous\n"
+        "    " _ASM_FIXUP_END "\n"
         _ASM_EXTABLE(4b, 5b)
         _ASM_EXTABLE(0b, 3b)
         _ASM_EXTABLE(1b, 6b)
@@ -149,10 +149,10 @@ unsigned int clear_guest_pv(void __user
             "    mov  %[bytes], %[cnt]\n"
             "1:  rep stosb\n"
             "2:\n"
-            ".section .fixup,\"ax\"\n"
+            "    " _ASM_FIXUP "\n"
             "3:  lea  (%q[bytes], %q[longs], "STR(BYTES_PER_LONG)"), %[cnt]\n"
             "    jmp  2b\n"
-            ".previous\n"
+            "    " _ASM_FIXUP_END "\n"
             _ASM_EXTABLE(0b,3b)
             _ASM_EXTABLE(1b,2b)
             : [cnt] "=&c" (n), [to] "+D" (to), [scratch1] "=&r" (dummy),
--- a/xen/arch/x86/x86_64/compat/entry.S
+++ b/xen/arch/x86/x86_64/compat/entry.S
@@ -321,11 +321,11 @@ __UNLIKELY_END(compat_bounce_null_select
         mov   %al,  TRAPBOUNCE_flags(%rdx)
         ret
 
-.section .fixup,"ax"
+        _ASM_FIXUP
 .Lfx13:
         xorl  %edi,%edi
         jmp   .Lft13
-.previous
+        _ASM_FIXUP_END
         _ASM_EXTABLE(.Lft1,  dom_crash_sync_extable)
         _ASM_EXTABLE(.Lft2,  compat_crash_page_fault)
         _ASM_EXTABLE(.Lft3,  compat_crash_page_fault_4)
@@ -346,9 +346,9 @@ compat_crash_page_fault:
         movl  %esi,%edi
         call  show_page_walk
         jmp   dom_crash_sync_extable
-.section .fixup,"ax"
+        _ASM_FIXUP
 .Lfx14:
         xorl  %edi,%edi
         jmp   .Lft14
-.previous
+        _ASM_FIXUP_END
         _ASM_EXTABLE(.Lft14, .Lfx14)
--- a/xen/arch/x86/x86_64/entry.S
+++ b/xen/arch/x86/x86_64/entry.S
@@ -580,7 +580,7 @@ __UNLIKELY_END(create_bounce_frame_bad_b
         mov   %al,  TRAPBOUNCE_flags(%rdx)
         ret
 
-        .pushsection .fixup, "ax", @progbits
+        _ASM_FIXUP
         # Numeric tags below represent the intended overall %rsi adjustment.
 domain_crash_page_fault_6x8:
         addq  $8,%rsi
@@ -616,7 +616,7 @@ ENTRY(dom_crash_sync_extable)
 #endif
         xorl  %edi,%edi
         jmp   asm_domain_crash_synchronous /* Does not return */
-        .popsection
+        _ASM_FIXUP_END
 #endif /* CONFIG_PV */
 
 /* --- CODE BELOW THIS LINE (MOSTLY) NOT GUEST RELATED --- */
--- a/xen/arch/x86/x86_emulate/x86_emulate.c
+++ b/xen/arch/x86/x86_emulate/x86_emulate.c
@@ -1260,11 +1260,11 @@ static inline int mkec(uint8_t e, int32_
     block_speculation(); /* SCSB */                                     \
     asm volatile ( pre "\n\tINDIRECT_CALL %[stub]\n\t" post "\n"        \
                    ".Lret%=:\n\t"                                       \
-                   ".pushsection .fixup,\"ax\"\n"                       \
+                   _ASM_FIXUP "\n"                                      \
                    ".Lfix%=:\n\t"                                       \
                    "pop %[exn]\n\t"                                     \
                    "jmp .Lret%=\n\t"                                    \
-                   ".popsection\n\t"                                    \
+                   _ASM_FIXUP_END "\n\t"                                \
                    _ASM_EXTABLE(.Lret%=, .Lfix%=)                       \
                    : [exn] "+g" (stub_exn.info) ASM_CALL_CONSTRAINT,    \
                      constraints,                                       \
--- a/xen/arch/x86/xstate.c
+++ b/xen/arch/x86/xstate.c
@@ -45,10 +45,10 @@ static inline bool xsetbv(u32 index, u64
 
     asm volatile ( "1: .byte 0x0f,0x01,0xd1\n"
                    "3:                     \n"
-                   ".section .fixup,\"ax\" \n"
+                   _ASM_FIXUP             "\n"
                    "2: xor %0,%0           \n"
                    "   jmp 3b              \n"
-                   ".previous              \n"
+                   _ASM_FIXUP_END         "\n"
                    _ASM_EXTABLE(1b, 2b)
                    : "+a" (lo)
                    : "c" (index), "d" (hi));
@@ -403,10 +403,10 @@ void xrstor(struct vcpu *v, uint64_t mas
 #define _xrstor(insn) \
         asm volatile ( "1: .byte " insn "\n" \
                        "3:\n" \
-                       "   .section .fixup,\"ax\"\n" \
+                       "   " _ASM_FIXUP "\n" \
                        "2: incl %[faults]\n" \
                        "   jmp 3b\n" \
-                       "   .previous\n" \
+                       "   " _ASM_FIXUP_END "\n" \
                        _ASM_EXTABLE(1b, 2b) \
                        : [mem] "+m" (*ptr), [faults] "+g" (faults) \
                        : [lmask] "a" (lmask), [hmask] "d" (hmask), \




* [PATCH 2/2] x86: split .fixup section with new enough gas
  2023-01-05 11:10 [PATCH 0/2] x86: adjustments to .fixup section handling Jan Beulich
  2023-01-05 11:11 ` [PATCH 1/2] x86: macroize switches to/from .fixup section Jan Beulich
@ 2023-01-05 11:12 ` Jan Beulich
  2023-01-05 14:19 ` [PATCH 0/2] x86: adjustments to .fixup section handling Andrew Cooper
  2 siblings, 0 replies; 4+ messages in thread
From: Jan Beulich @ 2023-01-05 11:12 UTC (permalink / raw)
  To: xen-devel; +Cc: Andrew Cooper, Wei Liu, Roger Pau Monné

GNU as, as of version 2.26, allows deriving the name of the section to
switch to from the name of the present section. For the substitution to
take effect, --sectname-subst needs to be passed to the assembler.
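
As an illustration (not part of the patch): with --sectname-subst in effect
the assembler substitutes %S in a section name with the name of the section
the directive appears in, e.g.

        .text
        .pushsection .fixup%S, "ax", @progbits   # opens .fixup.text
        .popsection

        .section .init.text, "ax", @progbits
        .pushsection .fixup%S, "ax", @progbits   # opens .fixup.init.text
        .popsection

which is what the new .fixup.text* and .fixup.init.text input section
patterns in the linker script changes below then collect.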

Signed-off-by: Jan Beulich <jbeulich@suse.com>
---
Similarly (and perhaps of more interest) we could split .ex_table, which
would reduce the number of entries to be searched through post-init.

--- a/Config.mk
+++ b/Config.mk
@@ -98,7 +98,7 @@ cc-option = $(shell if test -z "`echo 'v
 # Usage: $(call cc-option-add CFLAGS,CC,-march=winchip-c6)
 cc-option-add = $(eval $(call cc-option-add-closure,$(1),$(2),$(3)))
 define cc-option-add-closure
-    ifneq ($$(call cc-option,$$($(2)),$(3),n),n)
+    ifneq ($$(call cc-option,$$($(2)),$(firstword $(3)),n),n)
         $(1) += $(3)
     endif
 endef
--- a/xen/arch/x86/arch.mk
+++ b/xen/arch/x86/arch.mk
@@ -34,6 +34,9 @@ $(call as-option-add,CFLAGS,CC,\
 $(call as-option-add,CFLAGS,CC,\
     ".L1: .L2: .nops (.L2 - .L1)$$(comma)9",-DHAVE_AS_NOPS_DIRECTIVE)
 
+# Check to see whether the assembler supports the --sectname-subst option.
+$(call cc-option-add,CFLAGS,CC,-Wa$$(comma)--sectname-subst -DHAVE_AS_SECTNAME_SUBST)
+
 CFLAGS += -mno-red-zone -fpic
 
 # Xen doesn't use MMX or SSE internally.  If the compiler supports it, also skip
--- a/xen/arch/x86/include/asm/asm_defns.h
+++ b/xen/arch/x86/include/asm/asm_defns.h
@@ -81,10 +81,18 @@ register unsigned long current_stack_poi
 
 /* Exception recovery code section */
 #ifdef __ASSEMBLY__
-# define _ASM_FIXUP     .pushsection .fixup, "ax", @progbits
+# ifdef HAVE_AS_SECTNAME_SUBST
+#  define _ASM_FIXUP    .pushsection .fixup%S, "ax", @progbits
+# else
+#  define _ASM_FIXUP    .pushsection .fixup, "ax", @progbits
+# endif
 # define _ASM_FIXUP_END .popsection
 #else
-# define _ASM_FIXUP     " .pushsection .fixup, \"ax\", @progbits"
+# ifdef HAVE_AS_SECTNAME_SUBST
+#  define _ASM_FIXUP    " .pushsection .fixup%%S, \"ax\", @progbits"
+# else
+#  define _ASM_FIXUP    " .pushsection .fixup, \"ax\", @progbits"
+# endif
 # define _ASM_FIXUP_END " .popsection"
 #endif
 
--- a/xen/arch/x86/xen.lds.S
+++ b/xen/arch/x86/xen.lds.S
@@ -103,6 +103,12 @@ SECTIONS
        *(.text.__x86_indirect_thunk_*)
 
        *(.fixup)
+       *(.fixup.text)
+       *(.fixup.text.cold)
+       *(.fixup.text.unlikely .fixup.text.*_unlikely .fixup.text.unlikely.*)
+#ifdef CONFIG_CC_SPLIT_SECTIONS
+       *(.fixup.text.*)
+#endif
        *(.gnu.warning)
        _etext = .;             /* End of text section */
   } PHDR(text) = 0x9090
@@ -215,6 +221,8 @@ SECTIONS
        _sinittext = .;
        *(.init.text)
        *(.text.startup)
+       *(.fixup.init.text)
+       *(.fixup.text.startup)
        _einittext = .;
        /*
         * Here are the replacement instructions. The linker sticks them
--- a/xen/include/xen/xen.lds.h
+++ b/xen/include/xen/xen.lds.h
@@ -89,7 +89,9 @@
 #define DISCARD_SECTIONS     \
   /DISCARD/ : {              \
        *(.text.exit)         \
+       *(.fixup.text.exit)   \
        *(.exit.text)         \
+       *(.fixup.exit.text)   \
        *(.exit.data)         \
        *(.exitcall.exit)     \
        *(.discard)           \




* Re: [PATCH 0/2] x86: adjustments to .fixup section handling
  2023-01-05 11:10 [PATCH 0/2] x86: adjustments to .fixup section handling Jan Beulich
  2023-01-05 11:11 ` [PATCH 1/2] x86: macroize switches to/from .fixup section Jan Beulich
  2023-01-05 11:12 ` [PATCH 2/2] x86: split .fixup section with new enough gas Jan Beulich
@ 2023-01-05 14:19 ` Andrew Cooper
  2 siblings, 0 replies; 4+ messages in thread
From: Andrew Cooper @ 2023-01-05 14:19 UTC (permalink / raw)
  To: Jan Beulich, xen-devel; +Cc: Wei Liu, Roger Pau Monne

On 05/01/2023 11:10 am, Jan Beulich wrote:
> 1: macroize switches to/from .fixup section
> 2: split .fixup section with new enough gas
>
> Jan

Honestly, I was planning to make another effort to raise the minimum
compiler version to something which supports asm goto, and to delete
.fixup entirely.

This is a prerequisite for adopting objtool and using ORC unwinding.  The
use of the .fixup section in the first place actually interferes with
backtraces; most uses can be removed with some tweaks (and overall
tightening) of the extable handling mechanism, but the VMX VM* instructions
(which need a "jae err" branch) in particular can't use extable.
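
For context, a rough sketch of the shape asm goto permits (illustrative
only; it assumes _ASM_EXTABLE can be pointed at a C label via %l[], which
is not how Xen's macros are used today):

    /* Hypothetical wrmsr_safe() with no .fixup code at all: the recovery
     * path is an ordinary C label, so no out-of-line stub is needed. */
    static inline int wrmsr_safe(unsigned int msr, uint64_t val)
    {
        uint32_t lo = val, hi = val >> 32;

        asm goto ( "1: wrmsr\n\t"
                   _ASM_EXTABLE(1b, %l[fault])
                   : /* no outputs allowed with (pre-GCC11) asm goto */
                   : "c" (msr), "a" (lo), "d" (hi)
                   : "memory"
                   : fault );
        return 0;

     fault:
        return -EFAULT;
    }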

Given that we want to do this for several reasons anyway, I'm not sure
the added complexity here is useful.


As for the extable size note, splitting into two tables will complicate
the lookup logic at runtime.  And even by splitting the table, you're only
reducing the search length by less than one step: a binary search over N
entries takes about log2(N) probes, so even halving the table saves barely
a single probe.

I don't see splitting the tables turning out to be a win, but I think
there is a far simpler option.  The table is sorted by address, so all we
need to do is make extable_end[] a variable and move it forward when we
free .init, at which point we only binary-search through the first part of
the table.
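
A minimal sketch of that idea (names are illustrative rather than Xen's
actual ones, and real extable entries store relative offsets rather than
absolute addresses):

    /* End of the searched range becomes a variable instead of the linker
     * symbol. */
    static const struct exception_table_entry *extable_end = __stop___ex_table;

    /* Entries are sorted by address and the .init ones sort to the end,
     * so dropping them just means pulling the end pointer back once .init
     * has been freed.  is_init_addr() / ex_addr() are hypothetical helpers
     * for this sketch. */
    static void trim_init_extable(void)
    {
        while ( extable_end > __start___ex_table &&
                is_init_addr(ex_addr(extable_end - 1)) )
            --extable_end;
    }

with the binary search in the extable lookup then running over
[__start___ex_table, extable_end) only.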

~Andrew

