author      Johannes Berg <johannes.berg@intel.com>    2024-10-22 14:02:38 +0200
committer   Johannes Berg <johannes.berg@intel.com>    2024-10-23 09:04:59 +0200
commit      14d4a7b516e993cf3926758a7ede569d8e119855 (patch)
tree        73944a9852888a5207123438899c8aa684d39a0b /arch/x86
parent      um: Fix misaligned stack in stub_exe (diff)
download    linux-14d4a7b516e993cf3926758a7ede569d8e119855.tar.xz
            linux-14d4a7b516e993cf3926758a7ede569d8e119855.zip
um: make stub_exe _start() pure inline asm
Since __attribute__((naked)) cannot be used with functions
containing C statements, just generate the few instructions
it needs in assembly directly.
While at it, fix the stack usage ("1 + 2*x - 1" was an odd way to
write it) and document what the entry code must do, and why it must
adjust the stack.
Fixes: 8508a5e0e9db ("um: Fix misaligned stack in stub_exe")
Link: https://lore.kernel.org/linux-um/CABVgOSntH-uoOFMP5HwMXjx_f1osMnVdhgKRKm4uz6DFm2Lb8Q@mail.gmail.com/
Reviewed-by: David Gow <davidgow@google.com>
Signed-off-by: Johannes Berg <johannes.berg@intel.com>
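
For context, a minimal sketch of the intended caller side is shown below. The
real caller lives in arch/um/kernel/skas/stub_exe.c, which this arch/x86 diff
does not touch, so the function name real_init(), the include path and the
comments here are assumptions based on the commit message, not part of this
patch:

```c
/* Sketch only, not part of this patch: the real code is in
 * arch/um/kernel/skas/stub_exe.c; real_init() and the include path
 * are assumed here for illustration.
 */
#include <sysdep/stub.h>	/* assumed to provide stub_start() after this patch */

static void real_init(void)
{
	/* ... the real initialisation work happens here ... */
	for (;;)
		;
}

__attribute__((naked)) void _start(void)
{
	/*
	 * The stack after exec() starts at the top-most address, which is
	 * exactly where the stub code and data are about to be mapped.
	 * stub_start() expands to a single asm statement that moves the
	 * stack pointer down by one code page plus STUB_DATA_PAGES data
	 * pages and then calls real_init(), so this naked function needs
	 * no C statements at all.
	 */
	stub_start(real_init);
}
```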
Diffstat (limited to 'arch/x86')
-rw-r--r--   arch/x86/um/shared/sysdep/stub_32.h   8 ++++++++
-rw-r--r--   arch/x86/um/shared/sysdep/stub_64.h   8 ++++++++
2 files changed, 16 insertions, 0 deletions
diff --git a/arch/x86/um/shared/sysdep/stub_32.h b/arch/x86/um/shared/sysdep/stub_32.h
index 631a18d0ff44..390988132c0a 100644
--- a/arch/x86/um/shared/sysdep/stub_32.h
+++ b/arch/x86/um/shared/sysdep/stub_32.h
@@ -123,4 +123,12 @@ static __always_inline void *get_stub_data(void)
 
 	return (void *)ret;
 }
+
+#define stub_start(fn) \
+	asm volatile ( \
+		"subl %0,%%esp ;" \
+		"movl %1, %%eax ; " \
+		"call *%%eax ;" \
+		:: "i" ((1 + STUB_DATA_PAGES) * UM_KERN_PAGE_SIZE), \
+		   "i" (&fn))
 #endif
diff --git a/arch/x86/um/shared/sysdep/stub_64.h b/arch/x86/um/shared/sysdep/stub_64.h
index 17153dfd780a..294affbec742 100644
--- a/arch/x86/um/shared/sysdep/stub_64.h
+++ b/arch/x86/um/shared/sysdep/stub_64.h
@@ -126,4 +126,12 @@ static __always_inline void *get_stub_data(void)
 
 	return (void *)ret;
 }
+
+#define stub_start(fn) \
+	asm volatile ( \
+		"subq %0,%%rsp ;" \
+		"movq %1,%%rax ;" \
+		"call *%%rax ;" \
+		:: "i" ((1 + STUB_DATA_PAGES) * UM_KERN_PAGE_SIZE), \
+		   "i" (&fn))
 #endif
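
As a quick sanity check of the immediate operand the macro emits, the
arithmetic below uses illustrative values: UM_KERN_PAGE_SIZE is 4096 on x86,
and STUB_DATA_PAGES is assumed here to be 2 (its actual value comes from the
UML headers and is not part of this diff):

```c
/* Illustrative arithmetic only; both constants below are assumptions for
 * the sake of the example, not taken from this diff.
 */
#define UM_KERN_PAGE_SIZE	4096	/* x86 page size */
#define STUB_DATA_PAGES		2	/* assumed value */

/* One stub code page plus the stub data pages: 3 * 4096 bytes. */
_Static_assert((1 + STUB_DATA_PAGES) * UM_KERN_PAGE_SIZE == 12288,
	       "stack adjustment in stub_start()");

/* With these values the 64-bit expansion is effectively
 *	subq $12288,%rsp
 *	movq $fn,%rax
 *	call *%rax
 * so fn() runs on a stack that no longer overlaps the pages the stub is
 * about to take over at the top of the address space.
 */
```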