um: make stub_exe _start() pure inline asm

Since __attribute__((naked)) cannot be used with functions
containing C statements, just generate the few instructions
it needs in assembly directly.

While at it, fix the stack usage (the "1 + 2*x - 1" expression is odd)
and document what _start() must do and why it must adjust the stack.

Fixes: 8508a5e0e9 ("um: Fix misaligned stack in stub_exe")
Link: https://lore.kernel.org/linux-um/CABVgOSntH-uoOFMP5HwMXjx_f1osMnVdhgKRKm4uz6DFm2Lb8Q@mail.gmail.com/
Reviewed-by: David Gow <davidgow@google.com>
Signed-off-by: Johannes Berg <johannes.berg@intel.com>
Author: Johannes Berg
Date:   2024-10-22 14:02:38 +02:00
Commit: 14d4a7b516 (parent 8508a5e0e9)
3 changed files with 27 additions and 7 deletions
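
For context, here is a minimal standalone sketch of the pattern this
commit adopts (illustration only, not part of the commit; the file name,
macro values and compiler invocation are hypothetical, and the toolchain
must support __attribute__((naked)) on x86). A naked function gets no
compiler-generated prologue/epilogue, so its body must be a single asm
statement; here that statement lowers the stack and calls into C,
mirroring what stub_start() does:

/* demo.c -- build e.g. with:
 *   gcc -m32 -static -no-pie -nostartfiles -o demo demo.c
 */
#include <unistd.h>

#define DEMO_PAGE_SIZE	4096	/* stand-in for UM_KERN_PAGE_SIZE */
#define DEMO_DATA_PAGES	2	/* stand-in for STUB_DATA_PAGES */

static void real_init(void)
{
	static const char msg[] = "C code running on the lowered stack\n";

	write(1, msg, sizeof(msg) - 1);
	_exit(0);	/* must not return into the naked caller */
}

__attribute__((naked)) void _start(void)
{
	/* nothing but asm in the body: drop the stack, call into C */
	asm volatile("subl %0,%%esp ;"
		     "movl %1,%%eax ;"
		     "call *%%eax ;"
		     :: "i" ((1 + DEMO_DATA_PAGES) * DEMO_PAGE_SIZE),
			"i" (&real_init));
}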

arch/um/kernel/skas/stub_exe.c

@@ -81,11 +81,15 @@ noinline static void real_init(void)
 __attribute__((naked)) void _start(void)
 {
-	char *alloc;
-
-	/* Make enough space for the stub (including space for alignment) */
-	alloc = __builtin_alloca((1 + 2 * STUB_DATA_PAGES - 1) * UM_KERN_PAGE_SIZE);
-	asm volatile("" : "+r,m"(alloc) : : "memory");
-
-	real_init();
+	/*
+	 * Since the stack after exec() starts at the top-most address,
+	 * but that's exactly where we also want to map the stub data
+	 * and code, this must:
+	 *  - push the stack by 1 code and STUB_DATA_PAGES data pages
+	 *  - call real_init()
+	 * This way, real_init() can use the stack normally, while the
+	 * original stack further down (higher address) will become
+	 * inaccessible after the mmap() calls above.
+	 */
+	stub_start(real_init);
 }
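
To make the stack-usage fix concrete, a quick check of the two
expressions (plugging in hypothetical values STUB_DATA_PAGES == 2 and
UM_KERN_PAGE_SIZE == 4096; the real values live in the UML headers):

/* old: (1 + 2 * STUB_DATA_PAGES - 1) * UM_KERN_PAGE_SIZE
 *    = (1 + 4 - 1) * 4096 = 16384 bytes, i.e. 2 * STUB_DATA_PAGES
 *      pages -- the "1" and "- 1" cancel out, which is the oddity
 *      the commit message refers to;
 * new: (1 + STUB_DATA_PAGES) * UM_KERN_PAGE_SIZE
 *    = (1 + 2) * 4096 = 12288 bytes, i.e. exactly 1 code page plus
 *      STUB_DATA_PAGES data pages, matching the comment above.
 */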

arch/x86/um/shared/sysdep/stub_32.h

@@ -123,4 +123,12 @@ static __always_inline void *get_stub_data(void)
 	return (void *)ret;
 }
+
+#define stub_start(fn)						\
+	asm volatile (						\
+		"subl %0,%%esp ;"				\
+		"movl %1, %%eax ; "				\
+		"call *%%eax ;"					\
+		:: "i" ((1 + STUB_DATA_PAGES) * UM_KERN_PAGE_SIZE),	\
+		   "i" (&fn))
 #endif
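
As an illustration of what this expands to, with the same hypothetical
values as above (STUB_DATA_PAGES == 2, UM_KERN_PAGE_SIZE == 4096),
stub_start(real_init) should emit roughly:

	subl	$12288, %esp	/* (1 + 2) * 4096: code + data pages */
	movl	$real_init, %eax
	call	*%eax

Both operands use the "i" (immediate) constraint, so the size must be a
compile-time constant and fn must have a link-time-constant address; the
call then pushes a 4-byte return address, giving real_init() a normal
stack frame below the stub mapping.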

arch/x86/um/shared/sysdep/stub_64.h

@@ -126,4 +126,12 @@ static __always_inline void *get_stub_data(void)
 	return (void *)ret;
 }
+
+#define stub_start(fn)						\
+	asm volatile (						\
+		"subq %0,%%rsp ;"				\
+		"movq %1,%%rax ;"				\
+		"call *%%rax ;"					\
+		:: "i" ((1 + STUB_DATA_PAGES) * UM_KERN_PAGE_SIZE),	\
+		   "i" (&fn))
 #endif
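
The 64-bit variant is the same sequence with 64-bit registers; roughly,
again with the hypothetical values above:

	subq	$12288, %rsp	/* (1 + 2) * 4096: code + data pages */
	movq	$real_init, %rax
	call	*%rax

One detail worth noting (an observation, not something the patch spells
out): subtracting a whole number of pages preserves whatever 16-byte
alignment exec() established, and the call's 8-byte return-address push
then leaves %rsp in the usual at-entry ABI state, so the misalignment
addressed by 8508a5e0e9 cannot recur.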