Diffstat (limited to 'arch/x86/um/shared/sysdep/stub_64.h')
-rw-r--r--  arch/x86/um/shared/sysdep/stub_64.h  |  68
1 file changed, 40 insertions(+), 28 deletions(-)
diff --git a/arch/x86/um/shared/sysdep/stub_64.h b/arch/x86/um/shared/sysdep/stub_64.h
index b24168ef0ac4..294affbec742 100644
--- a/arch/x86/um/shared/sysdep/stub_64.h
+++ b/arch/x86/um/shared/sysdep/stub_64.h
@@ -6,6 +6,7 @@
 #ifndef __SYSDEP_STUB_H
 #define __SYSDEP_STUB_H
 
+#include <stddef.h>
 #include <sysdep/ptrace_user.h>
 #include <generated/asm-offsets.h>
 #include <linux/stddef.h>
@@ -27,6 +28,17 @@ static __always_inline long stub_syscall0(long syscall)
 	return ret;
 }
 
+static __always_inline long stub_syscall1(long syscall, long arg1)
+{
+	long ret;
+
+	__asm__ volatile (__syscall
+		: "=a" (ret)
+		: "0" (syscall), "D" (arg1) : __syscall_clobber );
+
+	return ret;
+}
+
 static __always_inline long stub_syscall2(long syscall, long arg1, long arg2)
 {
 	long ret;
@@ -79,35 +91,25 @@ static __always_inline long stub_syscall5(long syscall, long arg1, long arg2,
 	return ret;
 }
 
-static __always_inline void trap_myself(void)
+static __always_inline long stub_syscall6(long syscall, long arg1, long arg2,
+					  long arg3, long arg4, long arg5,
+					  long arg6)
 {
-	__asm("int3");
+	long ret;
+
+	__asm__ volatile ("movq %5,%%r10 ; movq %6,%%r8 ; movq %7,%%r9 ; "
+		__syscall
+		: "=a" (ret)
+		: "0" (syscall), "D" (arg1), "S" (arg2), "d" (arg3),
+		  "g" (arg4), "g" (arg5), "g" (arg6)
+		: __syscall_clobber, "r10", "r8", "r9");
+
+	return ret;
 }
 
-static __always_inline void remap_stack_and_trap(void)
+static __always_inline void trap_myself(void)
 {
-	__asm__ volatile (
-		"movq %0,%%rax ;"
-		"movq %%rsp,%%rdi ;"
-		"andq %1,%%rdi ;"
-		"movq %2,%%r10 ;"
-		"movq %%rdi,%%r8 ; addq %3,%%r8 ; movq (%%r8),%%r8 ;"
-		"movq %%rdi,%%r9 ; addq %4,%%r9 ; movq (%%r9),%%r9 ;"
-		__syscall ";"
-		"movq %%rsp,%%rdi ; andq %1,%%rdi ;"
-		"addq %5,%%rdi ; movq %%rax, (%%rdi) ;"
-		"int3"
-		: :
-		"g" (STUB_MMAP_NR),
-		"g" (~(STUB_DATA_PAGES * UM_KERN_PAGE_SIZE - 1)),
-		"g" (MAP_FIXED | MAP_SHARED),
-		"g" (UML_STUB_FIELD_FD),
-		"g" (UML_STUB_FIELD_OFFSET),
-		"g" (UML_STUB_FIELD_CHILD_ERR),
-		"S" (STUB_DATA_PAGES * UM_KERN_PAGE_SIZE),
-		"d" (PROT_READ | PROT_WRITE)
-		:
-		__syscall_clobber, "r10", "r8", "r9");
+	__asm("int3");
 }
 
 static __always_inline void *get_stub_data(void)
@@ -115,11 +117,21 @@ static __always_inline void *get_stub_data(void)
 	unsigned long ret;
 
 	asm volatile (
-		"movq %%rsp,%0 ;"
-		"andq %1,%0"
+		"lea 0(%%rip), %0;"
+		"andq %1, %0 ;"
+		"addq %2, %0 ;"
 		: "=a" (ret)
-		: "g" (~(STUB_DATA_PAGES * UM_KERN_PAGE_SIZE - 1)));
+		: "g" (~(UM_KERN_PAGE_SIZE - 1)),
+		  "g" (UM_KERN_PAGE_SIZE));
 
 	return (void *)ret;
 }
+
+#define stub_start(fn)						\
+	asm volatile (						\
+		"subq %0,%%rsp ;"				\
+		"movq %1,%%rax ;"				\
+		"call *%%rax ;"					\
+		:: "i" ((1 + STUB_DATA_PAGES) * UM_KERN_PAGE_SIZE),	\
+		   "i" (&fn))
 #endif
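
For orientation, the sketch below restates in plain C what the reworked get_stub_data() computes: instead of deriving the stub data address from the stack pointer, it takes the current instruction pointer (which lies in the stub's single code page), rounds it down to the page boundary, and steps one page forward to the data pages mapped directly behind the code. This is an illustrative sketch, not part of the patch; EXAMPLE_PAGE_SIZE and example_get_stub_data() are made-up names, and the 4 KiB value stands in for UM_KERN_PAGE_SIZE.

#include <stdint.h>

/* Assumed value standing in for UM_KERN_PAGE_SIZE (4 KiB pages on x86-64). */
#define EXAMPLE_PAGE_SIZE 4096UL

/*
 * Plain-C equivalent of the new inline asm:
 *   lea 0(%rip), %0              take an address inside the stub code page
 *   andq ~(PAGE_SIZE - 1), %0    round down to the start of that page
 *   addq PAGE_SIZE, %0           the stub data area begins one page later
 */
void *example_get_stub_data(uintptr_t rip)
{
	uintptr_t code_page = rip & ~(EXAMPLE_PAGE_SIZE - 1);

	return (void *)(code_page + EXAMPLE_PAGE_SIZE);
}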