Diffstat (limited to 'arch/um/include')
-rw-r--r--  arch/um/include/sysdep-i386/stub.h    | 64
-rw-r--r--  arch/um/include/sysdep-x86_64/stub.h  | 61
2 files changed, 92 insertions, 33 deletions
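
The patch below replaces the old stub syscall helpers, which loaded each argument register with a separate __asm__ statement, with single __asm__ volatile statements that bind every argument through register constraints, so the compiler can no longer reorder the loads or clobber a register between them and the trap. For reference only (not part of the patch), a minimal user-space sketch of the same constraint-based int $0x80 pattern; it assumes a 32-bit (-m32) Linux build, __NR_getpid from <sys/syscall.h>, and demo_syscall0() is an illustrative name:

#include <stdio.h>
#include <sys/syscall.h>

static inline long demo_syscall0(long nr)
{
        long ret;

        /* One asm statement: the syscall number is tied to eax ("0" refers
         * to the "=a" output), so nothing can be reordered or clobbered
         * between argument setup and the trap. */
        __asm__ volatile ("int $0x80" : "=a" (ret) : "0" (nr));

        return ret;
}

int main(void)
{
        printf("getpid() via int $0x80: %ld\n", demo_syscall0(__NR_getpid));
        return 0;
}
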
diff --git a/arch/um/include/sysdep-i386/stub.h b/arch/um/include/sysdep-i386/stub.h
index d3699fe1c61..a49ceb199ee 100644
--- a/arch/um/include/sysdep-i386/stub.h
+++ b/arch/um/include/sysdep-i386/stub.h
@@ -16,45 +16,69 @@ extern void stub_clone_handler(void);
#define STUB_MMAP_NR __NR_mmap2
#define MMAP_OFFSET(o) ((o) >> PAGE_SHIFT)
+static inline long stub_syscall1(long syscall, long arg1)
+{
+ long ret;
+
+ __asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1));
+
+ return ret;
+}
+
static inline long stub_syscall2(long syscall, long arg1, long arg2)
{
long ret;
- __asm__("movl %0, %%ecx; " : : "g" (arg2) : "%ecx");
- __asm__("movl %0, %%ebx; " : : "g" (arg1) : "%ebx");
- __asm__("movl %0, %%eax; " : : "g" (syscall) : "%eax");
- __asm__("int $0x80;" : : : "%eax");
- __asm__ __volatile__("movl %%eax, %0; " : "=g" (ret) :);
- return(ret);
+ __asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1),
+ "c" (arg2));
+
+ return ret;
}
static inline long stub_syscall3(long syscall, long arg1, long arg2, long arg3)
{
- __asm__("movl %0, %%edx; " : : "g" (arg3) : "%edx");
- return(stub_syscall2(syscall, arg1, arg2));
+ long ret;
+
+ __asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1),
+ "c" (arg2), "d" (arg3));
+
+ return ret;
}
static inline long stub_syscall4(long syscall, long arg1, long arg2, long arg3,
long arg4)
{
- __asm__("movl %0, %%esi; " : : "g" (arg4) : "%esi");
- return(stub_syscall3(syscall, arg1, arg2, arg3));
+ long ret;
+
+ __asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1),
+ "c" (arg2), "d" (arg3), "S" (arg4));
+
+ return ret;
+}
+
+static inline long stub_syscall5(long syscall, long arg1, long arg2, long arg3,
+ long arg4, long arg5)
+{
+ long ret;
+
+ __asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1),
+ "c" (arg2), "d" (arg3), "S" (arg4), "D" (arg5));
+
+ return ret;
}
static inline long stub_syscall6(long syscall, long arg1, long arg2, long arg3,
long arg4, long arg5, long arg6)
{
long ret;
- __asm__("movl %0, %%eax; " : : "g" (syscall) : "%eax");
- __asm__("movl %0, %%ebx; " : : "g" (arg1) : "%ebx");
- __asm__("movl %0, %%ecx; " : : "g" (arg2) : "%ecx");
- __asm__("movl %0, %%edx; " : : "g" (arg3) : "%edx");
- __asm__("movl %0, %%esi; " : : "g" (arg4) : "%esi");
- __asm__("movl %0, %%edi; " : : "g" (arg5) : "%edi");
- __asm__ __volatile__("pushl %%ebp ; movl %1, %%ebp; "
- "int $0x80; popl %%ebp ; "
- "movl %%eax, %0; " : "=g" (ret) : "g" (arg6) : "%eax");
- return(ret);
+
+ __asm__ volatile ("push %%ebp ; movl %%eax,%%ebp ; movl %1,%%eax ; "
+ "int $0x80 ; pop %%ebp"
+ : "=a" (ret)
+ : "g" (syscall), "b" (arg1), "c" (arg2), "d" (arg3),
+ "S" (arg4), "D" (arg5), "0" (arg6));
+
+ return ret;
}
static inline void trap_myself(void)
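
For context only, a hypothetical caller showing how the rewritten i386 helpers combine with the STUB_MMAP_NR and MMAP_OFFSET() macros defined in this header; stub_mmap() and its argument names are illustrative, not part of the patch, and the snippet assumes the definitions from the header above:

/* Hypothetical usage sketch, assuming the stub_syscall6(), STUB_MMAP_NR
 * and MMAP_OFFSET() definitions shown above. */
static inline long stub_mmap(void *addr, unsigned long length, int prot,
                             int flags, int fd, unsigned long offset)
{
        /* mmap2 on i386 takes its offset in pages, hence MMAP_OFFSET(). */
        return stub_syscall6(STUB_MMAP_NR, (long) addr, length, prot,
                             flags, fd, MMAP_OFFSET(offset));
}
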
diff --git a/arch/um/include/sysdep-x86_64/stub.h b/arch/um/include/sysdep-x86_64/stub.h
index f599058d826..2bd6e7a9728 100644
--- a/arch/um/include/sysdep-x86_64/stub.h
+++ b/arch/um/include/sysdep-x86_64/stub.h
@@ -17,37 +17,72 @@ extern void stub_clone_handler(void);
#define STUB_MMAP_NR __NR_mmap
#define MMAP_OFFSET(o) (o)
+#define __syscall_clobber "r11","rcx","memory"
+#define __syscall "syscall"
+
static inline long stub_syscall2(long syscall, long arg1, long arg2)
{
long ret;
- __asm__("movq %0, %%rsi; " : : "g" (arg2) : "%rsi");
- __asm__("movq %0, %%rdi; " : : "g" (arg1) : "%rdi");
- __asm__("movq %0, %%rax; " : : "g" (syscall) : "%rax");
- __asm__("syscall;" : : : "%rax", "%r11", "%rcx");
- __asm__ __volatile__("movq %%rax, %0; " : "=g" (ret) :);
- return(ret);
+ __asm__ volatile (__syscall
+ : "=a" (ret)
+ : "0" (syscall), "D" (arg1), "S" (arg2) : __syscall_clobber );
+
+ return ret;
}
static inline long stub_syscall3(long syscall, long arg1, long arg2, long arg3)
{
- __asm__("movq %0, %%rdx; " : : "g" (arg3) : "%rdx");
- return(stub_syscall2(syscall, arg1, arg2));
+ long ret;
+
+ __asm__ volatile (__syscall
+ : "=a" (ret)
+ : "0" (syscall), "D" (arg1), "S" (arg2), "d" (arg3)
+ : __syscall_clobber );
+
+ return ret;
}
static inline long stub_syscall4(long syscall, long arg1, long arg2, long arg3,
long arg4)
{
- __asm__("movq %0, %%r10; " : : "g" (arg4) : "%r10");
- return(stub_syscall3(syscall, arg1, arg2, arg3));
+ long ret;
+
+ __asm__ volatile ("movq %5,%%r10 ; " __syscall
+ : "=a" (ret)
+ : "0" (syscall), "D" (arg1), "S" (arg2), "d" (arg3),
+ "g" (arg4)
+ : __syscall_clobber, "r10" );
+
+ return ret;
+}
+
+static inline long stub_syscall5(long syscall, long arg1, long arg2, long arg3,
+ long arg4, long arg5)
+{
+ long ret;
+
+ __asm__ volatile ("movq %5,%%r10 ; movq %6,%%r8 ; " __syscall
+ : "=a" (ret)
+ : "0" (syscall), "D" (arg1), "S" (arg2), "d" (arg3),
+ "g" (arg4), "g" (arg5)
+ : __syscall_clobber, "r10", "r8" );
+
+ return ret;
}
static inline long stub_syscall6(long syscall, long arg1, long arg2, long arg3,
long arg4, long arg5, long arg6)
{
- __asm__("movq %0, %%r9; " : : "g" (arg6) : "%r9");
- __asm__("movq %0, %%r8; " : : "g" (arg5) : "%r8");
- return(stub_syscall4(syscall, arg1, arg2, arg3, arg4));
+ long ret;
+
+ __asm__ volatile ("movq %5,%%r10 ; movq %6,%%r8 ; "
+ "movq %7, %%r9; " __syscall : "=a" (ret)
+ : "0" (syscall), "D" (arg1), "S" (arg2), "d" (arg3),
+ "g" (arg4), "g" (arg5), "g" (arg6)
+ : __syscall_clobber, "r10", "r8", "r9" );
+
+ return ret;
}
static inline void trap_myself(void)
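
For reference only, a stand-alone user-space sketch of the x86_64 pattern used above: one syscall instruction with constraint-bound arguments and the rcx/r11/memory clobbers that __syscall_clobber names, since the syscall instruction itself destroys rcx and r11. It assumes __NR_write from <sys/syscall.h>; demo_syscall3() is an illustrative name, not part of the patch:

#include <sys/syscall.h>
#include <unistd.h>

static long demo_syscall3(long nr, long a1, long a2, long a3)
{
        long ret;

        /* Same shape as the new stub_syscall3(): number in rax, arguments
         * in rdi/rsi/rdx, rcx and r11 clobbered by the instruction. */
        __asm__ volatile ("syscall"
                          : "=a" (ret)
                          : "0" (nr), "D" (a1), "S" (a2), "d" (a3)
                          : "r11", "rcx", "memory");

        return ret;
}

int main(void)
{
        static const char msg[] = "write(2) via raw syscall\n";

        demo_syscall3(__NR_write, STDOUT_FILENO, (long) msg, sizeof(msg) - 1);
        return 0;
}
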