1/*
2 * Copyright (C) 2004 Jeff Dike (jdike@addtoit.com)
3 * Licensed under the GPL
4 */
5
6#ifndef __SYSDEP_STUB_H
7#define __SYSDEP_STUB_H
8
9#include <asm/ptrace.h>
10
11#define STUB_SYSCALL_RET EAX
12#define STUB_MMAP_NR __NR_mmap2
13#define MMAP_OFFSET(o) ((o) >> UM_KERN_PAGE_SHIFT)
14
/*
 * Issue a 0-argument system call through the legacy int $0x80 gate.
 * Returns the raw kernel return value (negative errno on failure).
 *
 * The "memory" clobber is required: the kernel may read or write user
 * memory on our behalf, so the compiler must not cache memory contents
 * in registers across the trap.
 */
static inline long stub_syscall0(long syscall)
{
	long ret;

	__asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall)
			: "memory");

	return ret;
}
23
/*
 * Issue a 1-argument system call through the legacy int $0x80 gate
 * (argument in %ebx, per the i386 ABI).
 * Returns the raw kernel return value (negative errno on failure).
 *
 * The "memory" clobber keeps the compiler from caching memory values
 * across the trap: the kernel may access user memory on our behalf.
 */
static inline long stub_syscall1(long syscall, long arg1)
{
	long ret;

	__asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1)
			: "memory");

	return ret;
}
32
/*
 * Issue a 2-argument system call through the legacy int $0x80 gate
 * (arguments in %ebx, %ecx, per the i386 ABI).
 * Returns the raw kernel return value (negative errno on failure).
 *
 * The "memory" clobber keeps the compiler from caching memory values
 * across the trap: the kernel may access user memory on our behalf.
 */
static inline long stub_syscall2(long syscall, long arg1, long arg2)
{
	long ret;

	__asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1),
			"c" (arg2)
			: "memory");

	return ret;
}
42
/*
 * Issue a 3-argument system call through the legacy int $0x80 gate
 * (arguments in %ebx, %ecx, %edx, per the i386 ABI).
 * Returns the raw kernel return value (negative errno on failure).
 *
 * The "memory" clobber keeps the compiler from caching memory values
 * across the trap: the kernel may access user memory on our behalf.
 */
static inline long stub_syscall3(long syscall, long arg1, long arg2, long arg3)
{
	long ret;

	__asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1),
			"c" (arg2), "d" (arg3)
			: "memory");

	return ret;
}
52
/*
 * Issue a 4-argument system call through the legacy int $0x80 gate
 * (arguments in %ebx, %ecx, %edx, %esi, per the i386 ABI).
 * Returns the raw kernel return value (negative errno on failure).
 *
 * The "memory" clobber keeps the compiler from caching memory values
 * across the trap: the kernel may access user memory on our behalf.
 */
static inline long stub_syscall4(long syscall, long arg1, long arg2, long arg3,
				 long arg4)
{
	long ret;

	__asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1),
			"c" (arg2), "d" (arg3), "S" (arg4)
			: "memory");

	return ret;
}
63
/*
 * Issue a 5-argument system call through the legacy int $0x80 gate
 * (arguments in %ebx, %ecx, %edx, %esi, %edi, per the i386 ABI).
 * Returns the raw kernel return value (negative errno on failure).
 *
 * The "memory" clobber keeps the compiler from caching memory values
 * across the trap: the kernel may access user memory on our behalf.
 */
static inline long stub_syscall5(long syscall, long arg1, long arg2, long arg3,
				 long arg4, long arg5)
{
	long ret;

	__asm__ volatile ("int $0x80" : "=a" (ret) : "0" (syscall), "b" (arg1),
			"c" (arg2), "d" (arg3), "S" (arg4), "D" (arg5)
			: "memory");

	return ret;
}
74
/*
 * Execute the int3 breakpoint instruction, which delivers SIGTRAP so
 * the process tracing this stub regains control.
 */
static inline void trap_myself(void)
{
	__asm__ volatile ("int3");
}
79
/*
 * Remap the stub data page by invoking mmap2() directly via int $0x80.
 *
 * mmap2() takes six arguments and the sixth (the page offset) travels
 * in %ebp, which cannot be named in a constraint; so 'offset' is bound
 * to %eax and moved into %ebp by hand before %eax is reloaded with the
 * syscall number (operand %0).  The remaining arguments are placed by
 * constraints: address STUB_DATA in %ebx, length in %ecx, protection
 * in %edx, flags in %esi, fd in %edi.
 *
 * After the trap, the mmap2() return value in %eax is stored into the
 * 'err' field of the stub_data area (operand %7, an immediate address)
 * so the host side can inspect the result.
 */
static inline void remap_stack(int fd, unsigned long offset)
{
	__asm__ volatile ("movl %%eax,%%ebp ; movl %0,%%eax ; int $0x80 ;"
			  "movl %7, %%ebx ; movl %%eax, (%%ebx)"
			  : : "g" (STUB_MMAP_NR), "b" (STUB_DATA),
			      "c" (UM_KERN_PAGE_SIZE),
			      "d" (PROT_READ | PROT_WRITE),
			      "S" (MAP_FIXED | MAP_SHARED), "D" (fd),
			      "a" (offset),
			      "i" (&((struct stub_data *) STUB_DATA)->err)
			  : "memory");
}
92
93#endif
1/*
2 * Copyright (C) 2004 Jeff Dike (jdike@addtoit.com)
3 * Licensed under the GPL
4 */
5
6#ifndef __SYSDEP_STUB_H
7#define __SYSDEP_STUB_H
8
9#include <asm/ptrace.h>
10#include <generated/asm-offsets.h>
11
12#define STUB_MMAP_NR __NR_mmap2
13#define MMAP_OFFSET(o) ((o) >> UM_KERN_PAGE_SHIFT)
14
/*
 * Issue a 0-argument system call through the int $0x80 gate.
 * Returns the raw kernel return value (negative errno on failure).
 * The "memory" clobber forces memory to be in sync across the trap.
 */
static __always_inline long stub_syscall0(long syscall)
{
	long res;

	__asm__ volatile ("int $0x80"
			  : "=a" (res)
			  : "0" (syscall)
			  : "memory");

	return res;
}
24
/*
 * Issue a 1-argument system call through the int $0x80 gate
 * (argument in %ebx, per the i386 ABI).
 * Returns the raw kernel return value (negative errno on failure).
 * The "memory" clobber forces memory to be in sync across the trap.
 */
static __always_inline long stub_syscall1(long syscall, long arg1)
{
	long res;

	__asm__ volatile ("int $0x80"
			  : "=a" (res)
			  : "0" (syscall), "b" (arg1)
			  : "memory");

	return res;
}
34
/*
 * Issue a 2-argument system call through the int $0x80 gate
 * (arguments in %ebx, %ecx, per the i386 ABI).
 * Returns the raw kernel return value (negative errno on failure).
 * The "memory" clobber forces memory to be in sync across the trap.
 */
static __always_inline long stub_syscall2(long syscall, long arg1, long arg2)
{
	long res;

	__asm__ volatile ("int $0x80"
			  : "=a" (res)
			  : "0" (syscall), "b" (arg1), "c" (arg2)
			  : "memory");

	return res;
}
45
/*
 * Issue a 3-argument system call through the int $0x80 gate
 * (arguments in %ebx, %ecx, %edx, per the i386 ABI).
 * Returns the raw kernel return value (negative errno on failure).
 * The "memory" clobber forces memory to be in sync across the trap.
 */
static __always_inline long stub_syscall3(long syscall, long arg1, long arg2,
					  long arg3)
{
	long res;

	__asm__ volatile ("int $0x80"
			  : "=a" (res)
			  : "0" (syscall), "b" (arg1), "c" (arg2), "d" (arg3)
			  : "memory");

	return res;
}
57
/*
 * Issue a 4-argument system call through the int $0x80 gate
 * (arguments in %ebx, %ecx, %edx, %esi, per the i386 ABI).
 * Returns the raw kernel return value (negative errno on failure).
 * The "memory" clobber forces memory to be in sync across the trap.
 */
static __always_inline long stub_syscall4(long syscall, long arg1, long arg2,
					  long arg3, long arg4)
{
	long res;

	__asm__ volatile ("int $0x80"
			  : "=a" (res)
			  : "0" (syscall), "b" (arg1), "c" (arg2), "d" (arg3),
			    "S" (arg4)
			  : "memory");

	return res;
}
69
/*
 * Issue a 5-argument system call through the int $0x80 gate
 * (arguments in %ebx, %ecx, %edx, %esi, %edi, per the i386 ABI).
 * Returns the raw kernel return value (negative errno on failure).
 * The "memory" clobber forces memory to be in sync across the trap.
 */
static __always_inline long stub_syscall5(long syscall, long arg1, long arg2,
					  long arg3, long arg4, long arg5)
{
	long res;

	__asm__ volatile ("int $0x80"
			  : "=a" (res)
			  : "0" (syscall), "b" (arg1), "c" (arg2), "d" (arg3),
			    "S" (arg4), "D" (arg5)
			  : "memory");

	return res;
}
81
/*
 * Hit a breakpoint: the int3 instruction delivers SIGTRAP, handing
 * control back to whatever process is tracing this stub.
 */
static __always_inline void trap_myself(void)
{
	__asm__ volatile ("int3");
}
86
/*
 * Remap the stub data pages via mmap2() and then trap back to the
 * tracing process.
 *
 * The stub data base is computed by masking %esp down to the alignment
 * of the STUB_DATA_PAGES mapping (the stub's stack lives inside that
 * mapping).  The mapping's fd and page offset are then loaded from the
 * stub_data fields at offsets UML_STUB_FIELD_FD and
 * UML_STUB_FIELD_OFFSET; the offset ends up in %ebp, the register
 * mmap2() uses for its sixth argument (which cannot be named in a
 * constraint).  Length, protection and flags arrive via the "c", "d"
 * and "S" constraints.
 *
 * After the syscall, the raw mmap2() return value is stored into the
 * field at offset UML_STUB_FIELD_CHILD_ERR so the host can inspect it,
 * and int $3 stops the stub so the tracer regains control.
 */
static __always_inline void remap_stack_and_trap(void)
{
	__asm__ volatile (
		"movl %%esp,%%ebx ;"
		"andl %0,%%ebx ;"
		"movl %1,%%eax ;"
		"movl %%ebx,%%edi ; addl %2,%%edi ; movl (%%edi),%%edi ;"
		"movl %%ebx,%%ebp ; addl %3,%%ebp ; movl (%%ebp),%%ebp ;"
		"int $0x80 ;"
		"addl %4,%%ebx ; movl %%eax, (%%ebx) ;"
		"int $3"
		: :
		"g" (~(STUB_DATA_PAGES * UM_KERN_PAGE_SIZE - 1)),
		"g" (STUB_MMAP_NR),
		"g" (UML_STUB_FIELD_FD),
		"g" (UML_STUB_FIELD_OFFSET),
		"g" (UML_STUB_FIELD_CHILD_ERR),
		"c" (STUB_DATA_PAGES * UM_KERN_PAGE_SIZE),
		"d" (PROT_READ | PROT_WRITE),
		"S" (MAP_FIXED | MAP_SHARED)
		:
		"memory");
}
110
111static __always_inline void *get_stub_data(void)
112{
113 unsigned long ret;
114
115 asm volatile (
116 "movl %%esp,%0 ;"
117 "andl %1,%0"
118 : "=a" (ret)
119 : "g" (~(STUB_DATA_PAGES * UM_KERN_PAGE_SIZE - 1)));
120
121 return (void *)ret;
122}
123#endif