|
10 | 10 | #include "compiler.h" |
11 | 11 | #include "crt.h" |
12 | 12 |
|
/* Fail the build early on any MIPS ABI this file does not implement. */
#if !defined(_ABIO32) && !defined(_ABIN32) && !defined(_ABI64)
#error Unsupported MIPS ABI
#endif
16 | 16 |
|
|
32 | 32 | * - the arguments are cast to long and assigned into the target registers |
33 | 33 | * which are then simply passed as registers to the asm code, so that we |
34 | 34 | * don't have to experience issues with register constraints. |
| 35 | + * |
 * Syscalls for MIPS ABI N32, same as ABI O32 with the following differences:
 * - arguments are in a0, a1, a2, a3, t0, t1, t2, t3.
 *   t0..t3 are also known as a4..a7.
 * - stack is 16-byte aligned
35 | 40 | */ |
36 | 41 |
|
#if defined(_ABIO32)

/*
 * o32: every register the kernel may trash across a syscall must be listed
 * as a clobber so the compiler never keeps a live value in one of them.
 */
#define _NOLIBC_SYSCALL_CLOBBERLIST \
	"memory", "cc", "at", "v1", "hi", "lo", \
	"t0", "t1", "t2", "t3", "t4", "t5", "t6", "t7", "t8", "t9"
/*
 * o32 passes syscall arguments beyond the fourth on the stack (see
 * my_syscall5/my_syscall6 storing at 16($sp)), so room must be reserved
 * around the syscall instruction and released afterwards.
 */
#define _NOLIBC_SYSCALL_STACK_RESERVE "addiu $sp, $sp, -32\n"
#define _NOLIBC_SYSCALL_STACK_UNRESERVE "addiu $sp, $sp, 32\n"

#else /* _ABIN32 || _ABI64 */

/* binutils, GCC and clang disagree about register aliases, use numbers instead. */
#define _NOLIBC_SYSCALL_CLOBBERLIST \
	"memory", "cc", "at", "v1", \
	"10", "11", "12", "13", "14", "15", "24", "25"

/*
 * n32/n64 pass all syscall arguments in registers (see the $4..$9
 * constraints in my_syscall5/my_syscall6 below), so no stack adjustment
 * is needed around the syscall: both hooks expand to nothing.
 */
#define _NOLIBC_SYSCALL_STACK_RESERVE
#define _NOLIBC_SYSCALL_STACK_UNRESERVE

#endif /* _ABIO32 */
40 | 61 |
|
41 | 62 | #define my_syscall0(num) \ |
42 | 63 | ({ \ |
43 | 64 | register long _num __asm__ ("v0") = (num); \ |
44 | 65 | register long _arg4 __asm__ ("a3"); \ |
45 | 66 | \ |
46 | 67 | __asm__ volatile ( \ |
47 | | - "addiu $sp, $sp, -32\n" \ |
| 68 | + _NOLIBC_SYSCALL_STACK_RESERVE \ |
48 | 69 | "syscall\n" \ |
49 | | - "addiu $sp, $sp, 32\n" \ |
| 70 | + _NOLIBC_SYSCALL_STACK_UNRESERVE \ |
50 | 71 | : "=r"(_num), "=r"(_arg4) \ |
51 | 72 | : "r"(_num) \ |
52 | 73 | : _NOLIBC_SYSCALL_CLOBBERLIST \ |
|
61 | 82 | register long _arg4 __asm__ ("a3"); \ |
62 | 83 | \ |
63 | 84 | __asm__ volatile ( \ |
64 | | - "addiu $sp, $sp, -32\n" \ |
| 85 | + _NOLIBC_SYSCALL_STACK_RESERVE \ |
65 | 86 | "syscall\n" \ |
66 | | - "addiu $sp, $sp, 32\n" \ |
| 87 | + _NOLIBC_SYSCALL_STACK_UNRESERVE \ |
67 | 88 | : "=r"(_num), "=r"(_arg4) \ |
68 | 89 | : "0"(_num), \ |
69 | 90 | "r"(_arg1) \ |
|
80 | 101 | register long _arg4 __asm__ ("a3"); \ |
81 | 102 | \ |
82 | 103 | __asm__ volatile ( \ |
83 | | - "addiu $sp, $sp, -32\n" \ |
| 104 | + _NOLIBC_SYSCALL_STACK_RESERVE \ |
84 | 105 | "syscall\n" \ |
85 | | - "addiu $sp, $sp, 32\n" \ |
| 106 | + _NOLIBC_SYSCALL_STACK_UNRESERVE \ |
86 | 107 | : "=r"(_num), "=r"(_arg4) \ |
87 | 108 | : "0"(_num), \ |
88 | 109 | "r"(_arg1), "r"(_arg2) \ |
|
100 | 121 | register long _arg4 __asm__ ("a3"); \ |
101 | 122 | \ |
102 | 123 | __asm__ volatile ( \ |
103 | | - "addiu $sp, $sp, -32\n" \ |
| 124 | + _NOLIBC_SYSCALL_STACK_RESERVE \ |
104 | 125 | "syscall\n" \ |
105 | | - "addiu $sp, $sp, 32\n" \ |
| 126 | + _NOLIBC_SYSCALL_STACK_UNRESERVE \ |
106 | 127 | : "=r"(_num), "=r"(_arg4) \ |
107 | 128 | : "0"(_num), \ |
108 | 129 | "r"(_arg1), "r"(_arg2), "r"(_arg3) \ |
|
120 | 141 | register long _arg4 __asm__ ("a3") = (long)(arg4); \ |
121 | 142 | \ |
122 | 143 | __asm__ volatile ( \ |
123 | | - "addiu $sp, $sp, -32\n" \ |
| 144 | + _NOLIBC_SYSCALL_STACK_RESERVE \ |
124 | 145 | "syscall\n" \ |
125 | | - "addiu $sp, $sp, 32\n" \ |
| 146 | + _NOLIBC_SYSCALL_STACK_UNRESERVE \ |
126 | 147 | : "=r" (_num), "=r"(_arg4) \ |
127 | 148 | : "0"(_num), \ |
128 | 149 | "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4) \ |
|
131 | 152 | _arg4 ? -_num : _num; \ |
132 | 153 | }) |
133 | 154 |
|
| 155 | +#if defined(_ABIO32) |
| 156 | + |
134 | 157 | #define my_syscall5(num, arg1, arg2, arg3, arg4, arg5) \ |
135 | 158 | ({ \ |
136 | 159 | register long _num __asm__ ("v0") = (num); \ |
|
141 | 164 | register long _arg5 = (long)(arg5); \ |
142 | 165 | \ |
143 | 166 | __asm__ volatile ( \ |
144 | | - "addiu $sp, $sp, -32\n" \ |
| 167 | + _NOLIBC_SYSCALL_STACK_RESERVE \ |
145 | 168 | "sw %7, 16($sp)\n" \ |
146 | 169 | "syscall\n" \ |
147 | | - "addiu $sp, $sp, 32\n" \ |
| 170 | + _NOLIBC_SYSCALL_STACK_UNRESERVE \ |
148 | 171 | : "=r" (_num), "=r"(_arg4) \ |
149 | 172 | : "0"(_num), \ |
150 | 173 | "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5) \ |
|
164 | 187 | register long _arg6 = (long)(arg6); \ |
165 | 188 | \ |
166 | 189 | __asm__ volatile ( \ |
167 | | - "addiu $sp, $sp, -32\n" \ |
| 190 | + _NOLIBC_SYSCALL_STACK_RESERVE \ |
168 | 191 | "sw %7, 16($sp)\n" \ |
169 | 192 | "sw %8, 20($sp)\n" \ |
170 | 193 | "syscall\n" \ |
171 | | - "addiu $sp, $sp, 32\n" \ |
| 194 | + _NOLIBC_SYSCALL_STACK_UNRESERVE \ |
| 195 | + : "=r" (_num), "=r"(_arg4) \ |
| 196 | + : "0"(_num), \ |
| 197 | + "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5), \ |
| 198 | + "r"(_arg6) \ |
| 199 | + : _NOLIBC_SYSCALL_CLOBBERLIST \ |
| 200 | + ); \ |
| 201 | + _arg4 ? -_num : _num; \ |
| 202 | +}) |
| 203 | + |
| 204 | +#else /* _ABIN32 || _ABI64 */ |
| 205 | + |
/*
 * n32/n64 variant of my_syscall5(): the fifth argument travels in
 * register $8 (a4 in n32/n64 naming), so unlike o32 nothing is stored on
 * the stack and no stack-pointer adjustment is needed around the syscall.
 * Numeric register names are used per the clobber-list note above.
 */
#define my_syscall5(num, arg1, arg2, arg3, arg4, arg5) \
({ \
	register long _num __asm__ ("v0") = (num); \
	register long _arg1 __asm__ ("$4") = (long)(arg1); \
	register long _arg2 __asm__ ("$5") = (long)(arg2); \
	register long _arg3 __asm__ ("$6") = (long)(arg3); \
	register long _arg4 __asm__ ("$7") = (long)(arg4); \
	register long _arg5 __asm__ ("$8") = (long)(arg5); \
	\
	__asm__ volatile ( \
		"syscall\n" \
		: "=r" (_num), "=r"(_arg4) \
		: "0"(_num), \
		  "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5) \
		: _NOLIBC_SYSCALL_CLOBBERLIST \
	); \
	/* $7/a3 is non-zero on failure, with the error code left in v0 */ \
	_arg4 ? -_num : _num; \
})
| 224 | + |
| 225 | +#define my_syscall6(num, arg1, arg2, arg3, arg4, arg5, arg6) \ |
| 226 | +({ \ |
| 227 | + register long _num __asm__ ("v0") = (num); \ |
| 228 | + register long _arg1 __asm__ ("$4") = (long)(arg1); \ |
| 229 | + register long _arg2 __asm__ ("$5") = (long)(arg2); \ |
| 230 | + register long _arg3 __asm__ ("$6") = (long)(arg3); \ |
| 231 | + register long _arg4 __asm__ ("$7") = (long)(arg4); \ |
| 232 | + register long _arg5 __asm__ ("$8") = (long)(arg5); \ |
| 233 | + register long _arg6 __asm__ ("$9") = (long)(arg6); \ |
| 234 | + \ |
| 235 | + __asm__ volatile ( \ |
| 236 | + "syscall\n" \ |
172 | 237 | : "=r" (_num), "=r"(_arg4) \ |
173 | 238 | : "0"(_num), \ |
174 | 239 | "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5), \ |
|
178 | 243 | _arg4 ? -_num : _num; \ |
179 | 244 | }) |
180 | 245 |
|
| 246 | +#endif /* _ABIO32 */ |
| 247 | + |
/* startup code, note that it's called __start on MIPS */
void __start(void);
void __attribute__((weak, noreturn)) __nolibc_entrypoint __no_stack_protector __start(void)
{
	__asm__ volatile (
		"move $a0, $sp\n" /* save stack pointer to $a0, as arg1 of _start_c */
#if defined(_ABIO32)
		/* o32 only: the caller must provide a 16-byte a0..a3 save area */
		"addiu $sp, $sp, -16\n" /* the callee expects to save a0..a3 there */
#endif /* _ABIO32 */
		"lui $t9, %hi(_start_c)\n" /* ABI requires current function address in $t9 */
		"ori $t9, %lo(_start_c)\n"
#if defined(_ABI64)
		/*
		 * n64: %hi/%lo only form the low 32 bits of the address; build
		 * bits 32..63 of _start_c with %highest/%higher in $t0, shift
		 * them into place and merge into $t9 for the full 64-bit target.
		 */
		"lui $t0, %highest(_start_c)\n"
		"ori $t0, %higher(_start_c)\n"
		"dsll $t0, 0x20\n"
		"or $t9, $t0\n"
#endif /* _ABI64 */
		"jalr $t9\n" /* transfer to c runtime */
	);
	__nolibc_entrypoint_epilogue();
|
0 commit comments