1/* SPDX-License-Identifier: LGPL-2.1 OR MIT */
2/*
3 * ARM specific definitions for NOLIBC
4 * Copyright (C) 2017-2022 Willy Tarreau <w@1wt.eu>
5 */
6
7#ifndef _NOLIBC_ARCH_ARM_H
8#define _NOLIBC_ARCH_ARM_H
9
10#include "compiler.h"
11#include "crt.h"
12
13/* Syscalls for ARM in ARM or Thumb modes :
14 * - registers are 32-bit
15 * - stack is 8-byte aligned
16 * ( http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.faqs/ka4127.html)
17 * - syscall number is passed in r7
18 * - arguments are in r0, r1, r2, r3, r4, r5
19 * - the system call is performed by calling svc #0
20 * - syscall return comes in r0.
21 * - only lr is clobbered.
22 * - the arguments are cast to long and assigned into the target registers
23 * which are then simply passed as registers to the asm code, so that we
24 * don't have to experience issues with register constraints.
25 * - the syscall number is always specified last in order to allow to force
26 * some registers before (gcc refuses a %-register at the last position).
27 * - in thumb mode without -fomit-frame-pointer, r7 is also used to store the
28 * frame pointer, and we cannot directly assign it as a register variable,
29 * nor can we clobber it. Instead we assign the r6 register and swap it
30 * with r7 before calling svc, and r6 is marked as clobbered.
31 * We're just using any regular register which we assign to r7 after saving
32 * it.
33 *
34 * Also, ARM supports the old_select syscall if newselect is not available
35 */
#define __ARCH_WANT_SYS_OLD_SELECT

#if (defined(__THUMBEB__) || defined(__THUMBEL__)) && \
    !defined(NOLIBC_OMIT_FRAME_POINTER)
/* swap r6,r7 needed in Thumb mode since we can't use nor clobber r7:
 * the syscall number is prepared in r6, the three EORs below swap r6 and
 * r7 in place (no scratch register needed) right before "svc", and the
 * frame pointer is copied back into r7 afterwards (r6 still holds it,
 * since "svc" leaves r6 untouched).
 */
#define _NOLIBC_SYSCALL_REG        "r6"
#define _NOLIBC_THUMB_SET_R7       "eor r7, r6\neor r6, r7\neor r7, r6\n"
#define _NOLIBC_THUMB_RESTORE_R7   "mov r7, r6\n"

#else /* we're in ARM mode */
/* in Arm mode we can directly use r7, so no set/restore sequence is needed */
#define _NOLIBC_SYSCALL_REG        "r7"
#define _NOLIBC_THUMB_SET_R7       ""
#define _NOLIBC_THUMB_RESTORE_R7   ""

#endif /* end THUMB */
52
/* invoke syscall <num> with no argument; the kernel's return value comes
 * back in r0 (_arg1). Per the notes above, only lr is clobbered besides
 * flags and memory.
 */
#define my_syscall0(num)                                                      \
({                                                                            \
	register long _num  __asm__(_NOLIBC_SYSCALL_REG) = (num);             \
	register long _arg1 __asm__ ("r0");                                   \
	                                                                      \
	__asm__ volatile (                                                    \
		_NOLIBC_THUMB_SET_R7                                          \
		"svc #0\n"                                                    \
		_NOLIBC_THUMB_RESTORE_R7                                      \
		: "=r"(_arg1), "=r"(_num)                                     \
		: "r"(_arg1),                                                 \
		  "r"(_num)                                                   \
		: "memory", "cc", "lr"                                        \
	);                                                                    \
	_arg1;                                                                \
})
69
/* invoke syscall <num> with one argument in r0; result returned in r0 */
#define my_syscall1(num, arg1)                                                \
({                                                                            \
	register long _num  __asm__(_NOLIBC_SYSCALL_REG) = (num);             \
	register long _arg1 __asm__ ("r0") = (long)(arg1);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		_NOLIBC_THUMB_SET_R7                                          \
		"svc #0\n"                                                    \
		_NOLIBC_THUMB_RESTORE_R7                                      \
		: "=r"(_arg1), "=r" (_num)                                    \
		: "r"(_arg1),                                                 \
		  "r"(_num)                                                   \
		: "memory", "cc", "lr"                                        \
	);                                                                    \
	_arg1;                                                                \
})
86
/* invoke syscall <num> with two arguments in r0-r1; result returned in r0 */
#define my_syscall2(num, arg1, arg2)                                          \
({                                                                            \
	register long _num  __asm__(_NOLIBC_SYSCALL_REG) = (num);             \
	register long _arg1 __asm__ ("r0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("r1") = (long)(arg2);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		_NOLIBC_THUMB_SET_R7                                          \
		"svc #0\n"                                                    \
		_NOLIBC_THUMB_RESTORE_R7                                      \
		: "=r"(_arg1), "=r" (_num)                                    \
		: "r"(_arg1), "r"(_arg2),                                     \
		  "r"(_num)                                                   \
		: "memory", "cc", "lr"                                        \
	);                                                                    \
	_arg1;                                                                \
})
104
/* invoke syscall <num> with three arguments in r0-r2; result returned in r0 */
#define my_syscall3(num, arg1, arg2, arg3)                                    \
({                                                                            \
	register long _num  __asm__(_NOLIBC_SYSCALL_REG) = (num);             \
	register long _arg1 __asm__ ("r0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("r1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("r2") = (long)(arg3);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		_NOLIBC_THUMB_SET_R7                                          \
		"svc #0\n"                                                    \
		_NOLIBC_THUMB_RESTORE_R7                                      \
		: "=r"(_arg1), "=r" (_num)                                    \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3),                         \
		  "r"(_num)                                                   \
		: "memory", "cc", "lr"                                        \
	);                                                                    \
	_arg1;                                                                \
})
123
/* invoke syscall <num> with four arguments in r0-r3; result returned in r0 */
#define my_syscall4(num, arg1, arg2, arg3, arg4)                              \
({                                                                            \
	register long _num  __asm__(_NOLIBC_SYSCALL_REG) = (num);             \
	register long _arg1 __asm__ ("r0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("r1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("r2") = (long)(arg3);                    \
	register long _arg4 __asm__ ("r3") = (long)(arg4);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		_NOLIBC_THUMB_SET_R7                                          \
		"svc #0\n"                                                    \
		_NOLIBC_THUMB_RESTORE_R7                                      \
		: "=r"(_arg1), "=r" (_num)                                    \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4),             \
		  "r"(_num)                                                   \
		: "memory", "cc", "lr"                                        \
	);                                                                    \
	_arg1;                                                                \
})
143
/* invoke syscall <num> with five arguments in r0-r4; result returned in r0 */
#define my_syscall5(num, arg1, arg2, arg3, arg4, arg5)                        \
({                                                                            \
	register long _num  __asm__(_NOLIBC_SYSCALL_REG) = (num);             \
	register long _arg1 __asm__ ("r0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("r1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("r2") = (long)(arg3);                    \
	register long _arg4 __asm__ ("r3") = (long)(arg4);                    \
	register long _arg5 __asm__ ("r4") = (long)(arg5);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		_NOLIBC_THUMB_SET_R7                                          \
		"svc #0\n"                                                    \
		_NOLIBC_THUMB_RESTORE_R7                                      \
		: "=r"(_arg1), "=r" (_num)                                    \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5), \
		  "r"(_num)                                                   \
		: "memory", "cc", "lr"                                        \
	);                                                                    \
	_arg1;                                                                \
})
164
/* invoke syscall <num> with six arguments in r0-r5; result returned in r0 */
#define my_syscall6(num, arg1, arg2, arg3, arg4, arg5, arg6)                  \
({                                                                            \
	register long _num  __asm__(_NOLIBC_SYSCALL_REG) = (num);             \
	register long _arg1 __asm__ ("r0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("r1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("r2") = (long)(arg3);                    \
	register long _arg4 __asm__ ("r3") = (long)(arg4);                    \
	register long _arg5 __asm__ ("r4") = (long)(arg5);                    \
	register long _arg6 __asm__ ("r5") = (long)(arg6);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		_NOLIBC_THUMB_SET_R7                                          \
		"svc #0\n"                                                    \
		_NOLIBC_THUMB_RESTORE_R7                                      \
		: "=r"(_arg1), "=r" (_num)                                    \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5), \
		  "r"(_arg6), "r"(_num)                                       \
		: "memory", "cc", "lr"                                        \
	);                                                                    \
	_arg1;                                                                \
})
186
/* startup code */
/*
 * Program entry point. Passes the initial stack pointer (which points at
 * argc per the Linux process startup layout) to _start_c() as its first
 * argument, after realigning sp to 8 bytes as AAPCS requires of the
 * callee. _start_c (from crt.h) is expected not to return, hence the
 * __builtin_unreachable() below.
 */
void __attribute__((weak, noreturn, optimize("Os", "omit-frame-pointer"))) __no_stack_protector _start(void)
{
	__asm__ volatile (
		"mov %r0, sp\n"       /* save stack pointer to %r0, as arg1 of _start_c */
		"and ip, %r0, #-8\n"  /* sp must be 8-byte aligned in the callee */
		"mov sp, ip\n"
		"bl _start_c\n"       /* transfer to c runtime */
	);
	__builtin_unreachable();
}
198
199#endif /* _NOLIBC_ARCH_ARM_H */
1/* SPDX-License-Identifier: LGPL-2.1 OR MIT */
2/*
3 * ARM specific definitions for NOLIBC
4 * Copyright (C) 2017-2022 Willy Tarreau <w@1wt.eu>
5 */
6
7#ifndef _NOLIBC_ARCH_ARM_H
8#define _NOLIBC_ARCH_ARM_H
9
/* O_* macros for fcntl/open are architecture-specific */
/* NOTE(review): values below are assumed to match the ARM Linux kernel ABI
 * (asm/fcntl.h) — verify against the target kernel headers.
 */
#define O_RDONLY            0
#define O_WRONLY            1
#define O_RDWR              2
#define O_CREAT          0x40
#define O_EXCL           0x80
#define O_NOCTTY        0x100
#define O_TRUNC         0x200
#define O_APPEND        0x400
#define O_NONBLOCK      0x800
#define O_DIRECTORY    0x4000
21
/* The struct returned by the stat() syscall, 32-bit only, the syscall returns
 * exactly 56 bytes (stops before the unused array). In big endian, the format
 * differs as devices are returned as short only.
 * Field order and sizes are the kernel ABI layout: do not reorder or retype.
 */
struct sys_stat_struct {
#if defined(__ARMEB__)
	unsigned short st_dev;      /* big endian: device id is a 16-bit value */
	unsigned short __pad1;      /* explicit padding to keep st_ino aligned */
#else
	unsigned long  st_dev;
#endif
	unsigned long  st_ino;
	unsigned short st_mode;
	unsigned short st_nlink;
	unsigned short st_uid;
	unsigned short st_gid;

#if defined(__ARMEB__)
	unsigned short st_rdev;     /* same 16-bit device format as st_dev */
	unsigned short __pad2;
#else
	unsigned long  st_rdev;
#endif
	unsigned long  st_size;
	unsigned long  st_blksize;
	unsigned long  st_blocks;

	unsigned long  st_atime;
	unsigned long  st_atime_nsec;
	unsigned long  st_mtime;
	unsigned long  st_mtime_nsec;

	unsigned long  st_ctime;
	unsigned long  st_ctime_nsec;
	unsigned long  __unused[2]; /* not filled in: syscall copies only 56 bytes */
};
58
59/* Syscalls for ARM in ARM or Thumb modes :
60 * - registers are 32-bit
61 * - stack is 8-byte aligned
62 * ( http://infocenter.arm.com/help/index.jsp?topic=/com.arm.doc.faqs/ka4127.html)
63 * - syscall number is passed in r7
64 * - arguments are in r0, r1, r2, r3, r4, r5
65 * - the system call is performed by calling svc #0
66 * - syscall return comes in r0.
67 * - only lr is clobbered.
68 * - the arguments are cast to long and assigned into the target registers
69 * which are then simply passed as registers to the asm code, so that we
70 * don't have to experience issues with register constraints.
71 * - the syscall number is always specified last in order to allow to force
72 * some registers before (gcc refuses a %-register at the last position).
73 *
74 * Also, ARM supports the old_select syscall if newselect is not available
75 */
76#define __ARCH_WANT_SYS_OLD_SELECT
77
/* invoke syscall <num> (in r7) with no argument; result returned in r0 */
#define my_syscall0(num)                                                      \
({                                                                            \
	register long _num  __asm__ ("r7") = (num);                           \
	register long _arg1 __asm__ ("r0");                                   \
	                                                                      \
	__asm__ volatile (                                                    \
		"svc #0\n"                                                    \
		: "=r"(_arg1)                                                 \
		: "r"(_num)                                                   \
		: "memory", "cc", "lr"                                        \
	);                                                                    \
	_arg1;                                                                \
})
91
/* invoke syscall <num> (in r7) with one argument in r0; result in r0 */
#define my_syscall1(num, arg1)                                                \
({                                                                            \
	register long _num  __asm__ ("r7") = (num);                           \
	register long _arg1 __asm__ ("r0") = (long)(arg1);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"svc #0\n"                                                    \
		: "=r"(_arg1)                                                 \
		: "r"(_arg1),                                                 \
		  "r"(_num)                                                   \
		: "memory", "cc", "lr"                                        \
	);                                                                    \
	_arg1;                                                                \
})
106
/* invoke syscall <num> (in r7) with two arguments in r0-r1; result in r0 */
#define my_syscall2(num, arg1, arg2)                                          \
({                                                                            \
	register long _num  __asm__ ("r7") = (num);                           \
	register long _arg1 __asm__ ("r0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("r1") = (long)(arg2);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"svc #0\n"                                                    \
		: "=r"(_arg1)                                                 \
		: "r"(_arg1), "r"(_arg2),                                     \
		  "r"(_num)                                                   \
		: "memory", "cc", "lr"                                        \
	);                                                                    \
	_arg1;                                                                \
})
122
/* invoke syscall <num> (in r7) with three arguments in r0-r2; result in r0 */
#define my_syscall3(num, arg1, arg2, arg3)                                    \
({                                                                            \
	register long _num  __asm__ ("r7") = (num);                           \
	register long _arg1 __asm__ ("r0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("r1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("r2") = (long)(arg3);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"svc #0\n"                                                    \
		: "=r"(_arg1)                                                 \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3),                         \
		  "r"(_num)                                                   \
		: "memory", "cc", "lr"                                        \
	);                                                                    \
	_arg1;                                                                \
})
139
/* invoke syscall <num> (in r7) with four arguments in r0-r3; result in r0 */
#define my_syscall4(num, arg1, arg2, arg3, arg4)                              \
({                                                                            \
	register long _num  __asm__ ("r7") = (num);                           \
	register long _arg1 __asm__ ("r0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("r1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("r2") = (long)(arg3);                    \
	register long _arg4 __asm__ ("r3") = (long)(arg4);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"svc #0\n"                                                    \
		: "=r"(_arg1)                                                 \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4),             \
		  "r"(_num)                                                   \
		: "memory", "cc", "lr"                                        \
	);                                                                    \
	_arg1;                                                                \
})
157
/* invoke syscall <num> (in r7) with five arguments in r0-r4; result in r0 */
#define my_syscall5(num, arg1, arg2, arg3, arg4, arg5)                        \
({                                                                            \
	register long _num  __asm__ ("r7") = (num);                           \
	register long _arg1 __asm__ ("r0") = (long)(arg1);                    \
	register long _arg2 __asm__ ("r1") = (long)(arg2);                    \
	register long _arg3 __asm__ ("r2") = (long)(arg3);                    \
	register long _arg4 __asm__ ("r3") = (long)(arg4);                    \
	register long _arg5 __asm__ ("r4") = (long)(arg5);                    \
	                                                                      \
	__asm__ volatile (                                                    \
		"svc #0\n"                                                    \
		: "=r" (_arg1)                                                \
		: "r"(_arg1), "r"(_arg2), "r"(_arg3), "r"(_arg4), "r"(_arg5), \
		  "r"(_num)                                                   \
		: "memory", "cc", "lr"                                        \
	);                                                                    \
	_arg1;                                                                \
})
176
/* startup code */
/*
 * Program entry point: pops argc off the initial stack, derives argv and
 * envp from the stack layout, realigns sp to 8 bytes (AAPCS), then calls
 * main() and exits through the exit syscall with its return code.
 */
__asm__ (".section .text\n"
    ".weak _start\n"
    "_start:\n"
#if defined(__THUMBEB__) || defined(__THUMBEL__)
    /* We enter here in 32-bit mode but if some previous functions were in
     * 16-bit mode, the assembler cannot know, so we need to tell it we're in
     * 32-bit now, then switch to 16-bit (is there a better way to do it than
     * adding 1 by hand ?) and tell the asm we're now in 16-bit mode so that
     * it generates correct instructions. Note that we do not support thumb1.
     */
    ".code 32\n"
    "add     r0, pc, #1\n"
    "bx      r0\n"
    ".code 16\n"
#endif
    "pop {%r0}\n"                 // argc was in the stack
    "mov %r1, %sp\n"              // argv = sp
    "add %r2, %r1, %r0, lsl #2\n" // envp = argv + 4*argc ...
    "add %r2, %r2, $4\n"          // ... + 4 (skip the NULL after argv[])
    "and %r3, %r1, $-8\n"         // AAPCS : sp must be 8-byte aligned in the
    "mov %sp, %r3\n"              // callee, and bl doesn't push (lr=pc)
    "bl main\n"                   // main() returns the status code, we'll exit with it.
    "movs r7, $1\n"               // NR_exit == 1
    "svc $0x00\n"
    "");
203
204#endif // _NOLIBC_ARCH_ARM_H