/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/assembler.h>
#include <asm/ftrace.h>
#include <asm/unwind.h>

#include "entry-header.S"

/*
 * When compiling with -pg, gcc inserts a call to the mcount routine at the
 * start of every function. In mcount, apart from the function's address (in
 * lr), we need to get hold of the function's caller's address.
 *
 * Older GCCs (pre-4.4) inserted a call to a routine called mcount like this:
 *
 *	bl	mcount
 *
 * These versions have the limitation that in order for the mcount routine to
 * be able to determine the function's caller's address, an APCS-style frame
 * pointer (which is set up with something like the code below) is required.
 *
 *	mov	ip, sp
 *	push	{fp, ip, lr, pc}
 *	sub	fp, ip, #4
 *
 * With EABI, these frame pointers are not available unless -mapcs-frame is
 * specified, and if building as Thumb-2, not even then.
 *
 * Newer GCCs (4.4+) solve this problem by introducing a new version of mcount,
 * with call sites like:
 *
 *	push	{lr}
 *	bl	__gnu_mcount_nc
 *
 * With these compilers, frame pointers are not necessary.
 *
 * mcount can be thought of as a function called in the middle of a subroutine
 * call. As such, it needs to be transparent for both the caller and the
 * callee: the original lr needs to be restored when leaving mcount, and no
 * registers should be clobbered. (In the __gnu_mcount_nc implementation, we
 * clobber the ip register. This is OK because the ARM calling convention
 * allows it to be clobbered in subroutines and doesn't use it to hold
 * parameters.)
 *
 * When using dynamic ftrace, we patch out the mcount call with a "mov r0, r0"
 * for the mcount case, and a "pop {lr}" for the __gnu_mcount_nc case (see
 * arch/arm/kernel/ftrace.c).
 */
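
/*
 * For illustration only (a sketch of the patching described above, not code
 * copied from arch/arm/kernel/ftrace.c): an instrumented function that starts
 * with
 *
 *	push	{lr}
 *	bl	__gnu_mcount_nc
 *
 * has the bl rewritten by dynamic ftrace into the no-op
 *
 *	pop	{lr}
 *
 * while tracing is disabled, and into a call to the tracer entry point (e.g.
 * ftrace_caller) once tracing is enabled for that function.
 */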

#ifndef CONFIG_OLD_MCOUNT
#if (__GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 4))
#error Ftrace requires CONFIG_FRAME_POINTER=y with GCC older than 4.4.0.
#endif
#endif

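/*
 * mcount_adjust_addr turns the raw return address in \rn (which points just
 * past the mcount call and may have the Thumb bit set) into the address of
 * the call site itself; this is the value handed to the tracers as the
 * address of the instrumented function.
 */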
.macro mcount_adjust_addr rd, rn
	bic	\rd, \rn, #1		@ clear the Thumb bit if present
	sub	\rd, \rd, #MCOUNT_INSN_SIZE
.endm

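/*
 * __mcount: when no tracer is registered (ftrace_trace_function still points
 * at ftrace_stub), this only checks the function-graph hooks and returns.
 * Otherwise the registered tracer is called with r0 = address of the
 * instrumented function and r1 = the address of its caller, and we return
 * through mcount_exit afterwards.
 */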
.macro __mcount suffix
	mcount_enter
	ldr	r0, =ftrace_trace_function
	ldr	r2, [r0]
	adr	r0, .Lftrace_stub
	cmp	r0, r2
	bne	1f

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	ldr	r1, =ftrace_graph_return
	ldr	r2, [r1]
	cmp	r0, r2
	bne	ftrace_graph_caller\suffix

	ldr	r1, =ftrace_graph_entry
	ldr	r2, [r1]
	ldr	r0, =ftrace_graph_entry_stub
	cmp	r0, r2
	bne	ftrace_graph_caller\suffix
#endif

	mcount_exit

1:	mcount_get_lr	r1			@ lr of instrumented func
	mcount_adjust_addr	r0, lr		@ instrumented function
	badr	lr, 2f
	mov	pc, r2
2:	mcount_exit
.endm

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS

.macro __ftrace_regs_caller

	sub	sp, sp, #8		@ space for PC and CPSR;
					@ OLD_R0 will overwrite previous LR

	add	ip, sp, #12		@ move in IP the value of SP as it was
					@ before the push {lr} of the mcount mechanism

	str	lr, [sp, #0]		@ store LR instead of PC

	ldr	lr, [sp, #8]		@ get previous LR

	str	r0, [sp, #8]		@ write r0 as OLD_R0 over previous LR

	stmdb	sp!, {ip, lr}
	stmdb	sp!, {r0-r11, lr}

	@ stack content at this point:
	@ 0  4          48   52       56            60   64    68       72
	@ R0 | R1 | ... | LR | SP + 4 | previous LR | LR | PSR | OLD_R0 |

	mov	r3, sp			@ struct pt_regs*

	ldr	r2, =function_trace_op
	ldr	r2, [r2]		@ pointer to the current
					@ function tracing op

	ldr	r1, [sp, #S_LR]		@ lr of instrumented func

	ldr	lr, [sp, #S_PC]		@ get LR

	mcount_adjust_addr	r0, lr	@ instrumented function

	.globl ftrace_regs_call
ftrace_regs_call:
	bl	ftrace_stub

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl ftrace_graph_regs_call
ftrace_graph_regs_call:
	mov	r0, r0
#endif

	@ pop saved regs
	ldmia	sp!, {r0-r12}		@ restore r0 through r12
	ldr	ip, [sp, #8]		@ restore PC
	ldr	lr, [sp, #4]		@ restore LR
	ldr	sp, [sp, #0]		@ restore SP
	mov	pc, ip			@ return
.endm

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
.macro __ftrace_graph_regs_caller

	sub	r0, fp, #4		@ lr of instrumented routine (parent)

	@ called from __ftrace_regs_caller
	ldr	r1, [sp, #S_PC]		@ instrumented routine (func)
	mcount_adjust_addr	r1, r1

	mov	r2, fp			@ frame pointer
	bl	prepare_ftrace_return

	@ pop registers saved in ftrace_regs_caller
	ldmia	sp!, {r0-r12}		@ restore r0 through r12
	ldr	ip, [sp, #8]		@ restore PC
	ldr	lr, [sp, #4]		@ restore LR
	ldr	sp, [sp, #0]		@ restore SP
	mov	pc, ip			@ return

.endm
#endif
#endif

.macro __ftrace_caller suffix
	mcount_enter

	mcount_get_lr	r1			@ lr of instrumented func
	mcount_adjust_addr	r0, lr		@ instrumented function

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
	ldr	r2, =function_trace_op
	ldr	r2, [r2]			@ pointer to the current
						@ function tracing op
	mov	r3, #0				@ regs is NULL
#endif

	.globl ftrace_call\suffix
ftrace_call\suffix:
	bl	ftrace_stub

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl ftrace_graph_call\suffix
ftrace_graph_call\suffix:
	mov	r0, r0
#endif

	mcount_exit
.endm

.macro __ftrace_graph_caller
	sub	r0, fp, #4		@ &lr of instrumented routine (&parent)
#ifdef CONFIG_DYNAMIC_FTRACE
	@ called from __ftrace_caller, saved in mcount_enter
	ldr	r1, [sp, #16]		@ instrumented routine (func)
	mcount_adjust_addr	r1, r1
#else
	@ called from __mcount, untouched in lr
	mcount_adjust_addr	r1, lr	@ instrumented routine (func)
#endif
	mov	r2, fp			@ frame pointer
	bl	prepare_ftrace_return
	mcount_exit
.endm

#ifdef CONFIG_OLD_MCOUNT
/*
 * mcount
 */
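
/*
 * These old-style call sites set up an APCS frame (see the comment at the top
 * of this file), so fp points at the saved pc of the "push {fp, ip, lr, pc}"
 * and the instrumented function's original lr sits one word below it, at
 * [fp, #-4].
 */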

.macro mcount_enter
	stmdb	sp!, {r0-r3, lr}
.endm

.macro mcount_get_lr reg
	ldr	\reg, [fp, #-4]
.endm

.macro mcount_exit
	ldr	lr, [fp, #-4]
	ldmia	sp!, {r0-r3, pc}
.endm

ENTRY(mcount)
#ifdef CONFIG_DYNAMIC_FTRACE
	stmdb	sp!, {lr}
	ldr	lr, [fp, #-4]
	ldmia	sp!, {pc}
#else
	__mcount _old
#endif
ENDPROC(mcount)

#ifdef CONFIG_DYNAMIC_FTRACE
ENTRY(ftrace_caller_old)
	__ftrace_caller _old
ENDPROC(ftrace_caller_old)
#endif

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(ftrace_graph_caller_old)
	__ftrace_graph_caller
ENDPROC(ftrace_graph_caller_old)
#endif

.purgem mcount_enter
.purgem mcount_get_lr
.purgem mcount_exit
#endif

/*
 * __gnu_mcount_nc
 */
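
/*
 * Stack layout inside these macros: the call site has already pushed the
 * instrumented function's lr, and mcount_enter pushes {r0-r3, lr} on top of
 * it, so [sp, #16] holds the address to return to in the instrumented
 * function and [sp, #20] holds the address of its caller (the value
 * mcount_get_lr fetches).
 */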

.macro mcount_enter
/*
 * This pad compensates for the push {lr} at the call site. Note that we are
 * unable to unwind through a function which does not otherwise save its lr.
 */
UNWIND(.pad	#4)
	stmdb	sp!, {r0-r3, lr}
UNWIND(.save	{r0-r3, lr})
.endm

.macro mcount_get_lr reg
	ldr	\reg, [sp, #20]
.endm

.macro mcount_exit
	ldmia	sp!, {r0-r3, ip, lr}
	ret	ip
.endm

ENTRY(__gnu_mcount_nc)
UNWIND(.fnstart)
#ifdef CONFIG_DYNAMIC_FTRACE
	mov	ip, lr
	ldmia	sp!, {lr}
	ret	ip
#else
	__mcount
#endif
UNWIND(.fnend)
ENDPROC(__gnu_mcount_nc)

#ifdef CONFIG_DYNAMIC_FTRACE
ENTRY(ftrace_caller)
UNWIND(.fnstart)
	__ftrace_caller
UNWIND(.fnend)
ENDPROC(ftrace_caller)

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
ENTRY(ftrace_regs_caller)
UNWIND(.fnstart)
	__ftrace_regs_caller
UNWIND(.fnend)
ENDPROC(ftrace_regs_caller)
#endif

#endif

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(ftrace_graph_caller)
UNWIND(.fnstart)
	__ftrace_graph_caller
UNWIND(.fnend)
ENDPROC(ftrace_graph_caller)

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
ENTRY(ftrace_graph_regs_caller)
UNWIND(.fnstart)
	__ftrace_graph_regs_caller
UNWIND(.fnend)
ENDPROC(ftrace_graph_regs_caller)
#endif
#endif

.purgem mcount_enter
.purgem mcount_get_lr
.purgem mcount_exit

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
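/*
 * return_to_handler is the address that prepare_ftrace_return installs in
 * place of an instrumented function's real return address. When the function
 * returns here, ftrace_return_to_handler looks up the original return address
 * and we branch back to it.
 */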
	.globl return_to_handler
return_to_handler:
	stmdb	sp!, {r0-r3}
	mov	r0, fp			@ frame pointer
	bl	ftrace_return_to_handler
	mov	lr, r0			@ r0 has real ret addr
	ldmia	sp!, {r0-r3}
	ret	lr
#endif

ENTRY(ftrace_stub)
.Lftrace_stub:
	ret	lr
ENDPROC(ftrace_stub)
/* SPDX-License-Identifier: GPL-2.0-only */

#include <asm/assembler.h>
#include <asm/ftrace.h>
#include <asm/unwind.h>

#include "entry-header.S"

/*
 * When compiling with -pg, gcc inserts a call to the mcount routine at the
 * start of every function. In mcount, apart from the function's address (in
 * lr), we need to get hold of the function's caller's address.
 *
 * Newer GCCs (4.4+) solve this problem by using a version of mcount with call
 * sites like:
 *
 *	push	{lr}
 *	bl	__gnu_mcount_nc
 *
 * With these compilers, frame pointers are not necessary.
 *
 * mcount can be thought of as a function called in the middle of a subroutine
 * call. As such, it needs to be transparent for both the caller and the
 * callee: the original lr needs to be restored when leaving mcount, and no
 * registers should be clobbered.
 *
 * When using dynamic ftrace, we patch out the call to __gnu_mcount_nc with an
 * "add sp, #4" that simply discards the lr word pushed at the call site (see
 * arch/arm/kernel/ftrace.c).
 */
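
/*
 * For illustration only (a sketch, not code copied from ftrace.c): an
 * instrumented function starting with
 *
 *	push	{lr}
 *	bl	__gnu_mcount_nc
 *
 * has the bl rewritten by dynamic ftrace into
 *
 *	add	sp, sp, #4
 *
 * while tracing is disabled, which drops the pushed word but leaves lr itself
 * untouched; enabling tracing rewrites it again into a call to ftrace_caller
 * (or ftrace_regs_caller).
 */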

.macro mcount_adjust_addr rd, rn
	bic	\rd, \rn, #1		@ clear the Thumb bit if present
	sub	\rd, \rd, #MCOUNT_INSN_SIZE
.endm

.macro __mcount suffix
	mcount_enter
	ldr_va	r2, ftrace_trace_function
	badr	r0, .Lftrace_stub
	cmp	r0, r2
	bne	1f

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	ldr_va	r2, ftrace_graph_return
	cmp	r0, r2
	bne	ftrace_graph_caller\suffix

	ldr_va	r2, ftrace_graph_entry
	mov_l	r0, ftrace_graph_entry_stub
	cmp	r0, r2
	bne	ftrace_graph_caller\suffix
#endif

	mcount_exit

1:	mcount_get_lr	r1			@ lr of instrumented func
	mcount_adjust_addr	r0, lr		@ instrumented function
	badr	lr, 2f
	mov	pc, r2
2:	mcount_exit
.endm

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS

.macro __ftrace_regs_caller

	str	lr, [sp, #-8]!	@ store LR as PC and make space for CPSR/OLD_R0,
				@ OLD_R0 will overwrite previous LR

	ldr	lr, [sp, #8]	@ get previous LR

	str	r0, [sp, #8]	@ write r0 as OLD_R0 over previous LR

	str	lr, [sp, #-4]!	@ store previous LR as LR

	add	lr, sp, #16	@ move in LR the value of SP as it was
				@ before the push {lr} of the mcount mechanism

	push	{r0-r11, ip, lr}

	@ stack content at this point:
	@ 0  4          48   52       56            60   64    68       72
	@ R0 | R1 | ... | IP | SP + 4 | previous LR | LR | PSR | OLD_R0 |

	mov	r3, sp				@ struct pt_regs*

	ldr_va	r2, function_trace_op		@ pointer to the current
						@ function tracing op

	ldr	r1, [sp, #S_LR]			@ lr of instrumented func

	ldr	lr, [sp, #S_PC]			@ get LR

	mcount_adjust_addr	r0, lr		@ instrumented function

	.globl ftrace_regs_call
ftrace_regs_call:
	bl	ftrace_stub

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl ftrace_graph_regs_call
ftrace_graph_regs_call:
ARM(	mov	r0, r0	)
THUMB(	nop.w		)
#endif

	@ pop saved regs
	pop	{r0-r11, ip, lr}	@ restore r0 through r12
	ldr	lr, [sp], #4		@ restore LR
	ldr	pc, [sp], #12
.endm
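
/*
 * The exit sequence above restores r0-r12, reloads the instrumented
 * function's lr from the "previous LR" slot, and then loads pc from the saved
 * PC slot while stepping over the PSR and OLD_R0 words, leaving sp exactly
 * where it was before the call site's push {lr}.
 */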

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
.macro __ftrace_graph_regs_caller

#ifdef CONFIG_UNWINDER_FRAME_POINTER
	sub	r0, fp, #4		@ lr of instrumented routine (parent)
#else
	add	r0, sp, #S_LR
#endif

	@ called from __ftrace_regs_caller
	ldr	r1, [sp, #S_PC]		@ instrumented routine (func)
	mcount_adjust_addr	r1, r1

	mov	r2, fpreg		@ frame pointer
	add	r3, sp, #PT_REGS_SIZE
	bl	prepare_ftrace_return

	@ pop registers saved in ftrace_regs_caller
	pop	{r0-r11, ip, lr}	@ restore r0 through r12
	ldr	lr, [sp], #4		@ restore LR
	ldr	pc, [sp], #12

.endm
#endif
#endif

.macro __ftrace_caller suffix
	mcount_enter

	mcount_get_lr	r1			@ lr of instrumented func
	mcount_adjust_addr	r0, lr		@ instrumented function

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
	ldr_va	r2, function_trace_op		@ pointer to the current
						@ function tracing op
	mov	r3, #0				@ regs is NULL
#endif

	.globl ftrace_call\suffix
ftrace_call\suffix:
	bl	ftrace_stub

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl ftrace_graph_call\suffix
ftrace_graph_call\suffix:
ARM(	mov	r0, r0	)
THUMB(	nop.w		)
#endif

	mcount_exit
.endm

.macro __ftrace_graph_caller
#ifdef CONFIG_UNWINDER_FRAME_POINTER
	sub	r0, fp, #4		@ &lr of instrumented routine (&parent)
#else
	add	r0, sp, #20
#endif
#ifdef CONFIG_DYNAMIC_FTRACE
	@ called from __ftrace_caller, saved in mcount_enter
	ldr	r1, [sp, #16]		@ instrumented routine (func)
	mcount_adjust_addr	r1, r1
#else
	@ called from __mcount, untouched in lr
	mcount_adjust_addr	r1, lr	@ instrumented routine (func)
#endif
	mov	r2, fpreg		@ frame pointer
	add	r3, sp, #24
	bl	prepare_ftrace_return
	mcount_exit
.endm
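
/*
 * prepare_ftrace_return wants the address of the slot holding the parent
 * return address (it may replace that value with return_to_handler). With
 * the frame-pointer unwinder the slot is [fp, #-4] in the instrumented
 * function's frame; otherwise it is the lr word pushed at the call site,
 * which sits just above the registers saved by mcount_enter.
 */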

/*
 * __gnu_mcount_nc
 */

.macro mcount_enter
/*
 * This pad compensates for the push {lr} at the call site. Note that we are
 * unable to unwind through a function which does not otherwise save its lr.
 */
UNWIND(.pad	#4)
	stmdb	sp!, {r0-r3, lr}
UNWIND(.save	{r0-r3, lr})
.endm

.macro mcount_get_lr reg
	ldr	\reg, [sp, #20]
.endm

.macro mcount_exit
	ldmia	sp!, {r0-r3}
	ldr	lr, [sp, #4]
	ldr	pc, [sp], #8
.endm
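
/*
 * mcount_exit here restores r0-r3, reloads the instrumented function's lr
 * from the word pushed at the call site, and returns into the instrumented
 * function while popping both remaining words, without clobbering ip.
 */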

ENTRY(__gnu_mcount_nc)
UNWIND(.fnstart)
#ifdef CONFIG_DYNAMIC_FTRACE
	push	{lr}
	ldr	lr, [sp, #4]
	ldr	pc, [sp], #8
#else
	__mcount
#endif
UNWIND(.fnend)
ENDPROC(__gnu_mcount_nc)

#ifdef CONFIG_DYNAMIC_FTRACE
ENTRY(ftrace_caller)
UNWIND(.fnstart)
	__ftrace_caller
UNWIND(.fnend)
ENDPROC(ftrace_caller)

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
ENTRY(ftrace_regs_caller)
UNWIND(.fnstart)
	__ftrace_regs_caller
UNWIND(.fnend)
ENDPROC(ftrace_regs_caller)
#endif

#endif

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(ftrace_graph_caller)
UNWIND(.fnstart)
	__ftrace_graph_caller
UNWIND(.fnend)
ENDPROC(ftrace_graph_caller)

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
ENTRY(ftrace_graph_regs_caller)
UNWIND(.fnstart)
	__ftrace_graph_regs_caller
UNWIND(.fnend)
ENDPROC(ftrace_graph_regs_caller)
#endif
#endif

.purgem mcount_enter
.purgem mcount_get_lr
.purgem mcount_exit

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(return_to_handler)
	stmdb	sp!, {r0-r3}
	add	r0, sp, #16		@ sp at exit of instrumented routine
	bl	ftrace_return_to_handler
	mov	lr, r0			@ r0 has real ret addr
	ldmia	sp!, {r0-r3}
	ret	lr
ENDPROC(return_to_handler)
#endif

ENTRY(ftrace_stub)
.Lftrace_stub:
	ret	lr
ENDPROC(ftrace_stub)

#ifdef CONFIG_DYNAMIC_FTRACE

	__INIT

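/*
 * Trampolines placed in the init section: each one branches to the
 * corresponding tracer entry point through an absolute address loaded from
 * the literal pool, presumably so that mcount call sites in init code remain
 * reachable even when they are out of direct branch range.
 */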
	.macro	init_tramp, dst:req
ENTRY(\dst\()_from_init)
	ldr	pc, =\dst
ENDPROC(\dst\()_from_init)
	.endm

	init_tramp	ftrace_caller
#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
	init_tramp	ftrace_regs_caller
#endif
#endif