v4.17 (arch/arm/kernel/entry-ftrace.S)
/*
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include <asm/assembler.h>
#include <asm/ftrace.h>
#include <asm/unwind.h>

#include "entry-header.S"

/*
 * When compiling with -pg, gcc inserts a call to the mcount routine at the
 * start of every function.  In mcount, apart from the function's address (in
 * lr), we need to get hold of the function's caller's address.
 *
 * Older GCCs (pre-4.4) inserted a call to a routine called mcount like this:
 *
 *	bl	mcount
 *
 * These versions have the limitation that in order for the mcount routine to
 * be able to determine the function's caller's address, an APCS-style frame
 * pointer (which is set up with something like the code below) is required.
 *
 *	mov     ip, sp
 *	push    {fp, ip, lr, pc}
 *	sub     fp, ip, #4
 *
 * With EABI, these frame pointers are not available unless -mapcs-frame is
 * specified, and if building as Thumb-2, not even then.
 *
 * Newer GCCs (4.4+) solve this problem by introducing a new version of mcount,
 * with call sites like:
 *
 *	push	{lr}
 *	bl	__gnu_mcount_nc
 *
 * With these compilers, frame pointers are not necessary.
 *
 * mcount can be thought of as a function called in the middle of a subroutine
 * call.  As such, it needs to be transparent for both the caller and the
 * callee: the original lr needs to be restored when leaving mcount, and no
 * registers should be clobbered.  (In the __gnu_mcount_nc implementation, we
 * clobber the ip register.  This is OK because the ARM calling convention
 * allows it to be clobbered in subroutines and doesn't use it to hold
 * parameters.)
 *
 * When using dynamic ftrace, we patch out the mcount call by a "mov r0, r0"
 * for the mcount case, and a "pop {lr}" for the __gnu_mcount_nc case (see
 * arch/arm/kernel/ftrace.c).
 */
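
/*
 * Illustration (an added sketch, not taken from a real build): with a
 * GCC >= 4.4 toolchain and -pg, an instrumented function (here given the
 * made-up name my_func) begins roughly like:
 *
 *	my_func:
 *		push	{lr}
 *		bl	__gnu_mcount_nc
 *		...
 *
 * Under dynamic ftrace, arch/arm/kernel/ftrace.c rewrites that bl to a
 * "pop {lr}" while tracing is disabled, and points it at one of the caller
 * stubs below while tracing is enabled.
 */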

#ifndef CONFIG_OLD_MCOUNT
#if (__GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 4))
#error Ftrace requires CONFIG_FRAME_POINTER=y with GCC older than 4.4.0.
#endif
#endif

.macro mcount_adjust_addr rd, rn
	bic	\rd, \rn, #1		@ clear the Thumb bit if present
	sub	\rd, \rd, #MCOUNT_INSN_SIZE
.endm

.macro __mcount suffix
	mcount_enter
	ldr	r0, =ftrace_trace_function
	ldr	r2, [r0]
	adr	r0, .Lftrace_stub
	cmp	r0, r2
	bne	1f

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	ldr     r1, =ftrace_graph_return
	ldr     r2, [r1]
	cmp     r0, r2
	bne     ftrace_graph_caller\suffix

	ldr     r1, =ftrace_graph_entry
	ldr     r2, [r1]
	ldr     r0, =ftrace_graph_entry_stub
	cmp     r0, r2
	bne     ftrace_graph_caller\suffix
#endif

	mcount_exit

1: 	mcount_get_lr	r1			@ lr of instrumented func
	mcount_adjust_addr	r0, lr		@ instrumented function
	badr	lr, 2f
	mov	pc, r2
2:	mcount_exit
.endm
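
/*
 * Summary of the flow above (an added note, not from the original file):
 * __mcount compares ftrace_trace_function against ftrace_stub.  If a real
 * tracer is installed it jumps to label 1 and calls it with r0 = the
 * adjusted address of the instrumented function (its mcount call site) and
 * r1 = that function's own lr (its caller).  Otherwise, if the graph tracer
 * hooks are installed, it branches to ftrace_graph_caller; if nothing is
 * installed it simply falls through to mcount_exit.
 */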

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS

.macro __ftrace_regs_caller

	sub	sp, sp, #8	@ space for PC and CPSR OLD_R0,
				@ OLD_R0 will overwrite previous LR

	add 	ip, sp, #12	@ move in IP the value of SP as it was
				@ before the push {lr} of the mcount mechanism

	str     lr, [sp, #0]    @ store LR instead of PC

	ldr     lr, [sp, #8]    @ get previous LR

	str	r0, [sp, #8]	@ write r0 as OLD_R0 over previous LR

	stmdb   sp!, {ip, lr}
	stmdb   sp!, {r0-r11, lr}

	@ stack content at this point:
	@ 0  4          48   52       56            60   64    68       72
	@ R0 | R1 | ... | LR | SP + 4 | previous LR | LR | PSR | OLD_R0 |
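	@ (added note, not in the original source: read as a struct pt_regs
	@  this is r0-r11 at offsets 0-44, the instrumented function's lr,
	@  i.e. its caller, in both the ip slot at 48 and S_LR at 56, its
	@  original sp at S_SP (52), the address just past the mcount call
	@  site at S_PC (60), and r0 duplicated at OLD_R0 (68); the whole
	@  frame is 72 bytes)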

	mov r3, sp				@ struct pt_regs*

	ldr r2, =function_trace_op
	ldr r2, [r2]				@ pointer to the current
						@ function tracing op

	ldr	r1, [sp, #S_LR]			@ lr of instrumented func

	ldr	lr, [sp, #S_PC]			@ get LR

	mcount_adjust_addr	r0, lr		@ instrumented function

	.globl ftrace_regs_call
ftrace_regs_call:
	bl	ftrace_stub

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl ftrace_graph_regs_call
ftrace_graph_regs_call:
	mov	r0, r0
#endif

	@ pop saved regs
	ldmia   sp!, {r0-r12}			@ restore r0 through r12
	ldr	ip, [sp, #8]			@ restore PC
	ldr	lr, [sp, #4]			@ restore LR
	ldr	sp, [sp, #0]			@ restore SP
	mov	pc, ip				@ return
.endm
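
/*
 * Note added for clarity: ftrace_regs_call and ftrace_graph_regs_call are
 * runtime patch sites; arch/arm/kernel/ftrace.c rewrites the "bl ftrace_stub"
 * and the "mov r0, r0" placeholder when tracing is enabled.  The epilogue
 * then reloads r0-r12 and takes PC, LR and SP from the saved pt_regs, so a
 * tracer that modified regs->pc changes where execution resumes.
 */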

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
.macro __ftrace_graph_regs_caller

	sub     r0, fp, #4              @ lr of instrumented routine (parent)

	@ called from __ftrace_regs_caller
	ldr     r1, [sp, #S_PC]		@ instrumented routine (func)
	mcount_adjust_addr	r1, r1

	mov	r2, fp			@ frame pointer
	bl	prepare_ftrace_return

	@ pop registers saved in ftrace_regs_caller
	ldmia   sp!, {r0-r12}			@ restore r0 through r12
	ldr	ip, [sp, #8]			@ restore PC
	ldr	lr, [sp, #4]			@ restore LR
	ldr	sp, [sp, #0]			@ restore SP
	mov	pc, ip				@ return

.endm
#endif
#endif

.macro __ftrace_caller suffix
	mcount_enter

	mcount_get_lr	r1			@ lr of instrumented func
	mcount_adjust_addr	r0, lr		@ instrumented function

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
	ldr r2, =function_trace_op
	ldr r2, [r2]				@ pointer to the current
						@ function tracing op
	mov r3, #0				@ regs is NULL
#endif

	.globl ftrace_call\suffix
ftrace_call\suffix:
	bl	ftrace_stub

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl ftrace_graph_call\suffix
ftrace_graph_call\suffix:
	mov	r0, r0
#endif

	mcount_exit
.endm
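
/*
 * Note added for clarity: unlike __ftrace_regs_caller, this path builds no
 * pt_regs frame, so r3 is passed as NULL to tell the tracer that no register
 * state is available.  ftrace_call\suffix and ftrace_graph_call\suffix are
 * the sites patched at runtime by arch/arm/kernel/ftrace.c; mcount_enter,
 * mcount_get_lr and mcount_exit are supplied by whichever flavour (old
 * mcount or __gnu_mcount_nc) the macro is instantiated for below.
 */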

.macro __ftrace_graph_caller
	sub	r0, fp, #4		@ &lr of instrumented routine (&parent)
#ifdef CONFIG_DYNAMIC_FTRACE
	@ called from __ftrace_caller, saved in mcount_enter
	ldr	r1, [sp, #16]		@ instrumented routine (func)
	mcount_adjust_addr	r1, r1
#else
	@ called from __mcount, untouched in lr
	mcount_adjust_addr	r1, lr	@ instrumented routine (func)
#endif
	mov	r2, fp			@ frame pointer
	bl	prepare_ftrace_return
	mcount_exit
.endm
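
/*
 * Note added for clarity: prepare_ftrace_return() (in arch/arm/kernel/ftrace.c)
 * records the real return address on the return-trace stack and, if the graph
 * tracer accepts the function, replaces the word at *parent (here fp - 4, the
 * lr slot of the APCS frame) with the address of return_to_handler, so the
 * function's exit can be traced as well.
 */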

#ifdef CONFIG_OLD_MCOUNT
/*
 * mcount
 */

.macro mcount_enter
	stmdb	sp!, {r0-r3, lr}
.endm

.macro mcount_get_lr reg
	ldr	\reg, [fp, #-4]
.endm

.macro mcount_exit
	ldr	lr, [fp, #-4]
	ldmia	sp!, {r0-r3, pc}
.endm
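
/*
 * Note added for clarity: in this old-mcount flavour the call site is a bare
 * "bl mcount", so the caller's lr is not on our stack; it is fetched from
 * [fp, #-4], the lr slot of the APCS frame that the instrumented function
 * set up in its prologue (hence the CONFIG_FRAME_POINTER requirement noted
 * at the top of the file).
 */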

ENTRY(mcount)
#ifdef CONFIG_DYNAMIC_FTRACE
	stmdb	sp!, {lr}
	ldr	lr, [fp, #-4]
	ldmia	sp!, {pc}
#else
	__mcount _old
#endif
ENDPROC(mcount)

#ifdef CONFIG_DYNAMIC_FTRACE
ENTRY(ftrace_caller_old)
	__ftrace_caller _old
ENDPROC(ftrace_caller_old)
#endif

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(ftrace_graph_caller_old)
	__ftrace_graph_caller
ENDPROC(ftrace_graph_caller_old)
#endif

.purgem mcount_enter
.purgem mcount_get_lr
.purgem mcount_exit
#endif

/*
 * __gnu_mcount_nc
 */

.macro mcount_enter
/*
 * This pad compensates for the push {lr} at the call site.  Note that we are
 * unable to unwind through a function which does not otherwise save its lr.
 */
 UNWIND(.pad	#4)
	stmdb	sp!, {r0-r3, lr}
 UNWIND(.save	{r0-r3, lr})
.endm

.macro mcount_get_lr reg
	ldr	\reg, [sp, #20]
.endm

.macro mcount_exit
	ldmia	sp!, {r0-r3, ip, lr}
	ret	ip
.endm
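
/*
 * Note added for clarity: for __gnu_mcount_nc the call site has already done
 * "push {lr}", so after mcount_enter the stack holds {r0-r3, lr} (20 bytes)
 * with the caller-pushed lr just above them: [sp, #20] in mcount_get_lr is
 * the instrumented function's own lr.  mcount_exit pops six words: r0-r3,
 * then ip takes the return address into the instrumented function while lr
 * gets back its original value, and "ret ip" resumes just after the call
 * site with the stack fully unwound.
 */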

ENTRY(__gnu_mcount_nc)
UNWIND(.fnstart)
#ifdef CONFIG_DYNAMIC_FTRACE
	mov	ip, lr
	ldmia	sp!, {lr}
	ret	ip
#else
	__mcount
#endif
UNWIND(.fnend)
ENDPROC(__gnu_mcount_nc)

#ifdef CONFIG_DYNAMIC_FTRACE
ENTRY(ftrace_caller)
UNWIND(.fnstart)
	__ftrace_caller
UNWIND(.fnend)
ENDPROC(ftrace_caller)

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
ENTRY(ftrace_regs_caller)
UNWIND(.fnstart)
	__ftrace_regs_caller
UNWIND(.fnend)
ENDPROC(ftrace_regs_caller)
#endif

#endif

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(ftrace_graph_caller)
UNWIND(.fnstart)
	__ftrace_graph_caller
UNWIND(.fnend)
ENDPROC(ftrace_graph_caller)

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
ENTRY(ftrace_graph_regs_caller)
UNWIND(.fnstart)
	__ftrace_graph_regs_caller
UNWIND(.fnend)
ENDPROC(ftrace_graph_regs_caller)
#endif
#endif

.purgem mcount_enter
.purgem mcount_get_lr
.purgem mcount_exit

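/*
 * Note added for clarity: return_to_handler below is the address that
 * prepare_ftrace_return() planted in place of the real return address.  It
 * runs when an instrumented function returns: ftrace_return_to_handler()
 * pops the original return address off the per-task return-trace stack and
 * hands it back in r0, and we jump there.  r0-r3 are saved and restored
 * around the call since r0/r1 may hold the traced function's return value.
 */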
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl return_to_handler
return_to_handler:
	stmdb	sp!, {r0-r3}
	mov	r0, fp			@ frame pointer
	bl	ftrace_return_to_handler
	mov	lr, r0			@ r0 has real ret addr
	ldmia	sp!, {r0-r3}
	ret	lr
#endif

ENTRY(ftrace_stub)
.Lftrace_stub:
	ret	lr
ENDPROC(ftrace_stub)
v5.4 (arch/arm/kernel/entry-ftrace.S)
/* SPDX-License-Identifier: GPL-2.0-only */

#include <asm/assembler.h>
#include <asm/ftrace.h>
#include <asm/unwind.h>

#include "entry-header.S"

/*
 * When compiling with -pg, gcc inserts a call to the mcount routine at the
 * start of every function.  In mcount, apart from the function's address (in
 * lr), we need to get hold of the function's caller's address.
 *
 * Newer GCCs (4.4+) solve this problem by using a version of mcount with call
 * sites like:
 *
 *	push	{lr}
 *	bl	__gnu_mcount_nc
 *
 * With these compilers, frame pointers are not necessary.
 *
 * mcount can be thought of as a function called in the middle of a subroutine
 * call.  As such, it needs to be transparent for both the caller and the
 * callee: the original lr needs to be restored when leaving mcount, and no
 * registers should be clobbered.  (In the __gnu_mcount_nc implementation, we
 * clobber the ip register.  This is OK because the ARM calling convention
 * allows it to be clobbered in subroutines and doesn't use it to hold
 * parameters.)
 *
 * When using dynamic ftrace, we patch out the mcount call by a "pop {lr}"
 * instead of the __gnu_mcount_nc call (see arch/arm/kernel/ftrace.c).
 */
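
/*
 * Note on this version (added, not part of the file): relative to the v4.17
 * listing above, v5.4 carries an SPDX tag instead of the license boilerplate
 * and has dropped the CONFIG_OLD_MCOUNT path for pre-4.4 GCC, so only the
 * __gnu_mcount_nc flavour of the entry code remains.  The annotations added
 * to the v4.17 listing apply unchanged to the code below.
 */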

.macro mcount_adjust_addr rd, rn
	bic	\rd, \rn, #1		@ clear the Thumb bit if present
	sub	\rd, \rd, #MCOUNT_INSN_SIZE
.endm

.macro __mcount suffix
	mcount_enter
	ldr	r0, =ftrace_trace_function
	ldr	r2, [r0]
	adr	r0, .Lftrace_stub
	cmp	r0, r2
	bne	1f

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	ldr     r1, =ftrace_graph_return
	ldr     r2, [r1]
	cmp     r0, r2
	bne     ftrace_graph_caller\suffix

	ldr     r1, =ftrace_graph_entry
	ldr     r2, [r1]
	ldr     r0, =ftrace_graph_entry_stub
	cmp     r0, r2
	bne     ftrace_graph_caller\suffix
#endif

	mcount_exit

1: 	mcount_get_lr	r1			@ lr of instrumented func
	mcount_adjust_addr	r0, lr		@ instrumented function
	badr	lr, 2f
	mov	pc, r2
2:	mcount_exit
.endm

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS

.macro __ftrace_regs_caller

	sub	sp, sp, #8	@ space for PC and CPSR OLD_R0,
				@ OLD_R0 will overwrite previous LR

	add 	ip, sp, #12	@ move in IP the value of SP as it was
				@ before the push {lr} of the mcount mechanism

	str     lr, [sp, #0]    @ store LR instead of PC

	ldr     lr, [sp, #8]    @ get previous LR

	str	r0, [sp, #8]	@ write r0 as OLD_R0 over previous LR

	stmdb   sp!, {ip, lr}
	stmdb   sp!, {r0-r11, lr}

	@ stack content at this point:
	@ 0  4          48   52       56            60   64    68       72
	@ R0 | R1 | ... | LR | SP + 4 | previous LR | LR | PSR | OLD_R0 |

	mov r3, sp				@ struct pt_regs*

	ldr r2, =function_trace_op
	ldr r2, [r2]				@ pointer to the current
						@ function tracing op

	ldr	r1, [sp, #S_LR]			@ lr of instrumented func

	ldr	lr, [sp, #S_PC]			@ get LR

	mcount_adjust_addr	r0, lr		@ instrumented function

	.globl ftrace_regs_call
ftrace_regs_call:
	bl	ftrace_stub

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl ftrace_graph_regs_call
ftrace_graph_regs_call:
	mov	r0, r0
#endif

	@ pop saved regs
	ldmia   sp!, {r0-r12}			@ restore r0 through r12
	ldr	ip, [sp, #8]			@ restore PC
	ldr	lr, [sp, #4]			@ restore LR
	ldr	sp, [sp, #0]			@ restore SP
	mov	pc, ip				@ return
.endm

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
.macro __ftrace_graph_regs_caller

	sub     r0, fp, #4              @ lr of instrumented routine (parent)

	@ called from __ftrace_regs_caller
	ldr     r1, [sp, #S_PC]		@ instrumented routine (func)
	mcount_adjust_addr	r1, r1

	mov	r2, fp			@ frame pointer
	bl	prepare_ftrace_return

	@ pop registers saved in ftrace_regs_caller
	ldmia   sp!, {r0-r12}			@ restore r0 through r12
	ldr	ip, [sp, #8]			@ restore PC
	ldr	lr, [sp, #4]			@ restore LR
	ldr	sp, [sp, #0]			@ restore SP
	mov	pc, ip				@ return

.endm
#endif
#endif

.macro __ftrace_caller suffix
	mcount_enter

	mcount_get_lr	r1			@ lr of instrumented func
	mcount_adjust_addr	r0, lr		@ instrumented function

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
	ldr r2, =function_trace_op
	ldr r2, [r2]				@ pointer to the current
						@ function tracing op
	mov r3, #0				@ regs is NULL
#endif

	.globl ftrace_call\suffix
ftrace_call\suffix:
	bl	ftrace_stub

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl ftrace_graph_call\suffix
ftrace_graph_call\suffix:
	mov	r0, r0
#endif

	mcount_exit
.endm

.macro __ftrace_graph_caller
	sub	r0, fp, #4		@ &lr of instrumented routine (&parent)
#ifdef CONFIG_DYNAMIC_FTRACE
	@ called from __ftrace_caller, saved in mcount_enter
	ldr	r1, [sp, #16]		@ instrumented routine (func)
	mcount_adjust_addr	r1, r1
#else
	@ called from __mcount, untouched in lr
	mcount_adjust_addr	r1, lr	@ instrumented routine (func)
#endif
	mov	r2, fp			@ frame pointer
	bl	prepare_ftrace_return
	mcount_exit
.endm

/*
 * __gnu_mcount_nc
 */

.macro mcount_enter
/*
 * This pad compensates for the push {lr} at the call site.  Note that we are
 * unable to unwind through a function which does not otherwise save its lr.
 */
 UNWIND(.pad	#4)
	stmdb	sp!, {r0-r3, lr}
 UNWIND(.save	{r0-r3, lr})
.endm

.macro mcount_get_lr reg
	ldr	\reg, [sp, #20]
.endm

.macro mcount_exit
	ldmia	sp!, {r0-r3, ip, lr}
	ret	ip
.endm

ENTRY(__gnu_mcount_nc)
UNWIND(.fnstart)
#ifdef CONFIG_DYNAMIC_FTRACE
	mov	ip, lr
	ldmia	sp!, {lr}
	ret	ip
#else
	__mcount
#endif
UNWIND(.fnend)
ENDPROC(__gnu_mcount_nc)

#ifdef CONFIG_DYNAMIC_FTRACE
ENTRY(ftrace_caller)
UNWIND(.fnstart)
	__ftrace_caller
UNWIND(.fnend)
ENDPROC(ftrace_caller)

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
ENTRY(ftrace_regs_caller)
UNWIND(.fnstart)
	__ftrace_regs_caller
UNWIND(.fnend)
ENDPROC(ftrace_regs_caller)
#endif

#endif

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
ENTRY(ftrace_graph_caller)
UNWIND(.fnstart)
	__ftrace_graph_caller
UNWIND(.fnend)
ENDPROC(ftrace_graph_caller)

#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
ENTRY(ftrace_graph_regs_caller)
UNWIND(.fnstart)
	__ftrace_graph_regs_caller
UNWIND(.fnend)
ENDPROC(ftrace_graph_regs_caller)
#endif
#endif

.purgem mcount_enter
.purgem mcount_get_lr
.purgem mcount_exit

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl return_to_handler
return_to_handler:
	stmdb	sp!, {r0-r3}
	mov	r0, fp			@ frame pointer
	bl	ftrace_return_to_handler
	mov	lr, r0			@ r0 has real ret addr
	ldmia	sp!, {r0-r3}
	ret	lr
#endif

ENTRY(ftrace_stub)
.Lftrace_stub:
	ret	lr
ENDPROC(ftrace_stub)