@ NOTE(review): the following is a source listing of arch/arm/kernel/entry-ftrace.S
@ (it includes "entry-header.S" and references arch/arm/kernel/ftrace.c).
@ Two upstream snapshots are concatenated; the first is from Linux v4.6.
  1/*
  2 * This program is free software; you can redistribute it and/or modify
  3 * it under the terms of the GNU General Public License version 2 as
  4 * published by the Free Software Foundation.
  5 */
  6
  7#include <asm/assembler.h>
  8#include <asm/ftrace.h>
  9#include <asm/unwind.h>
 10
 11#include "entry-header.S"
 12
 13/*
 14 * When compiling with -pg, gcc inserts a call to the mcount routine at the
 15 * start of every function.  In mcount, apart from the function's address (in
 16 * lr), we need to get hold of the function's caller's address.
 17 *
 18 * Older GCCs (pre-4.4) inserted a call to a routine called mcount like this:
 19 *
 20 *	bl	mcount
 21 *
 22 * These versions have the limitation that in order for the mcount routine to
 23 * be able to determine the function's caller's address, an APCS-style frame
 24 * pointer (which is set up with something like the code below) is required.
 25 *
 26 *	mov     ip, sp
 27 *	push    {fp, ip, lr, pc}
 28 *	sub     fp, ip, #4
 29 *
 30 * With EABI, these frame pointers are not available unless -mapcs-frame is
 31 * specified, and if building as Thumb-2, not even then.
 32 *
 33 * Newer GCCs (4.4+) solve this problem by introducing a new version of mcount,
 34 * with call sites like:
 35 *
 36 *	push	{lr}
 37 *	bl	__gnu_mcount_nc
 38 *
 39 * With these compilers, frame pointers are not necessary.
 40 *
 41 * mcount can be thought of as a function called in the middle of a subroutine
 42 * call.  As such, it needs to be transparent for both the caller and the
 43 * callee: the original lr needs to be restored when leaving mcount, and no
 44 * registers should be clobbered.  (In the __gnu_mcount_nc implementation, we
 45 * clobber the ip register.  This is OK because the ARM calling convention
 46 * allows it to be clobbered in subroutines and doesn't use it to hold
 47 * parameters.)
 48 *
 49 * When using dynamic ftrace, we patch out the mcount call by a "mov r0, r0"
 50 * for the mcount case, and a "pop {lr}" for the __gnu_mcount_nc case (see
 51 * arch/arm/kernel/ftrace.c).
 52 */
 53
 54#ifndef CONFIG_OLD_MCOUNT
 55#if (__GNUC__ < 4 || (__GNUC__ == 4 && __GNUC_MINOR__ < 4))
/*
 * Build-time guard: GCC older than 4.4 emits the old-style "bl mcount"
 * call sites (see the comment above), which this file only supports when
 * CONFIG_OLD_MCOUNT is enabled — so refuse to build that combination.
 */
 56#error Ftrace requires CONFIG_FRAME_POINTER=y with GCC older than 4.4.0.
 57#endif
 58#endif
 59
 60.macro mcount_adjust_addr rd, rn
	@ \rd = \rn with the Thumb bit cleared, minus MCOUNT_INSN_SIZE:
	@ turns a raw return address into the address of the traced
	@ (instrumented) function's mcount call site.
 61	bic	\rd, \rn, #1		@ clear the Thumb bit if present
 62	sub	\rd, \rd, #MCOUNT_INSN_SIZE
 63.endm
 64
 65.macro __mcount suffix
	@ Common mcount body: dispatch to the registered function tracer
	@ and/or the function-graph tracer, or fall straight through when
	@ nothing is registered.  \suffix selects the _old variants when
	@ expanded from the CONFIG_OLD_MCOUNT entry points.
 66	mcount_enter
 67	ldr	r0, =ftrace_trace_function
 68	ldr	r2, [r0]
 69	adr	r0, .Lftrace_stub
	@ Fast path: if ftrace_trace_function still points at ftrace_stub,
	@ no tracer is registered.
 70	cmp	r0, r2
 71	bne	1f
 72
 73#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	@ Graph tracing is active if either ftrace_graph_return or
	@ ftrace_graph_entry has been changed from its stub default.
 74	ldr     r1, =ftrace_graph_return
 75	ldr     r2, [r1]
 76	cmp     r0, r2
 77	bne     ftrace_graph_caller\suffix
 78
 79	ldr     r1, =ftrace_graph_entry
 80	ldr     r2, [r1]
 81	ldr     r0, =ftrace_graph_entry_stub
 82	cmp     r0, r2
 83	bne     ftrace_graph_caller\suffix
 84#endif
 85
 86	mcount_exit
 87
	@ Slow path: r2 = registered tracer; call it as
	@ tracer(func, parent_lr) and return to 2: afterwards.
 881: 	mcount_get_lr	r1			@ lr of instrumented func
 89	mcount_adjust_addr	r0, lr		@ instrumented function
 90	badr	lr, 2f
 91	mov	pc, r2
 922:	mcount_exit
 93.endm
 94
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 95.macro __ftrace_caller suffix
	@ Dynamic-ftrace caller body: r0 = instrumented function,
	@ r1 = its caller's lr, then an indirection site that ftrace
	@ patches at runtime (see arch/arm/kernel/ftrace.c).
 96	mcount_enter
 97
 98	mcount_get_lr	r1			@ lr of instrumented func
 99	mcount_adjust_addr	r0, lr		@ instrumented function
100






	@ Runtime-patched call site: the "bl ftrace_stub" below is
	@ rewritten by ftrace to call the active tracer.
101	.globl ftrace_call\suffix
102ftrace_call\suffix:
103	bl	ftrace_stub
104
105#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	@ NOP placeholder, patched to branch to ftrace_graph_caller\suffix
	@ when graph tracing is enabled.
106	.globl ftrace_graph_call\suffix
107ftrace_graph_call\suffix:
108	mov	r0, r0

109#endif
110
111	mcount_exit
112.endm
113
114.macro __ftrace_graph_caller
	@ Set up and call prepare_ftrace_return(&parent_lr, func, fp)
	@ for the function-graph tracer.

115	sub	r0, fp, #4		@ &lr of instrumented routine (&parent)



116#ifdef CONFIG_DYNAMIC_FTRACE
117	@ called from __ftrace_caller, saved in mcount_enter
118	ldr	r1, [sp, #16]		@ instrumented routine (func)
119	mcount_adjust_addr	r1, r1
120#else
121	@ called from __mcount, untouched in lr
122	mcount_adjust_addr	r1, lr	@ instrumented routine (func)
123#endif
124	mov	r2, fp			@ frame pointer

125	bl	prepare_ftrace_return
126	mcount_exit
127.endm
128
129#ifdef CONFIG_OLD_MCOUNT
130/*
131 * mcount
132 *
133 * Support for the old-style (pre-GCC-4.4) "bl mcount" call sites, which
134 * rely on an APCS frame (lr saved at [fp, #-4] — see the frame layout in
135 * the comment at the top of this file).
136 */
137
/* Save the argument registers and our return address. */
138.macro mcount_enter
139	stmdb	sp!, {r0-r3, lr}
140.endm
141
/* Fetch the instrumented function's caller's lr from the APCS frame. */
142.macro mcount_get_lr reg
143	ldr	\reg, [fp, #-4]
144.endm
145
/* Restore argument registers and return into the instrumented function's
 * caller context: lr from the APCS frame, pc from the saved lr. */
146.macro mcount_exit
147	ldr	lr, [fp, #-4]
148	ldmia	sp!, {r0-r3, pc}
149.endm
150
151ENTRY(mcount)
152#ifdef CONFIG_DYNAMIC_FTRACE
	/* Default (not-yet-patched) body: restore lr and return without
	 * tracing; the call site itself is patched out at runtime (see
	 * the "mov r0, r0" note in the header comment). */
153	stmdb	sp!, {lr}
154	ldr	lr, [fp, #-4]
155	ldmia	sp!, {pc}
156#else
157	__mcount _old
158#endif
159ENDPROC(mcount)
160
161#ifdef CONFIG_DYNAMIC_FTRACE
162ENTRY(ftrace_caller_old)
163	__ftrace_caller _old
164ENDPROC(ftrace_caller_old)
165#endif
166
167#ifdef CONFIG_FUNCTION_GRAPH_TRACER
168ENTRY(ftrace_graph_caller_old)
169	__ftrace_graph_caller
170ENDPROC(ftrace_graph_caller_old)
171#endif
172
/* Discard the old-ABI helper macros so the __gnu_mcount_nc variants
 * below can redefine them. */
173.purgem mcount_enter
174.purgem mcount_get_lr
175.purgem mcount_exit
176#endif
173
174/*
175 * __gnu_mcount_nc
176 */
177
/* Save argument registers and lr on top of the lr the call site already
 * pushed ("push {lr}; bl __gnu_mcount_nc"). */
178.macro mcount_enter
179/*
180 * This pad compensates for the push {lr} at the call site.  Note that we are
181 * unable to unwind through a function which does not otherwise save its lr.
182 */
183 UNWIND(.pad	#4)
184	stmdb	sp!, {r0-r3, lr}
185 UNWIND(.save	{r0-r3, lr})
186.endm
187
188.macro mcount_get_lr reg
	@ The call-site-pushed lr sits above the 5 words (20 bytes)
	@ stored by mcount_enter.
189	ldr	\reg, [sp, #20]
190.endm
191
192.macro mcount_exit
	@ Pop r0-r3, then ip <- lr saved by mcount_enter (return address
	@ into the instrumented function) and lr <- the call-site-pushed
	@ lr (the instrumented function's original lr).  Clobbers only ip,
	@ which the ARM calling convention permits (see header comment).
193	ldmia	sp!, {r0-r3, ip, lr}
194	ret	ip

195.endm
196
197ENTRY(__gnu_mcount_nc)
198UNWIND(.fnstart)
199#ifdef CONFIG_DYNAMIC_FTRACE
	@ Default (not-yet-patched) body: undo the call site's "push {lr}"
	@ and return, clobbering only ip.  When tracing is enabled, ftrace
	@ patches the call site instead (see arch/arm/kernel/ftrace.c).
200	mov	ip, lr
201	ldmia	sp!, {lr}
202	ret	ip
203#else
204	__mcount
205#endif
206UNWIND(.fnend)
207ENDPROC(__gnu_mcount_nc)
208
209#ifdef CONFIG_DYNAMIC_FTRACE
/* Dynamic-ftrace entry point for __gnu_mcount_nc call sites; ftrace
 * redirects patched call sites here when a tracer is active. */
210ENTRY(ftrace_caller)
211UNWIND(.fnstart)
212	__ftrace_caller
213UNWIND(.fnend)
214ENDPROC(ftrace_caller)









215#endif
216
217#ifdef CONFIG_FUNCTION_GRAPH_TRACER
/* Function-graph tracer entry point for __gnu_mcount_nc call sites. */
218ENTRY(ftrace_graph_caller)
219UNWIND(.fnstart)
220	__ftrace_graph_caller
221UNWIND(.fnend)
222ENDPROC(ftrace_graph_caller)








223#endif
224
/* Done with the __gnu_mcount_nc helper macros. */
225.purgem mcount_enter
226.purgem mcount_get_lr
227.purgem mcount_exit
228
229#ifdef CONFIG_FUNCTION_GRAPH_TRACER
/*
 * Return trampoline for the function-graph tracer: asks
 * ftrace_return_to_handler() for the original return address (which the
 * tracer replaced on function entry) and returns there, preserving r0-r3.
 */
230	.globl return_to_handler
231return_to_handler:
232	stmdb	sp!, {r0-r3}
233	mov	r0, fp			@ frame pointer
234	bl	ftrace_return_to_handler
235	mov	lr, r0			@ r0 has real ret addr
236	ldmia	sp!, {r0-r3}
237	ret	lr

238#endif
239
/* No-op tracer: the default target compared against in __mcount and the
 * default destination of the patchable "bl ftrace_stub" call sites. */
240ENTRY(ftrace_stub)
241.Lftrace_stub:
242	ret	lr
243ENDPROC(ftrace_stub)
@ NOTE(review): second snapshot of the same file follows, from Linux v6.8.
  1/* SPDX-License-Identifier: GPL-2.0-only */
 
 
 
 
  2
  3#include <asm/assembler.h>
  4#include <asm/ftrace.h>
  5#include <asm/unwind.h>
  6
  7#include "entry-header.S"
  8
  9/*
 10 * When compiling with -pg, gcc inserts a call to the mcount routine at the
 11 * start of every function.  In mcount, apart from the function's address (in
 12 * lr), we need to get hold of the function's caller's address.
 13 *
 14 * Newer GCCs (4.4+) solve this problem by using a version of mcount with call
 15 * sites like:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 16 *
 17 *	push	{lr}
 18 *	bl	__gnu_mcount_nc
 19 *
 20 * With these compilers, frame pointers are not necessary.
 21 *
 22 * mcount can be thought of as a function called in the middle of a subroutine
 23 * call.  As such, it needs to be transparent for both the caller and the
 24 * callee: the original lr needs to be restored when leaving mcount, and no
 25 * registers should be clobbered.
 
 
 
 26 *
 27 * When using dynamic ftrace, we patch out the mcount call by a "add sp, #4"
 28 * instead of the __gnu_mcount_nc call (see arch/arm/kernel/ftrace.c).
 
 29 */
 30
 
 
 
 
 
 
 31.macro mcount_adjust_addr rd, rn
	@ \rd = \rn with the Thumb bit cleared, minus MCOUNT_INSN_SIZE:
	@ turns a raw return address into the address of the traced
	@ (instrumented) function's mcount call site.
 32	bic	\rd, \rn, #1		@ clear the Thumb bit if present
 33	sub	\rd, \rd, #MCOUNT_INSN_SIZE
 34.endm
 35
 36.macro __mcount suffix
	@ Common mcount body: dispatch to the registered function tracer
	@ and/or the function-graph tracer, or fall straight through when
	@ nothing is registered.
 37	mcount_enter
 38	ldr_va	r2, ftrace_trace_function
 39	badr	r0, .Lftrace_stub

	@ Fast path: if ftrace_trace_function still points at ftrace_stub,
	@ no tracer is registered.
 40	cmp	r0, r2
 41	bne	1f
 42
 43#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	@ Graph tracing is active if either ftrace_graph_return or
	@ ftrace_graph_entry has been changed from its stub default.
 44	ldr_va	r2, ftrace_graph_return
 45	cmp	r0, r2
 46	bne	ftrace_graph_caller\suffix
 47
 48	ldr_va	r2, ftrace_graph_entry
 49	mov_l	r0, ftrace_graph_entry_stub
 50	cmp	r0, r2
 51	bne	ftrace_graph_caller\suffix


 52#endif
 53
 54	mcount_exit
 55
	@ Slow path: r2 = registered tracer; call it as
	@ tracer(func, parent_lr) and return to 2: afterwards.
 561: 	mcount_get_lr	r1			@ lr of instrumented func
 57	mcount_adjust_addr	r0, lr		@ instrumented function
 58	badr	lr, 2f
 59	mov	pc, r2
 602:	mcount_exit
 61.endm
 62
 63#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
 64
/*
 * Build a struct pt_regs on the stack (layout diagrammed below) and call
 * the current tracing op with full register state, then restore all
 * registers and return into the instrumented function.
 */
 65.macro __ftrace_regs_caller
 66
 67	str	lr, [sp, #-8]!	@ store LR as PC and make space for CPSR/OLD_R0,
 68				@ OLD_R0 will overwrite previous LR
 69
 70	ldr	lr, [sp, #8]    @ get previous LR
 71
 72	str	r0, [sp, #8]	@ write r0 as OLD_R0 over previous LR
 73
 74	str	lr, [sp, #-4]!	@ store previous LR as LR
 75
 76	add 	lr, sp, #16	@ move in LR the value of SP as it was
 77				@ before the push {lr} of the mcount mechanism
 78
 79	push	{r0-r11, ip, lr}
 80
 81	@ stack content at this point:
 82	@ 0  4          48   52       56            60   64    68       72
 83	@ R0 | R1 | ... | IP | SP + 4 | previous LR | LR | PSR | OLD_R0 |
 84
 85	mov	r3, sp				@ struct pt_regs*
 86
 87	ldr_va	r2, function_trace_op		@ pointer to the current
 88						@ function tracing op
 89
 90	ldr	r1, [sp, #S_LR]			@ lr of instrumented func
 91
 92	ldr	lr, [sp, #S_PC]			@ get LR
 93
 94	mcount_adjust_addr	r0, lr		@ instrumented function
 95
	@ Runtime-patched call site: rewritten by ftrace to call the
	@ active tracer with (func, parent_lr, op, pt_regs).
 96	.globl ftrace_regs_call
 97ftrace_regs_call:
 98	bl	ftrace_stub
 99
100#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	@ NOP placeholder, patched to branch to ftrace_graph_regs_caller
	@ when graph tracing is enabled.
101	.globl ftrace_graph_regs_call
102ftrace_graph_regs_call:
103ARM(	mov	r0, r0	)
104THUMB(	nop.w		)
105#endif
106
107	@ pop saved regs
108	pop	{r0-r11, ip, lr}		@ restore r0 through r12
109	ldr	lr, [sp], #4			@ restore LR
110	ldr	pc, [sp], #12			@ return via saved PC, dropping
						@ the remaining pt_regs words
111.endm
112
113#ifdef CONFIG_FUNCTION_GRAPH_TRACER
/*
 * Graph-tracer variant entered from the patched NOP in
 * __ftrace_regs_caller: calls prepare_ftrace_return(&parent_lr, func,
 * fp, sp-at-entry), then unwinds the pt_regs frame exactly as
 * __ftrace_regs_caller does.
 */
114.macro __ftrace_graph_regs_caller
115
116#ifdef CONFIG_UNWINDER_FRAME_POINTER
117	sub	r0, fp, #4		@ lr of instrumented routine (parent)
118#else
119	add	r0, sp, #S_LR
120#endif
121
122	@ called from __ftrace_regs_caller
123	ldr	r1, [sp, #S_PC]		@ instrumented routine (func)
124	mcount_adjust_addr	r1, r1
125
126	mov	r2, fpreg		@ frame pointer
127	add	r3, sp, #PT_REGS_SIZE	@ stack pointer at function entry
128	bl	prepare_ftrace_return
129
130	@ pop registers saved in ftrace_regs_caller
131	pop	{r0-r11, ip, lr}		@ restore r0 through r12
132	ldr	lr, [sp], #4			@ restore LR
133	ldr	pc, [sp], #12
134
135.endm
136#endif
137#endif
138
139.macro __ftrace_caller suffix
	@ Dynamic-ftrace caller body: r0 = instrumented function,
	@ r1 = its caller's lr, then an indirection site that ftrace
	@ patches at runtime (see arch/arm/kernel/ftrace.c).
140	mcount_enter
141
142	mcount_get_lr	r1			@ lr of instrumented func
143	mcount_adjust_addr	r0, lr		@ instrumented function
144
145#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
146	ldr_va	r2, function_trace_op		@ pointer to the current
147						@ function tracing op
148	mov r3, #0				@ regs is NULL
149#endif
150
	@ Runtime-patched call site: the "bl ftrace_stub" below is
	@ rewritten by ftrace to call the active tracer.
151	.globl ftrace_call\suffix
152ftrace_call\suffix:
153	bl	ftrace_stub
154
155#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	@ NOP placeholder, patched to branch to ftrace_graph_caller\suffix
	@ when graph tracing is enabled.
156	.globl ftrace_graph_call\suffix
157ftrace_graph_call\suffix:
158ARM(	mov	r0, r0	)
159THUMB(	nop.w		)
160#endif
161
162	mcount_exit
163.endm
164
165.macro __ftrace_graph_caller
	@ Set up and call prepare_ftrace_return(&parent_lr, func, fp,
	@ sp-at-entry) for the function-graph tracer.
166#ifdef CONFIG_UNWINDER_FRAME_POINTER
167	sub	r0, fp, #4		@ &lr of instrumented routine (&parent)
168#else
169	add	r0, sp, #20		@ &call-site-pushed lr on our stack
170#endif
171#ifdef CONFIG_DYNAMIC_FTRACE
172	@ called from __ftrace_caller, saved in mcount_enter
173	ldr	r1, [sp, #16]		@ instrumented routine (func)
174	mcount_adjust_addr	r1, r1
175#else
176	@ called from __mcount, untouched in lr
177	mcount_adjust_addr	r1, lr	@ instrumented routine (func)
178#endif
179	mov	r2, fpreg		@ frame pointer
180	add	r3, sp, #24		@ stack pointer at function entry
181	bl	prepare_ftrace_return
182	mcount_exit
183.endm
184
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
185/*
186 * __gnu_mcount_nc
187 */
188
/* Save argument registers and lr on top of the lr the call site already
 * pushed ("push {lr}; bl __gnu_mcount_nc"). */
189.macro mcount_enter
190/*
191 * This pad compensates for the push {lr} at the call site.  Note that we are
192 * unable to unwind through a function which does not otherwise save its lr.
193 */
194 UNWIND(.pad	#4)
195	stmdb	sp!, {r0-r3, lr}
196 UNWIND(.save	{r0-r3, lr})
197.endm
198
199.macro mcount_get_lr reg
	@ The call-site-pushed lr sits above the 5 words (20 bytes)
	@ stored by mcount_enter.
200	ldr	\reg, [sp, #20]
201.endm
202
203.macro mcount_exit
	@ Restore r0-r3, then lr <- the call-site-pushed lr (original lr
	@ of the instrumented function) and pc <- the lr saved by
	@ mcount_enter (return into the instrumented function), dropping
	@ both remaining stack words.
204	ldmia	sp!, {r0-r3}
205	ldr	lr, [sp, #4]
206	ldr	pc, [sp], #8
207.endm
208
209ENTRY(__gnu_mcount_nc)
210UNWIND(.fnstart)
211#ifdef CONFIG_DYNAMIC_FTRACE
	@ Default (not-yet-patched) body: undo the call site's "push {lr}"
	@ and return.  When tracing is enabled, ftrace patches the call
	@ site to "add sp, #4" instead (see header comment and
	@ arch/arm/kernel/ftrace.c).
212	push	{lr}
213	ldr	lr, [sp, #4]
214	ldr	pc, [sp], #8
215#else
216	__mcount
217#endif
218UNWIND(.fnend)
219ENDPROC(__gnu_mcount_nc)
220
221#ifdef CONFIG_DYNAMIC_FTRACE
/* Dynamic-ftrace entry point; patched call sites are redirected here
 * when a tracer is active. */
222ENTRY(ftrace_caller)
223UNWIND(.fnstart)
224	__ftrace_caller
225UNWIND(.fnend)
226ENDPROC(ftrace_caller)
227
228#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
/* Variant used when the tracer needs the full register state. */
229ENTRY(ftrace_regs_caller)
230UNWIND(.fnstart)
231	__ftrace_regs_caller
232UNWIND(.fnend)
233ENDPROC(ftrace_regs_caller)
234#endif
235
236#endif
237
238#ifdef CONFIG_FUNCTION_GRAPH_TRACER
/* Function-graph tracer entry points (plain and with-regs variants). */
239ENTRY(ftrace_graph_caller)
240UNWIND(.fnstart)
241	__ftrace_graph_caller
242UNWIND(.fnend)
243ENDPROC(ftrace_graph_caller)
244
245#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
246ENTRY(ftrace_graph_regs_caller)
247UNWIND(.fnstart)
248	__ftrace_graph_regs_caller
249UNWIND(.fnend)
250ENDPROC(ftrace_graph_regs_caller)
251#endif
252#endif
253
/* Done with the __gnu_mcount_nc helper macros. */
254.purgem mcount_enter
255.purgem mcount_get_lr
256.purgem mcount_exit
257
258#ifdef CONFIG_FUNCTION_GRAPH_TRACER
/*
 * Return trampoline for the function-graph tracer: asks
 * ftrace_return_to_handler() for the original return address (which the
 * tracer replaced on function entry) and returns there, preserving r0-r3.
 */
259ENTRY(return_to_handler)

260	stmdb	sp!, {r0-r3}
261	add	r0, sp, #16		@ sp at exit of instrumented routine
262	bl	ftrace_return_to_handler
263	mov	lr, r0			@ r0 has real ret addr
264	ldmia	sp!, {r0-r3}
265	ret	lr
266ENDPROC(return_to_handler)
267#endif
268
/* No-op tracer: the default target compared against in __mcount and the
 * default destination of the patchable "bl ftrace_stub" call sites. */
269ENTRY(ftrace_stub)
270.Lftrace_stub:
271	ret	lr
272ENDPROC(ftrace_stub)
273
274#ifdef CONFIG_DYNAMIC_FTRACE
275
276	__INIT
277
	@ Emit a trampoline, placed in the init section, that does an
	@ absolute (literal-pool) jump to \dst.
	@ NOTE(review): presumably so patched call sites in init code can
	@ reach the tracer despite branch-range limits — confirm against
	@ arch/arm/kernel/ftrace.c.
278	.macro	init_tramp, dst:req
279ENTRY(\dst\()_from_init)
280	ldr	pc, =\dst
281ENDPROC(\dst\()_from_init)
282	.endm
283
284	init_tramp	ftrace_caller
285#ifdef CONFIG_DYNAMIC_FTRACE_WITH_REGS
286	init_tramp	ftrace_regs_caller
287#endif
288#endif