Linux kernel source listing: SPARC64 mcount/ftrace trampolines (arch/sparc), first shown as of kernel v6.2.
  1/* SPDX-License-Identifier: GPL-2.0 */
  2/*
  3 * Copyright (C) 2000 Anton Blanchard (anton@linuxcare.com)
  4 *
  5 * This file implements mcount(), which is used to collect profiling data.
  6 * This can also be tweaked for kernel stack overflow detection.
  7 */
  8
  9#include <linux/linkage.h>
 10#include <asm/export.h>
 11
 12/*
 13 * This is the main variant and is called by C code.  GCC's -pg option
 14 * automatically instruments every C function with a call to this.
 15 */
 16
 17	.text
 18	.align		32
 19	.globl		_mcount
 20	.type		_mcount,#function
 21	EXPORT_SYMBOL(_mcount)
 22	.globl		mcount
 23	.type		mcount,#function
	/*
	 * Profiling hook that gcc -pg inserts at the entry of every
	 * instrumented C function.  Per the SPARC calling convention, on
	 * entry %o7 holds the return address back into the instrumented
	 * function and %i7 the return address into that function's caller.
	 * Only %g1-%g3 are used as scratch.
	 */
 24_mcount:
 25mcount:
 26#ifdef CONFIG_FUNCTION_TRACER
 27#ifdef CONFIG_DYNAMIC_FTRACE
 28	/* Do nothing, the retl/nop below is all we need.  */
 29#else
	/*
	 * If ftrace_trace_function still points at ftrace_stub, no function
	 * tracer is registered: branch to 1f and check the graph tracer.
	 */
 30	sethi		%hi(ftrace_trace_function), %g1



 31	sethi		%hi(ftrace_stub), %g2
 32	ldx		[%g1 + %lo(ftrace_trace_function)], %g1
 33	or		%g2, %lo(ftrace_stub), %g2
 34	cmp		%g1, %g2
 35	be,pn		%icc, 1f
 36	 mov		%i7, %g3	/* delay slot: %g3 = parent return address */
	/*
	 * A tracer is installed: open a new register window (176 bytes is
	 * the minimal sparc64 stack frame) and call it with
	 * (call_site, parent_addr).  After 'save', old %o7 appears as %i7.
	 */
 37	save		%sp, -176, %sp
 38	mov		%g3, %o1
 39	jmpl		%g1, %o7
 40	 mov		%i7, %o0	/* delay slot: arg0 = call site in traced func */
 41	ret
 42	 restore
 43	/* not reached */
 441:
 45#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	/*
	 * Graph tracing is active when ftrace_graph_return != ftrace_stub
	 * (%g2 still holds ftrace_stub's address here) or when
	 * ftrace_graph_entry != ftrace_graph_entry_stub.
	 */
 46	sethi		%hi(ftrace_graph_return), %g1
 47	ldx		[%g1 + %lo(ftrace_graph_return)], %g3
 48	cmp		%g2, %g3
 49	bne,pn		%xcc, 5f
 50	 sethi		%hi(ftrace_graph_entry_stub), %g2	/* delay slot */
 51	sethi		%hi(ftrace_graph_entry), %g1
 52	or		%g2, %lo(ftrace_graph_entry_stub), %g2
 53	ldx		[%g1 + %lo(ftrace_graph_entry)], %g1
 54	cmp		%g1, %g2
 55	be,pt		%xcc, 2f
 56	 nop
	/*
	 * Capture parent return address and frame pointer before the window
	 * switch, then enter ftrace_graph_caller with %l0 = parent return
	 * address and %l1 = the traced function's frame pointer.
	 */
 575:	mov		%i7, %g2
 58	mov		%fp, %g3
 59	save		%sp, -176, %sp
 60	mov		%g2, %l0
 61	ba,pt		%xcc, ftrace_graph_caller
 62	 mov		%g3, %l1	/* delay slot */
 63#endif
 642:
 65#endif
 66#endif
 67	retl
 68	 nop
 69	.size		_mcount,.-_mcount
 70	.size		mcount,.-mcount
 71
 72#ifdef CONFIG_FUNCTION_TRACER
 73	.globl		ftrace_stub
 74	.type		ftrace_stub,#function
	/*
	 * No-op tracer: just return.  Its address also serves as the
	 * "no tracer registered" sentinel that _mcount compares against.
	 */
 75ftrace_stub:
 76	retl
 77	 nop
 78	.size		ftrace_stub,.-ftrace_stub
 79#ifdef CONFIG_DYNAMIC_FTRACE
 80	.globl		ftrace_caller
 81	.type		ftrace_caller,#function
	/*
	 * Dynamic-ftrace trampoline.  The 'call ftrace_stub' instructions at
	 * ftrace_call and ftrace_graph_call are runtime-patched by ftrace to
	 * point at the active tracer (see ftrace-design documentation).
	 */
 82ftrace_caller:

	/* Capture parent return address and frame pointer before 'save'. */
 83	mov		%i7, %g2
 84	mov		%fp, %g3


 85	save		%sp, -176, %sp	/* new window, minimal sparc64 frame */
 86	mov		%g2, %o1	/* arg1 = parent return address */
 87	mov		%g2, %l0	/* copies kept for ftrace_graph_caller */
 88	mov		%g3, %l1
 89	.globl		ftrace_call
 90ftrace_call:
 91	call		ftrace_stub	/* patched site: tracer(call_site, parent) */
 92	 mov		%i7, %o0	/* delay slot: arg0 = call site */
 93#ifdef CONFIG_FUNCTION_GRAPH_TRACER
 94	.globl		ftrace_graph_call
 95ftrace_graph_call:
 96	call		ftrace_stub	/* patched site for graph tracing */
 97	 nop
 98#endif
 99	ret
100	 restore
101#ifdef CONFIG_FUNCTION_GRAPH_TRACER
102	.size		ftrace_graph_call,.-ftrace_graph_call
103#endif
104	.size		ftrace_call,.-ftrace_call
105	.size		ftrace_caller,.-ftrace_caller
106#endif
107#endif
108
109#ifdef CONFIG_FUNCTION_GRAPH_TRACER
110ENTRY(ftrace_graph_caller)
	/*
	 * Entered from mcount/ftrace_caller with %l0 = parent return address
	 * and %l1 = the traced function's frame pointer (captured before
	 * 'save').  prepare_ftrace_return() receives those plus %i7 (this
	 * trampoline's call site) and returns the address the traced
	 * function should return to.  'restore %o0, -8, %i7' writes that
	 * value, biased by -8 because a SPARC return address points at the
	 * call instruction itself, into the parent window's %i7 — diverting
	 * the eventual return (to return_to_handler).
	 */
111	mov		%l0, %o0
112	mov		%i7, %o1
113	call		prepare_ftrace_return
114	 mov		%l1, %o2	/* delay slot: arg2 = frame pointer */
115	ret
116	 restore	%o0, -8, %i7
117END(ftrace_graph_caller)
118
119ENTRY(return_to_handler)
	/*
	 * Landing pad for returns diverted by ftrace_graph_caller.
	 * ftrace_return_to_handler(%fp) records the function exit and
	 * returns the original return address; jump there (+8 skips the
	 * call and its delay slot, mirroring what 'ret' does with %i7)
	 * while 'restore' pops the register window in the delay slot.
	 */
120	save		%sp, -176, %sp
121	call		ftrace_return_to_handler
122	 mov		%fp, %o0	/* delay slot: pass frame pointer */
123	jmpl		%o0 + 8, %g0
124	 restore
125END(return_to_handler)
126#endif
The second listing below is the same file as of kernel v3.5.6; this older variant additionally checks the (since-removed) function_trace_stop flag before tracing.
 
  1/*
  2 * Copyright (C) 2000 Anton Blanchard (anton@linuxcare.com)
  3 *
  4 * This file implements mcount(), which is used to collect profiling data.
  5 * This can also be tweaked for kernel stack overflow detection.
  6 */
  7
  8#include <linux/linkage.h>
 
  9
 10/*
 11 * This is the main variant and is called by C code.  GCC's -pg option
 12 * automatically instruments every C function with a call to this.
 13 */
 14
 15	.text
 16	.align		32
 17	.globl		_mcount
 18	.type		_mcount,#function

 19	.globl		mcount
 20	.type		mcount,#function
	/*
	 * Profiling hook that gcc -pg inserts at the entry of every
	 * instrumented C function.  Per the SPARC calling convention, on
	 * entry %o7 holds the return address back into the instrumented
	 * function and %i7 the return address into that function's caller.
	 * Only %g1-%g3 are used as scratch.
	 */
 21_mcount:
 22mcount:
 23#ifdef CONFIG_FUNCTION_TRACER
 24#ifdef CONFIG_DYNAMIC_FTRACE
 25	/* Do nothing, the retl/nop below is all we need.  */
 26#else
	/*
	 * function_trace_stop != 0 means tracing is temporarily disabled:
	 * bail out straight to the plain-return path at 2:.
	 */
 27	sethi		%hi(function_trace_stop), %g1
 28	lduw		[%g1 + %lo(function_trace_stop)], %g2
 29	brnz,pn		%g2, 2f
 30	 sethi		%hi(ftrace_trace_function), %g1	/* delay slot */
	/*
	 * If ftrace_trace_function still points at ftrace_stub, no function
	 * tracer is registered: branch to 1f and check the graph tracer.
	 */
 31	sethi		%hi(ftrace_stub), %g2
 32	ldx		[%g1 + %lo(ftrace_trace_function)], %g1
 33	or		%g2, %lo(ftrace_stub), %g2
 34	cmp		%g1, %g2
 35	be,pn		%icc, 1f
 36	 mov		%i7, %g3	/* delay slot: %g3 = parent return address */
	/*
	 * A tracer is installed: open a new register window (176 bytes is
	 * the minimal sparc64 stack frame) and call it with
	 * (call_site, parent_addr).  After 'save', old %o7 appears as %i7.
	 */
 37	save		%sp, -176, %sp
 38	mov		%g3, %o1
 39	jmpl		%g1, %o7
 40	 mov		%i7, %o0	/* delay slot: arg0 = call site in traced func */
 41	ret
 42	 restore
 43	/* not reached */
 441:
 45#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	/*
	 * Graph tracing is active when ftrace_graph_return != ftrace_stub
	 * (%g2 still holds ftrace_stub's address here) or when
	 * ftrace_graph_entry != ftrace_graph_entry_stub.
	 */
 46	sethi		%hi(ftrace_graph_return), %g1
 47	ldx		[%g1 + %lo(ftrace_graph_return)], %g3
 48	cmp		%g2, %g3
 49	bne,pn		%xcc, 5f
 50	 sethi		%hi(ftrace_graph_entry_stub), %g2	/* delay slot */
 51	sethi		%hi(ftrace_graph_entry), %g1
 52	or		%g2, %lo(ftrace_graph_entry_stub), %g2
 53	ldx		[%g1 + %lo(ftrace_graph_entry)], %g1
 54	cmp		%g1, %g2
 55	be,pt		%xcc, 2f
 56	 nop
	/*
	 * Capture parent return address and frame pointer before the window
	 * switch, then enter ftrace_graph_caller with %l0 = parent return
	 * address and %l1 = the traced function's frame pointer.
	 */
 575:	mov		%i7, %g2
 58	mov		%fp, %g3
 59	save		%sp, -176, %sp
 60	mov		%g2, %l0
 61	ba,pt		%xcc, ftrace_graph_caller
 62	 mov		%g3, %l1	/* delay slot */
 63#endif
 642:
 65#endif
 66#endif
 67	retl
 68	 nop
 69	.size		_mcount,.-_mcount
 70	.size		mcount,.-mcount
 71
 72#ifdef CONFIG_FUNCTION_TRACER
 73	.globl		ftrace_stub
 74	.type		ftrace_stub,#function
	/*
	 * No-op tracer: just return.  Its address also serves as the
	 * "no tracer registered" sentinel that _mcount compares against.
	 */
 75ftrace_stub:
 76	retl
 77	 nop
 78	.size		ftrace_stub,.-ftrace_stub
 79#ifdef CONFIG_DYNAMIC_FTRACE
 80	.globl		ftrace_caller
 81	.type		ftrace_caller,#function
	/*
	 * Dynamic-ftrace trampoline.  The 'call ftrace_stub' instructions at
	 * ftrace_call and ftrace_graph_call are runtime-patched by ftrace to
	 * point at the active tracer (see ftrace-design documentation).
	 */
 82ftrace_caller:
	/*
	 * If function_trace_stop is set, branch to ftrace_stub (a plain
	 * retl) and do no tracing.  Parent return address and frame pointer
	 * are captured before 'save' switches register windows.
	 */
 83	sethi		%hi(function_trace_stop), %g1
 84	mov		%i7, %g2
 85	lduw		[%g1 + %lo(function_trace_stop)], %g1
 86	brnz,pn		%g1, ftrace_stub
 87	 mov		%fp, %g3	/* delay slot: executes either way */
 88	save		%sp, -176, %sp	/* new window, minimal sparc64 frame */
 89	mov		%g2, %o1	/* arg1 = parent return address */
 90	mov		%g2, %l0	/* copies kept for ftrace_graph_caller */
 91	mov		%g3, %l1
 92	.globl		ftrace_call
 93ftrace_call:
 94	call		ftrace_stub	/* patched site: tracer(call_site, parent) */
 95	 mov		%i7, %o0	/* delay slot: arg0 = call site */
 96#ifdef CONFIG_FUNCTION_GRAPH_TRACER
 97	.globl		ftrace_graph_call
 98ftrace_graph_call:
 99	call		ftrace_stub	/* patched site for graph tracing */
100	 nop
101#endif
102	ret
103	 restore
104#ifdef CONFIG_FUNCTION_GRAPH_TRACER
105	.size		ftrace_graph_call,.-ftrace_graph_call
106#endif
107	.size		ftrace_call,.-ftrace_call
108	.size		ftrace_caller,.-ftrace_caller
109#endif
110#endif
111
112#ifdef CONFIG_FUNCTION_GRAPH_TRACER
113ENTRY(ftrace_graph_caller)
	/*
	 * Entered from mcount/ftrace_caller with %l0 = parent return address
	 * and %l1 = the traced function's frame pointer (captured before
	 * 'save').  prepare_ftrace_return() receives those plus %i7 (this
	 * trampoline's call site) and returns the address the traced
	 * function should return to.  'restore %o0, -8, %i7' writes that
	 * value, biased by -8 because a SPARC return address points at the
	 * call instruction itself, into the parent window's %i7 — diverting
	 * the eventual return (to return_to_handler).
	 */
114	mov		%l0, %o0
115	mov		%i7, %o1
116	call		prepare_ftrace_return
117	 mov		%l1, %o2	/* delay slot: arg2 = frame pointer */
118	ret
119	 restore	%o0, -8, %i7
120END(ftrace_graph_caller)
121
122ENTRY(return_to_handler)
	/*
	 * Landing pad for returns diverted by ftrace_graph_caller.
	 * ftrace_return_to_handler(%fp) records the function exit and
	 * returns the original return address; jump there (+8 skips the
	 * call and its delay slot, mirroring what 'ret' does with %i7)
	 * while 'restore' pops the register window in the delay slot.
	 */
123	save		%sp, -176, %sp
124	call		ftrace_return_to_handler
125	 mov		%fp, %o0	/* delay slot: pass frame pointer */
126	jmpl		%o0 + 8, %g0
127	 restore
128END(return_to_handler)
129#endif