/*
 * Copyright (C) 2000 Anton Blanchard (anton@linuxcare.com)
 *
 * This file implements mcount(), which is used to collect profiling data.
 * This can also be tweaked for kernel stack overflow detection.
 */

#include <linux/linkage.h>

/*
 * This is the main variant and is called by C code.  GCC's -pg option
 * automatically instruments every C function with a call to this.
 */

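/*
 * As a rough sketch (not verbatim compiler output, and the frame size and
 * delay-slot contents are illustrative only), a C function built with -pg
 * ends up calling here right after its register window save:
 *
 *	foo:
 *		save	%sp, -176, %sp
 *		call	_mcount
 *		 nop
 *
 * So on entry %o7 points back into the instrumented function (the "self"
 * address) and %i7 still holds that function's caller's return address
 * (the "parent" address).  The code below hands those two values to the
 * registered tracer.
 */
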
	.text
	.align	32
	.globl	_mcount
	.type	_mcount,#function
	.globl	mcount
	.type	mcount,#function
_mcount:
mcount:
#ifdef CONFIG_FUNCTION_TRACER
#ifdef CONFIG_DYNAMIC_FTRACE
	/* Do nothing, the retl/nop below is all we need. */
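	/*
	 * With CONFIG_DYNAMIC_FTRACE the compiler-inserted calls to this
	 * function are patched to nops early in boot and rewritten to call
	 * ftrace_caller below once tracing is enabled, so this entry point
	 * only has to return.
	 */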
#else
	sethi	%hi(function_trace_stop), %g1
	lduw	[%g1 + %lo(function_trace_stop)], %g2
	brnz,pn	%g2, 2f
	 sethi	%hi(ftrace_trace_function), %g1
	sethi	%hi(ftrace_stub), %g2
	ldx	[%g1 + %lo(ftrace_trace_function)], %g1
	or	%g2, %lo(ftrace_stub), %g2
	cmp	%g1, %g2
	be,pn	%icc, 1f
	 mov	%i7, %g3
	save	%sp, -176, %sp
	mov	%g3, %o1
	jmpl	%g1, %o7
	 mov	%i7, %o0
	ret
	 restore
	/* not reached */
1:
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
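	/*
	 * %g2 still holds the address of ftrace_stub.  Fall through to
	 * ftrace_graph_caller if either ftrace_graph_return or
	 * ftrace_graph_entry has been switched away from its default stub.
	 */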
	sethi	%hi(ftrace_graph_return), %g1
	ldx	[%g1 + %lo(ftrace_graph_return)], %g3
	cmp	%g2, %g3
	bne,pn	%xcc, 5f
	 sethi	%hi(ftrace_graph_entry_stub), %g2
	sethi	%hi(ftrace_graph_entry), %g1
	or	%g2, %lo(ftrace_graph_entry_stub), %g2
	ldx	[%g1 + %lo(ftrace_graph_entry)], %g1
	cmp	%g1, %g2
	be,pt	%xcc, 2f
	 nop
5:	mov	%i7, %g2
	mov	%fp, %g3
	save	%sp, -176, %sp
	mov	%g2, %l0
	ba,pt	%xcc, ftrace_graph_caller
	 mov	%g3, %l1
#endif
2:
#endif
#endif
	retl
	 nop
	.size	_mcount,.-_mcount
	.size	mcount,.-mcount

#ifdef CONFIG_FUNCTION_TRACER
	.globl	ftrace_stub
	.type	ftrace_stub,#function
ftrace_stub:
	retl
	 nop
	.size	ftrace_stub,.-ftrace_stub
#ifdef CONFIG_DYNAMIC_FTRACE
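	/*
	 * ftrace_caller is the target that dynamic ftrace patches the
	 * mcount call sites to call once tracing is enabled.  The calls at
	 * ftrace_call (and ftrace_graph_call, when the graph tracer is
	 * built in) start out pointing at ftrace_stub and are themselves
	 * rewritten at runtime to point at the active tracer.
	 */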
	.globl	ftrace_caller
	.type	ftrace_caller,#function
ftrace_caller:
	sethi	%hi(function_trace_stop), %g1
	mov	%i7, %g2
	lduw	[%g1 + %lo(function_trace_stop)], %g1
	brnz,pn	%g1, ftrace_stub
	 mov	%fp, %g3
	save	%sp, -176, %sp
	mov	%g2, %o1
	mov	%g2, %l0
	mov	%g3, %l1
	.globl	ftrace_call
ftrace_call:
	call	ftrace_stub
	 mov	%i7, %o0
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.globl	ftrace_graph_call
ftrace_graph_call:
	call	ftrace_stub
	 nop
#endif
	ret
	 restore
#ifdef CONFIG_FUNCTION_GRAPH_TRACER
	.size	ftrace_graph_call,.-ftrace_graph_call
#endif
	.size	ftrace_call,.-ftrace_call
	.size	ftrace_caller,.-ftrace_caller
#endif
#endif

#ifdef CONFIG_FUNCTION_GRAPH_TRACER
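/*
 * ftrace_graph_caller passes the traced function's original return
 * address (%l0), the callsite inside the traced function (%i7) and the
 * saved frame pointer (%l1) to prepare_ftrace_return(), which hands back
 * the address the traced function should really return to.  The restore
 * writes %o0 - 8 into the traced function's %i7 so that its eventual
 * "jmpl %i7 + 8" return lands exactly on that address.
 */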
ENTRY(ftrace_graph_caller)
	mov	%l0, %o0
	mov	%i7, %o1
	call	prepare_ftrace_return
	 mov	%l1, %o2
	ret
	 restore	%o0, -8, %i7
END(ftrace_graph_caller)

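/*
 * The traced function "returns" here when its return address was hooked
 * by the graph tracer.  ftrace_return_to_handler() is handed the frame
 * pointer and pops the real return address off the return trace stack;
 * jumping to it + 8 mirrors the normal "jmpl %i7 + 8" return sequence.
 */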
ENTRY(return_to_handler)
	save	%sp, -176, %sp
	call	ftrace_return_to_handler
	 mov	%fp, %o0
	jmpl	%o0 + 8, %g0
	 restore
END(return_to_handler)
#endif