1/* SPDX-License-Identifier: GPL-2.0 */
2/*
3 * Traceprobe fetch helper inlines
4 */
5
6static nokprobe_inline void
7fetch_store_raw(unsigned long val, struct fetch_insn *code, void *buf)
8{
9 switch (code->size) {
10 case 1:
11 *(u8 *)buf = (u8)val;
12 break;
13 case 2:
14 *(u16 *)buf = (u16)val;
15 break;
16 case 4:
17 *(u32 *)buf = (u32)val;
18 break;
19 case 8:
20 //TBD: 32bit signed
21 *(u64 *)buf = (u64)val;
22 break;
23 default:
24 *(unsigned long *)buf = val;
25 }
26}
27
28static nokprobe_inline void
29fetch_apply_bitfield(struct fetch_insn *code, void *buf)
30{
31 switch (code->basesize) {
32 case 1:
33 *(u8 *)buf <<= code->lshift;
34 *(u8 *)buf >>= code->rshift;
35 break;
36 case 2:
37 *(u16 *)buf <<= code->lshift;
38 *(u16 *)buf >>= code->rshift;
39 break;
40 case 4:
41 *(u32 *)buf <<= code->lshift;
42 *(u32 *)buf >>= code->rshift;
43 break;
44 case 8:
45 *(u64 *)buf <<= code->lshift;
46 *(u64 *)buf >>= code->rshift;
47 break;
48 }
49}
50
/*
 * These functions must be defined for each callsite.
 * Return consumed dynamic data size (>= 0), or error (< 0).
 * If dest is NULL, don't store result and return required dynamic data size.
 */
static int
process_fetch_insn(struct fetch_insn *code, void *rec,
		   void *dest, void *base);
/* String length / copy helpers for kernel-space addresses */
static nokprobe_inline int fetch_store_strlen(unsigned long addr);
static nokprobe_inline int
fetch_store_string(unsigned long addr, void *dest, void *base);
/* String length / copy helpers for user-space addresses */
static nokprobe_inline int fetch_store_strlen_user(unsigned long addr);
static nokprobe_inline int
fetch_store_string_user(unsigned long addr, void *dest, void *base);
/* Fault-safe memory readers (kernel / user space) */
static nokprobe_inline int
probe_mem_read(void *dest, void *src, size_t size);
static nokprobe_inline int
probe_mem_read_user(void *dest, void *src, size_t size);
69
70static nokprobe_inline int
71fetch_store_symstrlen(unsigned long addr)
72{
73 char namebuf[KSYM_SYMBOL_LEN];
74 int ret;
75
76 ret = sprint_symbol(namebuf, addr);
77 if (ret < 0)
78 return 0;
79
80 return ret + 1;
81}
82
83/*
84 * Fetch a null-terminated symbol string + offset. Caller MUST set *(u32 *)buf
85 * with max length and relative data location.
86 */
87static nokprobe_inline int
88fetch_store_symstring(unsigned long addr, void *dest, void *base)
89{
90 int maxlen = get_loc_len(*(u32 *)dest);
91 void *__dest;
92
93 if (unlikely(!maxlen))
94 return -ENOMEM;
95
96 __dest = get_loc_data(dest, base);
97
98 return sprint_symbol(__dest, addr);
99}
100
101/* From the 2nd stage, routine is same */
102static nokprobe_inline int
103process_fetch_insn_bottom(struct fetch_insn *code, unsigned long val,
104 void *dest, void *base)
105{
106 struct fetch_insn *s3 = NULL;
107 int total = 0, ret = 0, i = 0;
108 u32 loc = 0;
109 unsigned long lval = val;
110
111stage2:
112 /* 2nd stage: dereference memory if needed */
113 do {
114 if (code->op == FETCH_OP_DEREF) {
115 lval = val;
116 ret = probe_mem_read(&val, (void *)val + code->offset,
117 sizeof(val));
118 } else if (code->op == FETCH_OP_UDEREF) {
119 lval = val;
120 ret = probe_mem_read_user(&val,
121 (void *)val + code->offset, sizeof(val));
122 } else
123 break;
124 if (ret)
125 return ret;
126 code++;
127 } while (1);
128
129 s3 = code;
130stage3:
131 /* 3rd stage: store value to buffer */
132 if (unlikely(!dest)) {
133 switch (code->op) {
134 case FETCH_OP_ST_STRING:
135 ret = fetch_store_strlen(val + code->offset);
136 code++;
137 goto array;
138 case FETCH_OP_ST_USTRING:
139 ret += fetch_store_strlen_user(val + code->offset);
140 code++;
141 goto array;
142 case FETCH_OP_ST_SYMSTR:
143 ret += fetch_store_symstrlen(val + code->offset);
144 code++;
145 goto array;
146 default:
147 return -EILSEQ;
148 }
149 }
150
151 switch (code->op) {
152 case FETCH_OP_ST_RAW:
153 fetch_store_raw(val, code, dest);
154 break;
155 case FETCH_OP_ST_MEM:
156 probe_mem_read(dest, (void *)val + code->offset, code->size);
157 break;
158 case FETCH_OP_ST_UMEM:
159 probe_mem_read_user(dest, (void *)val + code->offset, code->size);
160 break;
161 case FETCH_OP_ST_STRING:
162 loc = *(u32 *)dest;
163 ret = fetch_store_string(val + code->offset, dest, base);
164 break;
165 case FETCH_OP_ST_USTRING:
166 loc = *(u32 *)dest;
167 ret = fetch_store_string_user(val + code->offset, dest, base);
168 break;
169 case FETCH_OP_ST_SYMSTR:
170 loc = *(u32 *)dest;
171 ret = fetch_store_symstring(val + code->offset, dest, base);
172 break;
173 default:
174 return -EILSEQ;
175 }
176 code++;
177
178 /* 4th stage: modify stored value if needed */
179 if (code->op == FETCH_OP_MOD_BF) {
180 fetch_apply_bitfield(code, dest);
181 code++;
182 }
183
184array:
185 /* the last stage: Loop on array */
186 if (code->op == FETCH_OP_LP_ARRAY) {
187 total += ret;
188 if (++i < code->param) {
189 code = s3;
190 if (s3->op != FETCH_OP_ST_STRING &&
191 s3->op != FETCH_OP_ST_USTRING) {
192 dest += s3->size;
193 val += s3->size;
194 goto stage3;
195 }
196 code--;
197 val = lval + sizeof(char *);
198 if (dest) {
199 dest += sizeof(u32);
200 *(u32 *)dest = update_data_loc(loc, ret);
201 }
202 goto stage2;
203 }
204 code++;
205 ret = total;
206 }
207
208 return code->op == FETCH_OP_END ? ret : -EILSEQ;
209}
210
211/* Sum up total data length for dynamic arrays (strings) */
212static nokprobe_inline int
213__get_data_size(struct trace_probe *tp, struct pt_regs *regs)
214{
215 struct probe_arg *arg;
216 int i, len, ret = 0;
217
218 for (i = 0; i < tp->nr_args; i++) {
219 arg = tp->args + i;
220 if (unlikely(arg->dynamic)) {
221 len = process_fetch_insn(arg->code, regs, NULL, NULL);
222 if (len > 0)
223 ret += len;
224 }
225 }
226
227 return ret;
228}
229
230/* Store the value of each argument */
231static nokprobe_inline void
232store_trace_args(void *data, struct trace_probe *tp, void *rec,
233 int header_size, int maxlen)
234{
235 struct probe_arg *arg;
236 void *base = data - header_size;
237 void *dyndata = data + tp->size;
238 u32 *dl; /* Data location */
239 int ret, i;
240
241 for (i = 0; i < tp->nr_args; i++) {
242 arg = tp->args + i;
243 dl = data + arg->offset;
244 /* Point the dynamic data area if needed */
245 if (unlikely(arg->dynamic))
246 *dl = make_data_loc(maxlen, dyndata - base);
247 ret = process_fetch_insn(arg->code, rec, dl, base);
248 if (unlikely(ret < 0 && arg->dynamic)) {
249 *dl = make_data_loc(0, dyndata - base);
250 } else {
251 dyndata += ret;
252 maxlen -= ret;
253 }
254 }
255}
256
257static inline int
258print_probe_args(struct trace_seq *s, struct probe_arg *args, int nr_args,
259 u8 *data, void *field)
260{
261 void *p;
262 int i, j;
263
264 for (i = 0; i < nr_args; i++) {
265 struct probe_arg *a = args + i;
266
267 trace_seq_printf(s, " %s=", a->name);
268 if (likely(!a->count)) {
269 if (!a->type->print(s, data + a->offset, field))
270 return -ENOMEM;
271 continue;
272 }
273 trace_seq_putc(s, '{');
274 p = data + a->offset;
275 for (j = 0; j < a->count; j++) {
276 if (!a->type->print(s, p, field))
277 return -ENOMEM;
278 trace_seq_putc(s, j == a->count - 1 ? '}' : ',');
279 p += a->type->size;
280 }
281 }
282 return 0;
283}
1/* SPDX-License-Identifier: GPL-2.0 */
2/*
3 * Traceprobe fetch helper inlines
4 */
5
/*
 * Store @val into @buf, cast down to the width requested by the fetch
 * instruction (1/2/4/8 bytes); any other size stores a native word.
 */
static nokprobe_inline void
fetch_store_raw(unsigned long val, struct fetch_insn *code, void *buf)
{
	switch (code->size) {
	case 1:
		*(u8 *)buf = (u8)val;
		break;
	case 2:
		*(u16 *)buf = (u16)val;
		break;
	case 4:
		*(u32 *)buf = (u32)val;
		break;
	case 8:
		//TBD: 32bit signed
		*(u64 *)buf = (u64)val;
		break;
	default:
		*(unsigned long *)buf = val;
	}
}
27
/*
 * Extract a bitfield from the value already stored in @buf: shift left to
 * drop high bits, then right to drop low bits, at the insn's base width.
 */
static nokprobe_inline void
fetch_apply_bitfield(struct fetch_insn *code, void *buf)
{
	switch (code->basesize) {
	case 1:
		*(u8 *)buf <<= code->lshift;
		*(u8 *)buf >>= code->rshift;
		break;
	case 2:
		*(u16 *)buf <<= code->lshift;
		*(u16 *)buf >>= code->rshift;
		break;
	case 4:
		*(u32 *)buf <<= code->lshift;
		*(u32 *)buf >>= code->rshift;
		break;
	case 8:
		*(u64 *)buf <<= code->lshift;
		*(u64 *)buf >>= code->rshift;
		break;
	}
}
50
/*
 * These functions must be defined for each callsite.
 * Return consumed dynamic data size (>= 0), or error (< 0).
 * If dest is NULL, don't store result and return required dynamic data size.
 */
static int
process_fetch_insn(struct fetch_insn *code, void *rec,
		   void *dest, void *base);
/* String length / copy helpers for kernel-space addresses */
static nokprobe_inline int fetch_store_strlen(unsigned long addr);
static nokprobe_inline int
fetch_store_string(unsigned long addr, void *dest, void *base);
/* String length / copy helpers for user-space addresses */
static nokprobe_inline int fetch_store_strlen_user(unsigned long addr);
static nokprobe_inline int
fetch_store_string_user(unsigned long addr, void *dest, void *base);
/* Fault-safe memory readers (kernel / user space) */
static nokprobe_inline int
probe_mem_read(void *dest, void *src, size_t size);
static nokprobe_inline int
probe_mem_read_user(void *dest, void *src, size_t size);
69
/*
 * Return the buffer size needed for the "symbol+offset" form of @addr,
 * including the terminating NUL, or 0 if symbol formatting fails.
 */
static nokprobe_inline int
fetch_store_symstrlen(unsigned long addr)
{
	char namebuf[KSYM_SYMBOL_LEN];
	int ret;

	ret = sprint_symbol(namebuf, addr);
	if (ret < 0)
		return 0;

	/* +1 for the terminating NUL */
	return ret + 1;
}
82
/*
 * Fetch a null-terminated symbol string + offset. Caller MUST set *(u32 *)buf
 * with max length and relative data location.
 */
static nokprobe_inline int
fetch_store_symstring(unsigned long addr, void *dest, void *base)
{
	int maxlen = get_loc_len(*(u32 *)dest);
	void *__dest;

	/* No room left in the dynamic data area */
	if (unlikely(!maxlen))
		return -ENOMEM;

	__dest = get_loc_data(dest, base);

	return sprint_symbol(__dest, addr);
}
100
101/* common part of process_fetch_insn*/
102static nokprobe_inline int
103process_common_fetch_insn(struct fetch_insn *code, unsigned long *val)
104{
105 switch (code->op) {
106 case FETCH_OP_IMM:
107 *val = code->immediate;
108 break;
109 case FETCH_OP_COMM:
110 *val = (unsigned long)current->comm;
111 break;
112 case FETCH_OP_DATA:
113 *val = (unsigned long)code->data;
114 break;
115 default:
116 return -EILSEQ;
117 }
118 return 0;
119}
120
/* From the 2nd stage, routine is same */
static nokprobe_inline int
process_fetch_insn_bottom(struct fetch_insn *code, unsigned long val,
			  void *dest, void *base)
{
	struct fetch_insn *s3 = NULL;	/* restart point for array elements */
	int total = 0, ret = 0, i = 0;	/* total: summed dynamic data size */
	u32 loc = 0;			/* saved data_loc of current string */
	unsigned long lval = val;	/* value before the last dereference */

stage2:
	/* 2nd stage: dereference memory if needed */
	do {
		if (code->op == FETCH_OP_DEREF) {
			lval = val;
			ret = probe_mem_read(&val, (void *)val + code->offset,
					     sizeof(val));
		} else if (code->op == FETCH_OP_UDEREF) {
			lval = val;
			ret = probe_mem_read_user(&val,
				 (void *)val + code->offset, sizeof(val));
		} else
			break;
		if (ret)
			return ret;
		code++;
	} while (1);

	s3 = code;
stage3:
	/* 3rd stage: store value to buffer */
	if (unlikely(!dest)) {
		/* Size-query mode: report the required dynamic data size */
		switch (code->op) {
		case FETCH_OP_ST_STRING:
			ret = fetch_store_strlen(val + code->offset);
			code++;
			goto array;
		case FETCH_OP_ST_USTRING:
			ret = fetch_store_strlen_user(val + code->offset);
			code++;
			goto array;
		case FETCH_OP_ST_SYMSTR:
			ret = fetch_store_symstrlen(val + code->offset);
			code++;
			goto array;
		default:
			return -EILSEQ;
		}
	}

	switch (code->op) {
	case FETCH_OP_ST_RAW:
		fetch_store_raw(val, code, dest);
		break;
	case FETCH_OP_ST_MEM:
		probe_mem_read(dest, (void *)val + code->offset, code->size);
		break;
	case FETCH_OP_ST_UMEM:
		probe_mem_read_user(dest, (void *)val + code->offset, code->size);
		break;
	case FETCH_OP_ST_STRING:
		loc = *(u32 *)dest;	/* keep data_loc for array update */
		ret = fetch_store_string(val + code->offset, dest, base);
		break;
	case FETCH_OP_ST_USTRING:
		loc = *(u32 *)dest;
		ret = fetch_store_string_user(val + code->offset, dest, base);
		break;
	case FETCH_OP_ST_SYMSTR:
		loc = *(u32 *)dest;
		ret = fetch_store_symstring(val + code->offset, dest, base);
		break;
	default:
		return -EILSEQ;
	}
	code++;

	/* 4th stage: modify stored value if needed */
	if (code->op == FETCH_OP_MOD_BF) {
		fetch_apply_bitfield(code, dest);
		code++;
	}

array:
	/* the last stage: Loop on array */
	if (code->op == FETCH_OP_LP_ARRAY) {
		/* A failed element must not subtract from the total length */
		if (ret < 0)
			ret = 0;
		total += ret;
		if (++i < code->param) {
			code = s3;
			if (s3->op != FETCH_OP_ST_STRING &&
			    s3->op != FETCH_OP_ST_USTRING) {
				/* Fixed-size element: just advance buffers */
				dest += s3->size;
				val += s3->size;
				goto stage3;
			}
			/* String element: re-dereference the next pointer */
			code--;
			val = lval + sizeof(char *);
			if (dest) {
				dest += sizeof(u32);
				*(u32 *)dest = update_data_loc(loc, ret);
			}
			goto stage2;
		}
		code++;
		ret = total;
	}

	return code->op == FETCH_OP_END ? ret : -EILSEQ;
}
232
/* Sum up total data length for dynamic arrays (strings) */
static nokprobe_inline int
__get_data_size(struct trace_probe *tp, struct pt_regs *regs)
{
	struct probe_arg *arg;
	int i, len, ret = 0;

	for (i = 0; i < tp->nr_args; i++) {
		arg = tp->args + i;
		/* Only dynamic arguments consume the dynamic data area */
		if (unlikely(arg->dynamic)) {
			/* dest == NULL means "return required size only" */
			len = process_fetch_insn(arg->code, regs, NULL, NULL);
			if (len > 0)
				ret += len;
		}
	}

	return ret;
}
251
/* Store the value of each argument */
static nokprobe_inline void
store_trace_args(void *data, struct trace_probe *tp, void *rec,
		 int header_size, int maxlen)
{
	struct probe_arg *arg;
	void *base = data - header_size;	/* record base for data_loc offsets */
	void *dyndata = data + tp->size;	/* start of the dynamic data area */
	u32 *dl;	/* Data location */
	int ret, i;

	for (i = 0; i < tp->nr_args; i++) {
		arg = tp->args + i;
		dl = data + arg->offset;
		/* Point the dynamic data area if needed */
		if (unlikely(arg->dynamic))
			*dl = make_data_loc(maxlen, dyndata - base);
		ret = process_fetch_insn(arg->code, rec, dl, base);
		/* Only a successful dynamic fetch consumes the data area */
		if (arg->dynamic && likely(ret > 0)) {
			dyndata += ret;
			maxlen -= ret;
		}
	}
}