// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
 */

#include <stdio.h>
#include <stdlib.h>

#define unlikely(cond) (cond)
#include <asm/insn.h>
#include "../../../arch/x86/lib/inat.c"
#include "../../../arch/x86/lib/insn.c"

#include "../../elf.h"
#include "../../arch.h"
#include "../../warn.h"

static unsigned char op_to_cfi_reg[][2] = {
	{CFI_AX, CFI_R8},
	{CFI_CX, CFI_R9},
	{CFI_DX, CFI_R10},
	{CFI_BX, CFI_R11},
	{CFI_SP, CFI_R12},
	{CFI_BP, CFI_R13},
	{CFI_SI, CFI_R14},
	{CFI_DI, CFI_R15},
};
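
/*
 * The table above is indexed by the 3-bit register field of the opcode or
 * ModRM byte, with the matching REX extension bit selecting the column.
 * Illustrative example (not from the original source): "push %r12" encodes
 * as 41 54, so op1 & 0x7 = 4 and rex_b = 1, giving op_to_cfi_reg[4][1],
 * i.e. CFI_R12.
 */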

static int is_x86_64(struct elf *elf)
{
	switch (elf->ehdr.e_machine) {
	case EM_X86_64:
		return 1;
	case EM_386:
		return 0;
	default:
		WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
		return -1;
	}
}

bool arch_callee_saved_reg(unsigned char reg)
{
	switch (reg) {
	case CFI_BP:
	case CFI_BX:
	case CFI_R12:
	case CFI_R13:
	case CFI_R14:
	case CFI_R15:
		return true;

	case CFI_AX:
	case CFI_CX:
	case CFI_DX:
	case CFI_SI:
	case CFI_DI:
	case CFI_SP:
	case CFI_R8:
	case CFI_R9:
	case CFI_R10:
	case CFI_R11:
	case CFI_RA:
	default:
		return false;
	}
}

int arch_decode_instruction(struct elf *elf, struct section *sec,
			    unsigned long offset, unsigned int maxlen,
			    unsigned int *len, enum insn_type *type,
			    unsigned long *immediate, struct stack_op *op)
{
	struct insn insn;
	int x86_64, sign;
	unsigned char op1, op2, rex = 0, rex_b = 0, rex_r = 0, rex_w = 0,
		      rex_x = 0, modrm = 0, modrm_mod = 0, modrm_rm = 0,
		      modrm_reg = 0, sib = 0;

	x86_64 = is_x86_64(elf);
	if (x86_64 == -1)
		return -1;

	insn_init(&insn, sec->data->d_buf + offset, maxlen, x86_64);
	insn_get_length(&insn);

	if (!insn_complete(&insn)) {
		WARN_FUNC("can't decode instruction", sec, offset);
		return -1;
	}

	*len = insn.length;
	*type = INSN_OTHER;

	if (insn.vex_prefix.nbytes)
		return 0;

	op1 = insn.opcode.bytes[0];
	op2 = insn.opcode.bytes[1];

	if (insn.rex_prefix.nbytes) {
		rex = insn.rex_prefix.bytes[0];
		rex_w = X86_REX_W(rex) >> 3;
		rex_r = X86_REX_R(rex) >> 2;
		rex_x = X86_REX_X(rex) >> 1;
		rex_b = X86_REX_B(rex);
	}

	if (insn.modrm.nbytes) {
		modrm = insn.modrm.bytes[0];
		modrm_mod = X86_MODRM_MOD(modrm);
		modrm_reg = X86_MODRM_REG(modrm);
		modrm_rm = X86_MODRM_RM(modrm);
	}

	if (insn.sib.nbytes)
		sib = insn.sib.bytes[0];
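
	/*
	 * Note (added for clarity): a SIB byte of 0x24, which several cases
	 * below test for, encodes base = %rsp, index = none and scale = 1,
	 * i.e. a plain (%rsp) memory operand.
	 */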

	switch (op1) {

	case 0x1:
	case 0x29:
		if (rex_w && !rex_b && modrm_mod == 3 && modrm_rm == 4) {

			/* add/sub reg, %rsp */
			*type = INSN_STACK;
			op->src.type = OP_SRC_ADD;
			op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}
		break;

	case 0x50 ... 0x57:

		/* push reg */
		*type = INSN_STACK;
		op->src.type = OP_SRC_REG;
		op->src.reg = op_to_cfi_reg[op1 & 0x7][rex_b];
		op->dest.type = OP_DEST_PUSH;

		break;

	case 0x58 ... 0x5f:

		/* pop reg */
		*type = INSN_STACK;
		op->src.type = OP_SRC_POP;
		op->dest.type = OP_DEST_REG;
		op->dest.reg = op_to_cfi_reg[op1 & 0x7][rex_b];

		break;

	case 0x68:
	case 0x6a:
		/* push immediate */
		*type = INSN_STACK;
		op->src.type = OP_SRC_CONST;
		op->dest.type = OP_DEST_PUSH;
		break;

	case 0x70 ... 0x7f:
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0x81:
	case 0x83:
		if (rex != 0x48)
			break;

		if (modrm == 0xe4) {
			/* and imm, %rsp */
			*type = INSN_STACK;
			op->src.type = OP_SRC_AND;
			op->src.reg = CFI_SP;
			op->src.offset = insn.immediate.value;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
			break;
		}

		if (modrm == 0xc4)
			sign = 1;
		else if (modrm == 0xec)
			sign = -1;
		else
			break;

		/* add/sub imm, %rsp */
		*type = INSN_STACK;
		op->src.type = OP_SRC_ADD;
		op->src.reg = CFI_SP;
		op->src.offset = insn.immediate.value * sign;
		op->dest.type = OP_DEST_REG;
		op->dest.reg = CFI_SP;
		break;
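
		/*
		 * Worked example (illustrative, not from the original source):
		 * "sub $0x28, %rsp" encodes as 48 83 ec 28.  rex == 0x48 and
		 * modrm == 0xec, so sign = -1 and the op above records an ADD
		 * of -0x28 to CFI_SP.
		 */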

	case 0x89:
		if (rex_w && !rex_r && modrm_mod == 3 && modrm_reg == 4) {

			/* mov %rsp, reg */
			*type = INSN_STACK;
			op->src.type = OP_SRC_REG;
			op->src.reg = CFI_SP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = op_to_cfi_reg[modrm_rm][rex_b];
			break;
		}

		if (rex_w && !rex_b && modrm_mod == 3 && modrm_rm == 4) {

			/* mov reg, %rsp */
			*type = INSN_STACK;
			op->src.type = OP_SRC_REG;
			op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
			break;
		}

		/* fallthrough */
	case 0x88:
		if (!rex_b &&
		    (modrm_mod == 1 || modrm_mod == 2) && modrm_rm == 5) {

			/* mov reg, disp(%rbp) */
			*type = INSN_STACK;
			op->src.type = OP_SRC_REG;
			op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
			op->dest.type = OP_DEST_REG_INDIRECT;
			op->dest.reg = CFI_BP;
			op->dest.offset = insn.displacement.value;

		} else if (rex_w && !rex_b && modrm_rm == 4 && sib == 0x24) {

			/* mov reg, disp(%rsp) */
			*type = INSN_STACK;
			op->src.type = OP_SRC_REG;
			op->src.reg = op_to_cfi_reg[modrm_reg][rex_r];
			op->dest.type = OP_DEST_REG_INDIRECT;
			op->dest.reg = CFI_SP;
			op->dest.offset = insn.displacement.value;
		}

		break;

	case 0x8b:
		if (rex_w && !rex_b && modrm_mod == 1 && modrm_rm == 5) {

			/* mov disp(%rbp), reg */
			*type = INSN_STACK;
			op->src.type = OP_SRC_REG_INDIRECT;
			op->src.reg = CFI_BP;
			op->src.offset = insn.displacement.value;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];

		} else if (rex_w && !rex_b && sib == 0x24 &&
			   modrm_mod != 3 && modrm_rm == 4) {

			/* mov disp(%rsp), reg */
			*type = INSN_STACK;
			op->src.type = OP_SRC_REG_INDIRECT;
			op->src.reg = CFI_SP;
			op->src.offset = insn.displacement.value;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];
		}

		break;

	case 0x8d:
		if (sib == 0x24 && rex_w && !rex_b && !rex_x) {

			*type = INSN_STACK;
			if (!insn.displacement.value) {
				/* lea (%rsp), reg */
				op->src.type = OP_SRC_REG;
			} else {
				/* lea disp(%rsp), reg */
				op->src.type = OP_SRC_ADD;
				op->src.offset = insn.displacement.value;
			}
			op->src.reg = CFI_SP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = op_to_cfi_reg[modrm_reg][rex_r];

		} else if (rex == 0x48 && modrm == 0x65) {

			/* lea disp(%rbp), %rsp */
			*type = INSN_STACK;
			op->src.type = OP_SRC_ADD;
			op->src.reg = CFI_BP;
			op->src.offset = insn.displacement.value;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;

		} else if (rex == 0x49 && modrm == 0x62 &&
			   insn.displacement.value == -8) {

			/*
			 * lea -0x8(%r10), %rsp
			 *
			 * Restoring rsp back to its original value after a
			 * stack realignment.
			 */
			*type = INSN_STACK;
			op->src.type = OP_SRC_ADD;
			op->src.reg = CFI_R10;
			op->src.offset = -8;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;

		} else if (rex == 0x49 && modrm == 0x65 &&
			   insn.displacement.value == -16) {

			/*
			 * lea -0x10(%r13), %rsp
			 *
			 * Restoring rsp back to its original value after a
			 * stack realignment.
			 */
			*type = INSN_STACK;
			op->src.type = OP_SRC_ADD;
			op->src.reg = CFI_R13;
			op->src.offset = -16;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}

		break;

	case 0x8f:
		/* pop to mem */
		*type = INSN_STACK;
		op->src.type = OP_SRC_POP;
		op->dest.type = OP_DEST_MEM;
		break;

	case 0x90:
		*type = INSN_NOP;
		break;

	case 0x9c:
		/* pushf */
		*type = INSN_STACK;
		op->src.type = OP_SRC_CONST;
		op->dest.type = OP_DEST_PUSHF;
		break;

	case 0x9d:
		/* popf */
		*type = INSN_STACK;
		op->src.type = OP_SRC_POPF;
		op->dest.type = OP_DEST_MEM;
		break;

	case 0x0f:

		if (op2 == 0x01) {

			if (modrm == 0xca)
				*type = INSN_CLAC;
			else if (modrm == 0xcb)
				*type = INSN_STAC;

		} else if (op2 >= 0x80 && op2 <= 0x8f) {

			*type = INSN_JUMP_CONDITIONAL;

		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
			   op2 == 0x35) {

			/* sysenter, sysret */
			*type = INSN_CONTEXT_SWITCH;

		} else if (op2 == 0x0b || op2 == 0xb9) {

			/* ud2 */
			*type = INSN_BUG;

		} else if (op2 == 0x0d || op2 == 0x1f) {

			/* nopl/nopw */
			*type = INSN_NOP;

		} else if (op2 == 0xa0 || op2 == 0xa8) {

			/* push fs/gs */
			*type = INSN_STACK;
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;

		} else if (op2 == 0xa1 || op2 == 0xa9) {

			/* pop fs/gs */
			*type = INSN_STACK;
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_MEM;
		}

		break;

	case 0xc9:
		/*
		 * leave
		 *
		 * equivalent to:
		 * mov bp, sp
		 * pop bp
		 */
		*type = INSN_STACK;
		op->dest.type = OP_DEST_LEAVE;

		break;

	case 0xe3:
		/* jecxz/jrcxz */
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe9:
	case 0xeb:
		*type = INSN_JUMP_UNCONDITIONAL;
		break;

	case 0xc2:
	case 0xc3:
		*type = INSN_RETURN;
		break;

	case 0xca: /* retf */
	case 0xcb: /* retf */
	case 0xcf: /* iret */
		*type = INSN_CONTEXT_SWITCH;
		break;

	case 0xe8:
		*type = INSN_CALL;
		break;

	case 0xfc:
		*type = INSN_CLD;
		break;

	case 0xfd:
		*type = INSN_STD;
		break;

	case 0xff:
		if (modrm_reg == 2 || modrm_reg == 3)

			*type = INSN_CALL_DYNAMIC;

		else if (modrm_reg == 4)

			*type = INSN_JUMP_DYNAMIC;

		else if (modrm_reg == 5)

			/* jmpf */
			*type = INSN_CONTEXT_SWITCH;

		else if (modrm_reg == 6) {

			/* push from mem */
			*type = INSN_STACK;
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}

		break;

	default:
		break;
	}

	*immediate = insn.immediate.nbytes ? insn.immediate.value : 0;

	return 0;
}

void arch_initial_func_cfi_state(struct cfi_state *state)
{
	int i;

	for (i = 0; i < CFI_NUM_REGS; i++) {
		state->regs[i].base = CFI_UNDEFINED;
		state->regs[i].offset = 0;
	}

	/* initial CFA (call frame address) */
	state->cfa.base = CFI_SP;
	state->cfa.offset = 8;

	/* initial RA (return address) */
	state->regs[16].base = CFI_CFA;
	state->regs[16].offset = -8;
}
// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
 */

#include <stdio.h>
#include <stdlib.h>

#define unlikely(cond) (cond)
#include <asm/insn.h>
#include "../../../arch/x86/lib/inat.c"
#include "../../../arch/x86/lib/insn.c"

#define CONFIG_64BIT 1
#include <asm/nops.h>

#include <asm/orc_types.h>
#include <objtool/check.h>
#include <objtool/elf.h>
#include <objtool/arch.h>
#include <objtool/warn.h>
#include <objtool/endianness.h>
#include <objtool/builtin.h>
#include <arch/elf.h>

int arch_ftrace_match(char *name)
{
	return !strcmp(name, "__fentry__");
}

static int is_x86_64(const struct elf *elf)
{
	switch (elf->ehdr.e_machine) {
	case EM_X86_64:
		return 1;
	case EM_386:
		return 0;
	default:
		WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
		return -1;
	}
}

bool arch_callee_saved_reg(unsigned char reg)
{
	switch (reg) {
	case CFI_BP:
	case CFI_BX:
	case CFI_R12:
	case CFI_R13:
	case CFI_R14:
	case CFI_R15:
		return true;

	case CFI_AX:
	case CFI_CX:
	case CFI_DX:
	case CFI_SI:
	case CFI_DI:
	case CFI_SP:
	case CFI_R8:
	case CFI_R9:
	case CFI_R10:
	case CFI_R11:
	case CFI_RA:
	default:
		return false;
	}
}

unsigned long arch_dest_reloc_offset(int addend)
{
	return addend + 4;
}
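
/*
 * Note (added for clarity, an assumption about typical usage): PC-relative
 * relocations inside call/jmp instructions are taken from the end of the
 * 4-byte field and therefore usually carry an addend of -4, so
 * arch_dest_reloc_offset(-4) == 0 points at the relocation symbol itself.
 */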

unsigned long arch_jump_destination(struct instruction *insn)
{
	return insn->offset + insn->len + insn->immediate;
}
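
/*
 * Example (illustrative, not from the original source): for a 5-byte
 * "jmp rel32" at section offset 0x100 with a decoded immediate of 0x20,
 * the branch target is 0x100 + 5 + 0x20 = 0x125.
 */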

bool arch_pc_relative_reloc(struct reloc *reloc)
{
	/*
	 * All relocation types where P (the address of the target)
	 * is included in the computation.
	 */
	switch (reloc->type) {
	case R_X86_64_PC8:
	case R_X86_64_PC16:
	case R_X86_64_PC32:
	case R_X86_64_PC64:

	case R_X86_64_PLT32:
	case R_X86_64_GOTPC32:
	case R_X86_64_GOTPCREL:
		return true;

	default:
		break;
	}

	return false;
}

#define ADD_OP(op) \
	if (!(op = calloc(1, sizeof(*op)))) \
		return -1; \
	else for (list_add_tail(&op->list, ops_list); op; op = NULL)
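
/*
 * Note (added for clarity): ADD_OP() allocates a stack_op, queues it on
 * ops_list, and then runs the attached brace block exactly once with "op"
 * pointing at the new entry, e.g.:
 *
 *	ADD_OP(op) {
 *		op->src.type = OP_SRC_POP;
 *		op->dest.type = OP_DEST_MEM;
 *	}
 *
 * The for-loop clears "op" after one pass, so the block cannot run twice.
 */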

/*
 * Helpers to decode ModRM/SIB:
 *
 * r/m| AX  CX  DX  BX |  SP |  BP |  SI  DI |
 *    | R8  R9 R10 R11 | R12 | R13 | R14 R15 |
 * Mod+----------------+-----+-----+---------+
 *  00 |    [r/m]       |[SIB]|[IP+]|  [r/m]  |
 *  01 |  [r/m + d8]    |[S+d]|   [r/m + d8]  |
 *  10 |  [r/m + d32]   |[S+D]|  [r/m + d32]  |
 *  11 |                 r/ m                 |
 */

#define mod_is_mem()	(modrm_mod != 3)
#define mod_is_reg()	(modrm_mod == 3)

#define is_RIP()	((modrm_rm & 7) == CFI_BP && modrm_mod == 0)
#define have_SIB()	((modrm_rm & 7) == CFI_SP && mod_is_mem())

#define rm_is(reg)	(have_SIB() ? \
			 sib_base == (reg) && sib_index == CFI_SP : \
			 modrm_rm == (reg))

#define rm_is_mem(reg)	(mod_is_mem() && !is_RIP() && rm_is(reg))
#define rm_is_reg(reg)	(mod_is_reg() && modrm_rm == (reg))
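
/*
 * Worked example (illustrative, not from the original source):
 * "mov -0x10(%rbp), %r12" encodes as 4c 8b 65 f0.  REX = 0x4c gives
 * rex_w = 1 and rex_r = 1, and ModRM = 0x65 decodes to mod = 1,
 * reg = 4 + 8 = CFI_R12 and rm = 5 = CFI_BP.  rm_is_mem(CFI_BP) is thus
 * true and the 0x8b case below records a load from disp(%rbp).
 */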

static bool has_notrack_prefix(struct insn *insn)
{
	int i;

	for (i = 0; i < insn->prefixes.nbytes; i++) {
		if (insn->prefixes.bytes[i] == 0x3e)
			return true;
	}

	return false;
}

int arch_decode_instruction(struct objtool_file *file, const struct section *sec,
			    unsigned long offset, unsigned int maxlen,
			    unsigned int *len, enum insn_type *type,
			    unsigned long *immediate,
			    struct list_head *ops_list)
{
	const struct elf *elf = file->elf;
	struct insn insn;
	int x86_64, ret;
	unsigned char op1, op2, op3, prefix,
		      rex = 0, rex_b = 0, rex_r = 0, rex_w = 0, rex_x = 0,
		      modrm = 0, modrm_mod = 0, modrm_rm = 0, modrm_reg = 0,
		      sib = 0, /* sib_scale = 0, */ sib_index = 0, sib_base = 0;
	struct stack_op *op = NULL;
	struct symbol *sym;
	u64 imm;

	x86_64 = is_x86_64(elf);
	if (x86_64 == -1)
		return -1;

	ret = insn_decode(&insn, sec->data->d_buf + offset, maxlen,
			  x86_64 ? INSN_MODE_64 : INSN_MODE_32);
	if (ret < 0) {
		WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
		return -1;
	}

	*len = insn.length;
	*type = INSN_OTHER;

	if (insn.vex_prefix.nbytes)
		return 0;

	prefix = insn.prefixes.bytes[0];

	op1 = insn.opcode.bytes[0];
	op2 = insn.opcode.bytes[1];
	op3 = insn.opcode.bytes[2];

	if (insn.rex_prefix.nbytes) {
		rex = insn.rex_prefix.bytes[0];
		rex_w = X86_REX_W(rex) >> 3;
		rex_r = X86_REX_R(rex) >> 2;
		rex_x = X86_REX_X(rex) >> 1;
		rex_b = X86_REX_B(rex);
	}

	if (insn.modrm.nbytes) {
		modrm = insn.modrm.bytes[0];
		modrm_mod = X86_MODRM_MOD(modrm);
		modrm_reg = X86_MODRM_REG(modrm) + 8*rex_r;
		modrm_rm = X86_MODRM_RM(modrm) + 8*rex_b;
	}

	if (insn.sib.nbytes) {
		sib = insn.sib.bytes[0];
		/* sib_scale = X86_SIB_SCALE(sib); */
		sib_index = X86_SIB_INDEX(sib) + 8*rex_x;
		sib_base = X86_SIB_BASE(sib) + 8*rex_b;
	}

	switch (op1) {

	case 0x1:
	case 0x29:
		if (rex_w && rm_is_reg(CFI_SP)) {

			/* add/sub reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
		}
		break;

	case 0x50 ... 0x57:

		/* push reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = (op1 & 0x7) + 8*rex_b;
			op->dest.type = OP_DEST_PUSH;
		}

		break;

	case 0x58 ... 0x5f:

		/* pop reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = (op1 & 0x7) + 8*rex_b;
		}

		break;

	case 0x68:
	case 0x6a:
		/* push immediate */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0x70 ... 0x7f:
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0x80 ... 0x83:
		/*
		 * 1000 00sw : mod OP r/m : immediate
		 *
		 * s - sign extend immediate
		 * w - imm8 / imm32
		 *
		 * OP: 000 ADD    100 AND
		 *     001 OR     101 SUB
		 *     010 ADC    110 XOR
		 *     011 SBB    111 CMP
		 */

		/* 64bit only */
		if (!rex_w)
			break;

		/* %rsp target only */
		if (!rm_is_reg(CFI_SP))
			break;

		imm = insn.immediate.value;
		if (op1 & 2) { /* sign extend */
			if (op1 & 1) { /* imm32 */
				imm <<= 32;
				imm = (s64)imm >> 32;
			} else { /* imm8 */
				imm <<= 56;
				imm = (s64)imm >> 56;
			}
		}
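
		/*
		 * Example (illustrative, not from the original source):
		 * "add $0x10, %rsp" encodes as 48 83 c4 10.  rex_w = 1,
		 * ModRM 0xc4 makes rm_is_reg(CFI_SP) true, and modrm_reg & 7
		 * is 0 (ADD), so the switch below records an ADD of 0x10 to
		 * CFI_SP.
		 */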

		switch (modrm_reg & 7) {
		case 5:
			imm = -imm;
			/* fallthrough */
		case 0:
			/* add/sub imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = imm;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		case 4:
			/* and imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_AND;
				op->src.reg = CFI_SP;
				op->src.offset = insn.immediate.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		default:
			/* WARN ? */
			break;
		}

		break;

	case 0x89:
		if (!rex_w)
			break;

		if (modrm_reg == CFI_SP) {

			if (mod_is_reg()) {
				/* mov %rsp, reg */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG;
					op->dest.reg = modrm_rm;
				}
				break;

			} else {
				/* skip RIP relative displacement */
				if (is_RIP())
					break;

				/* skip nontrivial SIB */
				if (have_SIB()) {
					modrm_rm = sib_base;
					if (sib_index != CFI_SP)
						break;
				}

				/* mov %rsp, disp(%reg) */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG_INDIRECT;
					op->dest.reg = modrm_rm;
					op->dest.offset = insn.displacement.value;
				}
				break;
			}

			break;
		}

		if (rm_is_reg(CFI_SP)) {

			/* mov reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		/* fallthrough */
	case 0x88:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov reg, disp(%rbp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_BP;
				op->dest.offset = insn.displacement.value;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov reg, disp(%rsp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_SP;
				op->dest.offset = insn.displacement.value;
			}
			break;
		}

		break;

	case 0x8b:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov disp(%rbp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_BP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov disp(%rsp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_SP;
				op->src.offset = insn.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		break;

	case 0x8d:
		if (mod_is_reg()) {
			WARN("invalid LEA encoding at %s:0x%lx", sec->name, offset);
			break;
		}

		/* skip non 64bit ops */
		if (!rex_w)
			break;

		/* skip RIP relative displacement */
		if (is_RIP())
			break;

		/* skip nontrivial SIB */
		if (have_SIB()) {
			modrm_rm = sib_base;
			if (sib_index != CFI_SP)
				break;
		}

		/* lea disp(%src), %dst */
		ADD_OP(op) {
			op->src.offset = insn.displacement.value;
			if (!op->src.offset) {
				/* lea (%src), %dst */
				op->src.type = OP_SRC_REG;
			} else {
				/* lea disp(%src), %dst */
				op->src.type = OP_SRC_ADD;
			}
			op->src.reg = modrm_rm;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = modrm_reg;
		}
		break;

	case 0x8f:
		/* pop to mem */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x90:
		*type = INSN_NOP;
		break;

	case 0x9c:
		/* pushf */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSHF;
		}
		break;

	case 0x9d:
		/* popf */
		ADD_OP(op) {
			op->src.type = OP_SRC_POPF;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x0f:

		if (op2 == 0x01) {

			if (modrm == 0xca)
				*type = INSN_CLAC;
			else if (modrm == 0xcb)
				*type = INSN_STAC;

		} else if (op2 >= 0x80 && op2 <= 0x8f) {

			*type = INSN_JUMP_CONDITIONAL;

		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
			   op2 == 0x35) {

			/* sysenter, sysret */
			*type = INSN_CONTEXT_SWITCH;

		} else if (op2 == 0x0b || op2 == 0xb9) {

			/* ud2 */
			*type = INSN_BUG;

		} else if (op2 == 0x0d || op2 == 0x1f) {

			/* nopl/nopw */
			*type = INSN_NOP;

		} else if (op2 == 0x1e) {

			if (prefix == 0xf3 && (modrm == 0xfa || modrm == 0xfb))
				*type = INSN_ENDBR;

		} else if (op2 == 0x38 && op3 == 0xf8) {
			if (insn.prefixes.nbytes == 1 &&
			    insn.prefixes.bytes[0] == 0xf2) {
				/* ENQCMD cannot be used in the kernel. */
				WARN("ENQCMD instruction at %s:%lx", sec->name,
				     offset);
			}

		} else if (op2 == 0xa0 || op2 == 0xa8) {

			/* push fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}

		} else if (op2 == 0xa1 || op2 == 0xa9) {

			/* pop fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_POP;
				op->dest.type = OP_DEST_MEM;
			}
		}

		break;

	case 0xc9:
		/*
		 * leave
		 *
		 * equivalent to:
		 * mov bp, sp
		 * pop bp
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = CFI_BP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_BP;
		}
		break;

	case 0xcc:
		/* int3 */
		*type = INSN_TRAP;
		break;

	case 0xe3:
		/* jecxz/jrcxz */
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe9:
	case 0xeb:
		*type = INSN_JUMP_UNCONDITIONAL;
		break;

	case 0xc2:
	case 0xc3:
		*type = INSN_RETURN;
		break;

	case 0xc7: /* mov imm, r/m */
		if (!opts.noinstr)
			break;

		if (insn.length == 3+4+4 && !strncmp(sec->name, ".init.text", 10)) {
			struct reloc *immr, *disp;
			struct symbol *func;
			int idx;

			immr = find_reloc_by_dest(elf, (void *)sec, offset+3);
			disp = find_reloc_by_dest(elf, (void *)sec, offset+7);

			if (!immr || strcmp(immr->sym->name, "pv_ops"))
				break;

			idx = (immr->addend + 8) / sizeof(void *);

			func = disp->sym;
			if (disp->sym->type == STT_SECTION)
				func = find_symbol_by_offset(disp->sym->sec, disp->addend);
			if (!func) {
				WARN("no func for pv_ops[]");
				return -1;
			}

			objtool_pv_add(file, idx, func);
		}

		break;

	case 0xcf: /* iret */
		/*
		 * Handle sync_core(), which has an IRET to self.
		 * All other IRET are in STT_NONE entry code.
		 */
		sym = find_symbol_containing(sec, offset);
		if (sym && sym->type == STT_FUNC) {
			ADD_OP(op) {
				/* add $40, %rsp */
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = 5*8;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
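			/*
			 * Note (added for clarity): the 64-bit IRET frame
			 * holds RIP, CS, RFLAGS, RSP and SS, i.e. five 8-byte
			 * slots, which is why an IRET-to-self is modelled as
			 * "add $40, %rsp".
			 */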
			break;
		}

		/* fallthrough */

	case 0xca: /* retf */
	case 0xcb: /* retf */
		*type = INSN_CONTEXT_SWITCH;
		break;

	case 0xe0: /* loopne */
	case 0xe1: /* loope */
	case 0xe2: /* loop */
		*type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe8:
		*type = INSN_CALL;
		/*
		 * For the impact on the stack, a CALL behaves like
		 * a PUSH of an immediate value (the return address).
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0xfc:
		*type = INSN_CLD;
		break;

	case 0xfd:
		*type = INSN_STD;
		break;

	case 0xff:
		if (modrm_reg == 2 || modrm_reg == 3) {

			*type = INSN_CALL_DYNAMIC;
			if (has_notrack_prefix(&insn))
				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);

		} else if (modrm_reg == 4) {

			*type = INSN_JUMP_DYNAMIC;
			if (has_notrack_prefix(&insn))
				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);

		} else if (modrm_reg == 5) {

			/* jmpf */
			*type = INSN_CONTEXT_SWITCH;

		} else if (modrm_reg == 6) {

			/* push from mem */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}
		}

		break;

	default:
		break;
	}

	*immediate = insn.immediate.nbytes ? insn.immediate.value : 0;

	return 0;
}

void arch_initial_func_cfi_state(struct cfi_init_state *state)
{
	int i;

	for (i = 0; i < CFI_NUM_REGS; i++) {
		state->regs[i].base = CFI_UNDEFINED;
		state->regs[i].offset = 0;
	}

	/* initial CFA (call frame address) */
	state->cfa.base = CFI_SP;
	state->cfa.offset = 8;

	/* initial RA (return address) */
	state->regs[CFI_RA].base = CFI_CFA;
	state->regs[CFI_RA].offset = -8;
}
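
/*
 * Note (added for clarity): at function entry the CALL has just pushed the
 * return address, so the CFA is RSP + 8 and the return address lives at
 * CFA - 8; all other registers start out with no tracked save location.
 */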

const char *arch_nop_insn(int len)
{
	static const char nops[5][5] = {
		{ BYTES_NOP1 },
		{ BYTES_NOP2 },
		{ BYTES_NOP3 },
		{ BYTES_NOP4 },
		{ BYTES_NOP5 },
	};

	if (len < 1 || len > 5) {
		WARN("invalid NOP size: %d\n", len);
		return NULL;
	}

	return nops[len-1];
}

#define BYTE_RET	0xC3

const char *arch_ret_insn(int len)
{
	static const char ret[5][5] = {
		{ BYTE_RET },
		{ BYTE_RET, 0xcc },
		{ BYTE_RET, 0xcc, BYTES_NOP1 },
		{ BYTE_RET, 0xcc, BYTES_NOP2 },
		{ BYTE_RET, 0xcc, BYTES_NOP3 },
	};

	if (len < 1 || len > 5) {
		WARN("invalid RET size: %d\n", len);
		return NULL;
	}

	return ret[len-1];
}
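
/*
 * Note (added for clarity, based on the table above): when a replacement RET
 * needs more than one byte, the byte after the RET is an int3 (0xcc) so that
 * straight-line execution or speculation past the return traps, and any
 * remaining bytes are NOP padding.
 */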

int arch_decode_hint_reg(u8 sp_reg, int *base)
{
	switch (sp_reg) {
	case ORC_REG_UNDEFINED:
		*base = CFI_UNDEFINED;
		break;
	case ORC_REG_SP:
		*base = CFI_SP;
		break;
	case ORC_REG_BP:
		*base = CFI_BP;
		break;
	case ORC_REG_SP_INDIRECT:
		*base = CFI_SP_INDIRECT;
		break;
	case ORC_REG_R10:
		*base = CFI_R10;
		break;
	case ORC_REG_R13:
		*base = CFI_R13;
		break;
	case ORC_REG_DI:
		*base = CFI_DI;
		break;
	case ORC_REG_DX:
		*base = CFI_DX;
		break;
	default:
		return -1;
	}

	return 0;
}

bool arch_is_retpoline(struct symbol *sym)
{
	return !strncmp(sym->name, "__x86_indirect_", 15);
}
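
/*
 * Example (illustrative): this matches symbols such as
 * "__x86_indirect_thunk_rax", the per-register retpoline thunks.
 */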

bool arch_is_rethunk(struct symbol *sym)
{
	return !strcmp(sym->name, "__x86_return_thunk");
}