// SPDX-License-Identifier: GPL-2.0-or-later
/*
 * Copyright (C) 2015 Josh Poimboeuf <jpoimboe@redhat.com>
 */

#include <stdio.h>
#include <stdlib.h>

#define unlikely(cond) (cond)
#include <asm/insn.h>
#include "../../../arch/x86/lib/inat.c"
#include "../../../arch/x86/lib/insn.c"

#define CONFIG_64BIT 1
#include <asm/nops.h>

#include <asm/orc_types.h>
#include <objtool/check.h>
#include <objtool/elf.h>
#include <objtool/arch.h>
#include <objtool/warn.h>
#include <objtool/endianness.h>
#include <objtool/builtin.h>
#include <arch/elf.h>

int arch_ftrace_match(char *name)
{
	return !strcmp(name, "__fentry__");
}

static int is_x86_64(const struct elf *elf)
{
	switch (elf->ehdr.e_machine) {
	case EM_X86_64:
		return 1;
	case EM_386:
		return 0;
	default:
		WARN("unexpected ELF machine type %d", elf->ehdr.e_machine);
		return -1;
	}
}

bool arch_callee_saved_reg(unsigned char reg)
{
	switch (reg) {
	case CFI_BP:
	case CFI_BX:
	case CFI_R12:
	case CFI_R13:
	case CFI_R14:
	case CFI_R15:
		return true;

	case CFI_AX:
	case CFI_CX:
	case CFI_DX:
	case CFI_SI:
	case CFI_DI:
	case CFI_SP:
	case CFI_R8:
	case CFI_R9:
	case CFI_R10:
	case CFI_R11:
	case CFI_RA:
	default:
		return false;
	}
}

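/*
 * A PC32-style relocation's addend is taken relative to the start of the
 * 4-byte immediate it patches; adding 4 rebases it to the end of the
 * instruction, which is what the destination calculations expect.
 */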
unsigned long arch_dest_reloc_offset(int addend)
{
	return addend + 4;
}

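/*
 * Relative jumps land at the end of the instruction plus the signed
 * immediate: for example, a 2-byte "jmp +0x10" at offset 0x100 targets
 * 0x102 + 0x10 = 0x112.
 */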
unsigned long arch_jump_destination(struct instruction *insn)
{
	return insn->offset + insn->len + insn->immediate;
}

bool arch_pc_relative_reloc(struct reloc *reloc)
{
	/*
	 * All relocation types where P (the address of the target)
	 * is included in the computation.
	 */
	switch (reloc_type(reloc)) {
	case R_X86_64_PC8:
	case R_X86_64_PC16:
	case R_X86_64_PC32:
	case R_X86_64_PC64:

	case R_X86_64_PLT32:
	case R_X86_64_GOTPC32:
	case R_X86_64_GOTPCREL:
		return true;

	default:
		break;
	}

	return false;
}

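/*
 * Allocate a struct stack_op, append it to the instruction's ops list and
 * run the block that follows exactly once to fill in its fields.  The
 * "else for" shape keeps ADD_OP(op) { ... } usable as a single statement;
 * allocation failure makes the caller return -1.
 */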
#define ADD_OP(op) \
	if (!(op = calloc(1, sizeof(*op)))) \
		return -1; \
	else for (*ops_list = op, ops_list = &op->next; op; op = NULL)

/*
 * Helpers to decode ModRM/SIB:
 *
 * r/m| AX  CX  DX  BX |  SP |  BP |  SI  DI |
 *    | R8  R9 R10 R11 | R12 | R13 | R14 R15 |
 * Mod+----------------+-----+-----+---------+
 *  00 |      [r/m]     |[SIB]|[IP+]|  [r/m]  |
 *  01 |   [r/m + d8]   |[S+d]|   [r/m + d8]  |
 *  10 |  [r/m + d32]   |[S+D]|  [r/m + d32]  |
 *  11 |                  r/ m                |
 */

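/*
 * Example: "48 89 65 08" (mov %rsp, 0x8(%rbp)) has modrm 0x65, i.e.
 * mod=01, reg=4 (SP), rm=5 (BP): a store to [rbp + d8] with no SIB byte.
 */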
#define mod_is_mem()	(modrm_mod != 3)
#define mod_is_reg()	(modrm_mod == 3)

#define is_RIP()	((modrm_rm & 7) == CFI_BP && modrm_mod == 0)
#define have_SIB()	((modrm_rm & 7) == CFI_SP && mod_is_mem())

/*
 * Check the ModRM register. If there is a SIB byte then check with
 * the SIB base register. But if the SIB base is 5 (i.e. CFI_BP) and
 * ModRM mod is 0 then there is no base register.
 */
#define rm_is(reg) (have_SIB() ? \
		    sib_base == (reg) && sib_index == CFI_SP && \
		    (sib_base != CFI_BP || modrm_mod != 0) : \
		    modrm_rm == (reg))

#define rm_is_mem(reg)	(mod_is_mem() && !is_RIP() && rm_is(reg))
#define rm_is_reg(reg)	(mod_is_reg() && modrm_rm == (reg))

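/*
 * 0x3e is the DS segment-override byte, which CET reuses as the NOTRACK
 * prefix on indirect branches; the decoder below only checks for it in
 * order to warn when it shows up.
 */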
static bool has_notrack_prefix(struct insn *insn)
{
	int i;

	for (i = 0; i < insn->prefixes.nbytes; i++) {
		if (insn->prefixes.bytes[i] == 0x3e)
			return true;
	}

	return false;
}

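/*
 * Decode a single instruction with the kernel's x86 instruction decoder,
 * classify it (jump, call, return, nop, ...) and record any stack_ops that
 * describe how it changes the stack/frame state for the CFI checker.
 */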
int arch_decode_instruction(struct objtool_file *file, const struct section *sec,
			    unsigned long offset, unsigned int maxlen,
			    struct instruction *insn)
{
	struct stack_op **ops_list = &insn->stack_ops;
	const struct elf *elf = file->elf;
	struct insn ins;
	int x86_64, ret;
	unsigned char op1, op2, op3, prefix,
		      rex = 0, rex_b = 0, rex_r = 0, rex_w = 0, rex_x = 0,
		      modrm = 0, modrm_mod = 0, modrm_rm = 0, modrm_reg = 0,
		      sib = 0, /* sib_scale = 0, */ sib_index = 0, sib_base = 0;
	struct stack_op *op = NULL;
	struct symbol *sym;
	u64 imm;

	x86_64 = is_x86_64(elf);
	if (x86_64 == -1)
		return -1;

	ret = insn_decode(&ins, sec->data->d_buf + offset, maxlen,
			  x86_64 ? INSN_MODE_64 : INSN_MODE_32);
	if (ret < 0) {
		WARN("can't decode instruction at %s:0x%lx", sec->name, offset);
		return -1;
	}

	insn->len = ins.length;
	insn->type = INSN_OTHER;

	if (ins.vex_prefix.nbytes)
		return 0;

	prefix = ins.prefixes.bytes[0];

	op1 = ins.opcode.bytes[0];
	op2 = ins.opcode.bytes[1];
	op3 = ins.opcode.bytes[2];

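	/*
	 * Shift the REX.W/R/X/B bits down to bit 0 so they can be folded
	 * into the ModRM/SIB register numbers below as the "+8" high bit.
	 */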
	if (ins.rex_prefix.nbytes) {
		rex = ins.rex_prefix.bytes[0];
		rex_w = X86_REX_W(rex) >> 3;
		rex_r = X86_REX_R(rex) >> 2;
		rex_x = X86_REX_X(rex) >> 1;
		rex_b = X86_REX_B(rex);
	}

	if (ins.modrm.nbytes) {
		modrm = ins.modrm.bytes[0];
		modrm_mod = X86_MODRM_MOD(modrm);
		modrm_reg = X86_MODRM_REG(modrm) + 8*rex_r;
		modrm_rm = X86_MODRM_RM(modrm) + 8*rex_b;
	}

	if (ins.sib.nbytes) {
		sib = ins.sib.bytes[0];
		/* sib_scale = X86_SIB_SCALE(sib); */
		sib_index = X86_SIB_INDEX(sib) + 8*rex_x;
		sib_base = X86_SIB_BASE(sib) + 8*rex_b;
	}

	switch (op1) {

	case 0x1:
	case 0x29:
		if (rex_w && rm_is_reg(CFI_SP)) {

			/* add/sub reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
		}
		break;

	case 0x50 ... 0x57:

		/* push reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = (op1 & 0x7) + 8*rex_b;
			op->dest.type = OP_DEST_PUSH;
		}

		break;

	case 0x58 ... 0x5f:

		/* pop reg */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = (op1 & 0x7) + 8*rex_b;
		}

		break;

	case 0x68:
	case 0x6a:
		/* push immediate */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0x70 ... 0x7f:
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0x80 ... 0x83:
		/*
		 * 1000 00sw : mod OP r/m : immediate
		 *
		 * s - sign extend immediate
		 * w - imm8 / imm32
		 *
		 * OP: 000 ADD    100 AND
		 *     001 OR     101 SUB
		 *     010 ADC    110 XOR
		 *     011 SBB    111 CMP
		 */

		/* 64bit only */
		if (!rex_w)
			break;

		/* %rsp target only */
		if (!rm_is_reg(CFI_SP))
			break;

		imm = ins.immediate.value;
		if (op1 & 2) { /* sign extend */
			if (op1 & 1) { /* imm32 */
				imm <<= 32;
				imm = (s64)imm >> 32;
			} else { /* imm8 */
				imm <<= 56;
				imm = (s64)imm >> 56;
			}
		}

		switch (modrm_reg & 7) {
		case 5:
			imm = -imm;
			fallthrough;
		case 0:
			/* add/sub imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = imm;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		case 4:
			/* and imm, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_AND;
				op->src.reg = CFI_SP;
				op->src.offset = ins.immediate.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;

		default:
			/* WARN ? */
			break;
		}

		break;

	case 0x89:
		if (!rex_w)
			break;

		if (modrm_reg == CFI_SP) {

			if (mod_is_reg()) {
				/* mov %rsp, reg */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG;
					op->dest.reg = modrm_rm;
				}
				break;

			} else {
				/* skip RIP relative displacement */
				if (is_RIP())
					break;

				/* skip nontrivial SIB */
				if (have_SIB()) {
					modrm_rm = sib_base;
					if (sib_index != CFI_SP)
						break;
				}

				/* mov %rsp, disp(%reg) */
				ADD_OP(op) {
					op->src.type = OP_SRC_REG;
					op->src.reg = CFI_SP;
					op->dest.type = OP_DEST_REG_INDIRECT;
					op->dest.reg = modrm_rm;
					op->dest.offset = ins.displacement.value;
				}
				break;
			}

			break;
		}

		if (rm_is_reg(CFI_SP)) {

			/* mov reg, %rsp */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		fallthrough;
	case 0x88:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov reg, disp(%rbp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_BP;
				op->dest.offset = ins.displacement.value;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov reg, disp(%rsp) */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG;
				op->src.reg = modrm_reg;
				op->dest.type = OP_DEST_REG_INDIRECT;
				op->dest.reg = CFI_SP;
				op->dest.offset = ins.displacement.value;
			}
			break;
		}

		break;

	case 0x8b:
		if (!rex_w)
			break;

		if (rm_is_mem(CFI_BP)) {

			/* mov disp(%rbp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_BP;
				op->src.offset = ins.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		if (rm_is_mem(CFI_SP)) {

			/* mov disp(%rsp), reg */
			ADD_OP(op) {
				op->src.type = OP_SRC_REG_INDIRECT;
				op->src.reg = CFI_SP;
				op->src.offset = ins.displacement.value;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = modrm_reg;
			}
			break;
		}

		break;

	case 0x8d:
		if (mod_is_reg()) {
			WARN("invalid LEA encoding at %s:0x%lx", sec->name, offset);
			break;
		}

		/* skip non 64bit ops */
		if (!rex_w)
			break;

		/* skip nontrivial SIB */
		if (have_SIB()) {
			modrm_rm = sib_base;
			if (sib_index != CFI_SP)
				break;
		}

		/* lea disp(%rip), %dst */
		if (is_RIP()) {
			insn->type = INSN_LEA_RIP;
			break;
		}

		/* lea disp(%src), %dst */
		ADD_OP(op) {
			op->src.offset = ins.displacement.value;
			if (!op->src.offset) {
				/* lea (%src), %dst */
				op->src.type = OP_SRC_REG;
			} else {
				/* lea disp(%src), %dst */
				op->src.type = OP_SRC_ADD;
			}
			op->src.reg = modrm_rm;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = modrm_reg;
		}
		break;

	case 0x8f:
		/* pop to mem */
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_MEM;
		}
		break;

	case 0x90:
		insn->type = INSN_NOP;
		break;

	case 0x9c:
		/* pushf */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSHF;
		}
		break;

	case 0x9d:
		/* popf */
		ADD_OP(op) {
			op->src.type = OP_SRC_POPF;
			op->dest.type = OP_DEST_MEM;
		}
		break;

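	/*
	 * 0x0f escapes into the two-byte opcode map.  Note that "0f 01 ca"
	 * is CLAC, while the same bytes behind an f3/f2 prefix encode
	 * ERETU/ERETS (FRED), hence the check on the last prefix below.
	 */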
	case 0x0f:

		if (op2 == 0x01) {

			switch (insn_last_prefix_id(&ins)) {
			case INAT_PFX_REPE:
			case INAT_PFX_REPNE:
				if (modrm == 0xca)
					/* eretu/erets */
					insn->type = INSN_CONTEXT_SWITCH;
				break;
			default:
				if (modrm == 0xca)
					insn->type = INSN_CLAC;
				else if (modrm == 0xcb)
					insn->type = INSN_STAC;
				break;
			}
		} else if (op2 >= 0x80 && op2 <= 0x8f) {

			insn->type = INSN_JUMP_CONDITIONAL;

		} else if (op2 == 0x05 || op2 == 0x07 || op2 == 0x34 ||
			   op2 == 0x35) {

			/* sysenter, sysret */
			insn->type = INSN_CONTEXT_SWITCH;

		} else if (op2 == 0x0b || op2 == 0xb9) {

			/* ud2 */
			insn->type = INSN_BUG;

		} else if (op2 == 0x0d || op2 == 0x1f) {

			/* nopl/nopw */
			insn->type = INSN_NOP;

		} else if (op2 == 0x1e) {

			if (prefix == 0xf3 && (modrm == 0xfa || modrm == 0xfb))
				insn->type = INSN_ENDBR;


		} else if (op2 == 0x38 && op3 == 0xf8) {
			if (ins.prefixes.nbytes == 1 &&
			    ins.prefixes.bytes[0] == 0xf2) {
				/* ENQCMD cannot be used in the kernel. */
				WARN("ENQCMD instruction at %s:%lx", sec->name,
				     offset);
			}

		} else if (op2 == 0xa0 || op2 == 0xa8) {

			/* push fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}

		} else if (op2 == 0xa1 || op2 == 0xa9) {

			/* pop fs/gs */
			ADD_OP(op) {
				op->src.type = OP_SRC_POP;
				op->dest.type = OP_DEST_MEM;
			}
		}

		break;

	case 0xc9:
		/*
		 * leave
		 *
		 * equivalent to:
		 *   mov bp, sp
		 *   pop bp
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_REG;
			op->src.reg = CFI_BP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_SP;
		}
		ADD_OP(op) {
			op->src.type = OP_SRC_POP;
			op->dest.type = OP_DEST_REG;
			op->dest.reg = CFI_BP;
		}
		break;

	case 0xcc:
		/* int3 */
		insn->type = INSN_TRAP;
		break;

	case 0xe3:
		/* jecxz/jrcxz */
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe9:
	case 0xeb:
		insn->type = INSN_JUMP_UNCONDITIONAL;
		break;

	case 0xc2:
	case 0xc3:
		insn->type = INSN_RETURN;
		break;

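	/*
	 * With --noinstr, look for "mov $imm32, disp32(%reg)" stores into
	 * pv_ops[] in .init.text: the relocation on the displacement names
	 * pv_ops (its addend gives the slot index) and the relocation on
	 * the immediate names the function installed there, so indirect
	 * calls through that slot can later be matched to real targets.
	 */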
	case 0xc7: /* mov imm, r/m */
		if (!opts.noinstr)
			break;

		if (ins.length == 3+4+4 && !strncmp(sec->name, ".init.text", 10)) {
			struct reloc *immr, *disp;
			struct symbol *func;
			int idx;

			immr = find_reloc_by_dest(elf, (void *)sec, offset+3);
			disp = find_reloc_by_dest(elf, (void *)sec, offset+7);

			if (!immr || strcmp(immr->sym->name, "pv_ops"))
				break;

			idx = (reloc_addend(immr) + 8) / sizeof(void *);

			func = disp->sym;
			if (disp->sym->type == STT_SECTION)
				func = find_symbol_by_offset(disp->sym->sec, reloc_addend(disp));
			if (!func) {
				WARN("no func for pv_ops[]");
				return -1;
			}

			objtool_pv_add(file, idx, func);
		}

		break;

	case 0xcf: /* iret */
		/*
		 * Handle sync_core(), which has an IRET to self.
		 * All other IRET are in STT_NONE entry code.
		 */
		sym = find_symbol_containing(sec, offset);
		if (sym && sym->type == STT_FUNC) {
			ADD_OP(op) {
				/* add $40, %rsp */
				op->src.type = OP_SRC_ADD;
				op->src.reg = CFI_SP;
				op->src.offset = 5*8;
				op->dest.type = OP_DEST_REG;
				op->dest.reg = CFI_SP;
			}
			break;
		}

		fallthrough;

	case 0xca: /* retf */
	case 0xcb: /* retf */
		insn->type = INSN_CONTEXT_SWITCH;
		break;

	case 0xe0: /* loopne */
	case 0xe1: /* loope */
	case 0xe2: /* loop */
		insn->type = INSN_JUMP_CONDITIONAL;
		break;

	case 0xe8:
		insn->type = INSN_CALL;
		/*
		 * For the impact on the stack, a CALL behaves like
		 * a PUSH of an immediate value (the return address).
		 */
		ADD_OP(op) {
			op->src.type = OP_SRC_CONST;
			op->dest.type = OP_DEST_PUSH;
		}
		break;

	case 0xfc:
		insn->type = INSN_CLD;
		break;

	case 0xfd:
		insn->type = INSN_STD;
		break;

	case 0xff:
		if (modrm_reg == 2 || modrm_reg == 3) {

			insn->type = INSN_CALL_DYNAMIC;
			if (has_notrack_prefix(&ins))
				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);

		} else if (modrm_reg == 4) {

			insn->type = INSN_JUMP_DYNAMIC;
			if (has_notrack_prefix(&ins))
				WARN("notrack prefix found at %s:0x%lx", sec->name, offset);

		} else if (modrm_reg == 5) {

			/* jmpf */
			insn->type = INSN_CONTEXT_SWITCH;

		} else if (modrm_reg == 6) {

			/* push from mem */
			ADD_OP(op) {
				op->src.type = OP_SRC_CONST;
				op->dest.type = OP_DEST_PUSH;
			}
		}

		break;

	default:
		break;
	}

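	/*
	 * Stash the immediate (or, failing that, the displacement, which is
	 * what carries the target of RIP-relative operands such as the LEA
	 * handled above) for later destination calculations.
	 */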
	if (ins.immediate.nbytes)
		insn->immediate = ins.immediate.value;
	else if (ins.displacement.nbytes)
		insn->immediate = ins.displacement.value;

	return 0;
}

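/*
 * At function entry the only thing on the stack is the return address
 * pushed by CALL: the CFA (the caller's stack pointer before the call)
 * is therefore SP+8, and the return address lives at CFA-8.
 */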
void arch_initial_func_cfi_state(struct cfi_init_state *state)
{
	int i;

	for (i = 0; i < CFI_NUM_REGS; i++) {
		state->regs[i].base = CFI_UNDEFINED;
		state->regs[i].offset = 0;
	}

	/* initial CFA (call frame address) */
	state->cfa.base = CFI_SP;
	state->cfa.offset = 8;

	/* initial RA (return address) */
	state->regs[CFI_RA].base = CFI_CFA;
	state->regs[CFI_RA].offset = -8;
}

const char *arch_nop_insn(int len)
{
	static const char nops[5][5] = {
		{ BYTES_NOP1 },
		{ BYTES_NOP2 },
		{ BYTES_NOP3 },
		{ BYTES_NOP4 },
		{ BYTES_NOP5 },
	};

	if (len < 1 || len > 5) {
		WARN("invalid NOP size: %d\n", len);
		return NULL;
	}

	return nops[len-1];
}

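/*
 * Replacement RETs longer than one byte are padded with an int3 (0xcc),
 * which also acts as a trap against straight-line speculation past the
 * RET, followed by NOPs to fill the remaining length.
 */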
#define BYTE_RET	0xC3

const char *arch_ret_insn(int len)
{
	static const char ret[5][5] = {
		{ BYTE_RET },
		{ BYTE_RET, 0xcc },
		{ BYTE_RET, 0xcc, BYTES_NOP1 },
		{ BYTE_RET, 0xcc, BYTES_NOP2 },
		{ BYTE_RET, 0xcc, BYTES_NOP3 },
	};

	if (len < 1 || len > 5) {
		WARN("invalid RET size: %d\n", len);
		return NULL;
	}

	return ret[len-1];
}

int arch_decode_hint_reg(u8 sp_reg, int *base)
{
	switch (sp_reg) {
	case ORC_REG_UNDEFINED:
		*base = CFI_UNDEFINED;
		break;
	case ORC_REG_SP:
		*base = CFI_SP;
		break;
	case ORC_REG_BP:
		*base = CFI_BP;
		break;
	case ORC_REG_SP_INDIRECT:
		*base = CFI_SP_INDIRECT;
		break;
	case ORC_REG_R10:
		*base = CFI_R10;
		break;
	case ORC_REG_R13:
		*base = CFI_R13;
		break;
	case ORC_REG_DI:
		*base = CFI_DI;
		break;
	case ORC_REG_DX:
		*base = CFI_DX;
		break;
	default:
		return -1;
	}

	return 0;
}

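/*
 * Retpolines, the return thunk and the few symbols with an embedded
 * (mid-symbol) instruction are recognized purely by name so the checker
 * can special-case control flow through them.
 */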
bool arch_is_retpoline(struct symbol *sym)
{
	return !strncmp(sym->name, "__x86_indirect_", 15);
}

bool arch_is_rethunk(struct symbol *sym)
{
	return !strcmp(sym->name, "__x86_return_thunk");
}

bool arch_is_embedded_insn(struct symbol *sym)
{
	return !strcmp(sym->name, "retbleed_return_thunk") ||
	       !strcmp(sym->name, "srso_safe_ret");
}